hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f713a22268d35fec0c248fbb31768628bbadd87f | 412 | py | Python | csv_to_table/migrations/0004_auto_20200119_1410.py | KariSpace/CRM_Sedicomm | cb19e90ca99c7a50a1841afbfb878191f62dec5c | [
"MIT"
] | null | null | null | csv_to_table/migrations/0004_auto_20200119_1410.py | KariSpace/CRM_Sedicomm | cb19e90ca99c7a50a1841afbfb878191f62dec5c | [
"MIT"
] | null | null | null | csv_to_table/migrations/0004_auto_20200119_1410.py | KariSpace/CRM_Sedicomm | cb19e90ca99c7a50a1841afbfb878191f62dec5c | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2020-01-19 12:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration (2020-01-19): alter ``people.date``.

    Switches the field to ``auto_now_add=True`` so the date is populated once
    at row-creation time.
    """

    dependencies = [
        ('csv_to_table', '0003_auto_20200119_1405'),
    ]
    operations = [
        migrations.AlterField(
            model_name='people',
            name='date',
            field=models.DateField(auto_now_add=True, verbose_name='Date'),
        ),
    ]
| 21.684211 | 75 | 0.614078 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('csv_to_table', '0003_auto_20200119_1405'),
]
operations = [
migrations.AlterField(
model_name='people',
name='date',
field=models.DateField(auto_now_add=True, verbose_name='Date'),
),
]
| true | true |
f713a32ffa56c392f78bb86890ac67b2a4fb768f | 1,249 | py | Python | classes/abstract_command.py | platofff/quote-bot | ab7e96b8ce67f16e879cf8628cbbbac6d63eab83 | [
"MIT"
] | 1 | 2021-10-16T18:48:05.000Z | 2021-10-16T18:48:05.000Z | classes/abstract_command.py | platofff/quote-bot | ab7e96b8ce67f16e879cf8628cbbbac6d63eab83 | [
"MIT"
] | null | null | null | classes/abstract_command.py | platofff/quote-bot | ab7e96b8ce67f16e879cf8628cbbbac6d63eab83 | [
"MIT"
] | 1 | 2022-01-15T14:11:26.000Z | 2022-01-15T14:11:26.000Z | import json
# Path to the bot's JSON configuration, resolved relative to the working dir.
config = 'config.json'
with open(config, 'r') as f:
    data = json.load(f)
# "default" section of the config; ``default["prefix"]`` is used below as the
# reply prefix in AbstractCommand.ans_up.
default = data["default"]
class AbstractCommand():
    """Base class for bot commands.

    Holds the command's trigger words (``handler``) and a human-readable
    ``description``, and provides the shared ``ans_up`` reply helper.

    Fixes over the previous version: the mutable-default-argument pitfall in
    ``__init__`` (all instances shared one list), ``!= None`` comparisons, and
    the duplicated first-word upper-case detection in ``ans_up``.
    """

    def __init__(self, handler=None, description=None):
        # Each instance gets its own list; previously ``handler=[]`` was
        # shared across every instance created with the default.
        self.handler = [] if handler is None else handler
        self.description = description

    def hdl(self):
        """Return the list of trigger words for this command."""
        return self.handler

    def dsc(self):
        """Return the command description (may be None)."""
        return self.description

    @staticmethod
    async def ans_up(ans, m, att=None):
        """Reply to message ``m`` with text ``ans`` and/or attachment ``att``.

        If the first word of the incoming message is fully upper-case, the
        reply is "shouted" back: prefixed with ``default["prefix"]`` and
        upper-cased too. Returns True when a reply was sent; otherwise falls
        through (implicitly returning None), matching the original behavior.
        """
        # Upper-case detection is based on the first word only.
        first_word = m.text.split(' ', 1)[0]
        up = first_word == first_word.upper()
        if ans != '':
            if up:
                await m.answer(default["prefix"] + ans.upper())
            else:
                await m.answer(ans)
            return True
        elif att is not None:
            if up:
                await m.answer(default["prefix"].upper(), attachment=att)
            else:
                await m.answer(attachment=att)
            return True
config = 'config.json'
with open(config, 'r') as f:
data = json.load(f)
default = data["default"]
class AbstractCommand():
def __init__(self, handler = [], description = None):
self.handler = handler
self.description = description
def hdl(self):
return self.handler
def dsc(self):
return self.description
@staticmethod
async def ans_up(ans, m, att = None):
if (m.text.count(' ') == 0):
if (m.text == m.text.upper()):
up = True
else:
up = False
else:
ind = m.text.index(' ')
text = m.text[:ind]
if (text == text.upper()):
up = True
else:
up = False
if (ans != ''):
if (up):
await m.answer(default["prefix"] + ans.upper())
return True
else:
await m.answer(ans)
return True
elif (att != None):
if (up):
await m.answer(default["prefix"].upper(), attachment=att)
return True
else:
await m.answer(attachment=att)
return True | true | true |
f713a46f949cdeca6f625144d43e51fc0aacc310 | 24,542 | py | Python | localstack/services/install.py | VanRoy/localstack | 39f1a1c034ae345f87a1485d717428b59308e6fc | [
"Apache-2.0"
] | 1 | 2022-01-05T10:10:38.000Z | 2022-01-05T10:10:38.000Z | localstack/services/install.py | VanRoy/localstack | 39f1a1c034ae345f87a1485d717428b59308e6fc | [
"Apache-2.0"
] | null | null | null | localstack/services/install.py | VanRoy/localstack | 39f1a1c034ae345f87a1485d717428b59308e6fc | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import functools
import glob
import logging
import os
import platform
import re
import shutil
import stat
import sys
import tempfile
import time
from pathlib import Path
from typing import Callable, Dict, List, Tuple
import requests
from plugin import Plugin, PluginManager
from localstack import config
from localstack.config import dirs, is_env_true
from localstack.constants import (
DEFAULT_SERVICE_PORTS,
DYNAMODB_JAR_URL,
ELASTICMQ_JAR_URL,
ELASTICSEARCH_DEFAULT_VERSION,
ELASTICSEARCH_DELETE_MODULES,
ELASTICSEARCH_PLUGIN_LIST,
KMS_URL_PATTERN,
LOCALSTACK_MAVEN_VERSION,
MODULE_MAIN_PATH,
STS_JAR_URL,
)
from localstack.runtime import hooks
from localstack.utils.common import (
chmod_r,
download,
file_exists_not_empty,
get_arch,
get_os,
is_windows,
load_file,
mkdir,
new_tmp_file,
parallelize,
retry,
rm_rf,
run,
safe_run,
save_file,
untar,
unzip,
)
from localstack.utils.docker_utils import DOCKER_CLIENT
LOG = logging.getLogger(__name__)
INSTALL_DIR_NPM = "%s/node_modules" % MODULE_MAIN_PATH # FIXME: migrate to infra
INSTALL_DIR_DDB = "%s/dynamodb" % dirs.static_libs
INSTALL_DIR_KCL = "%s/amazon-kinesis-client" % dirs.static_libs
INSTALL_DIR_STEPFUNCTIONS = "%s/stepfunctions" % dirs.static_libs
INSTALL_DIR_KMS = "%s/kms" % dirs.static_libs
INSTALL_DIR_ELASTICMQ = "%s/elasticmq" % dirs.static_libs
INSTALL_PATH_LOCALSTACK_FAT_JAR = "%s/localstack-utils-fat.jar" % dirs.static_libs
INSTALL_PATH_DDB_JAR = os.path.join(INSTALL_DIR_DDB, "DynamoDBLocal.jar")
INSTALL_PATH_KCL_JAR = os.path.join(INSTALL_DIR_KCL, "aws-java-sdk-sts.jar")
INSTALL_PATH_STEPFUNCTIONS_JAR = os.path.join(INSTALL_DIR_STEPFUNCTIONS, "StepFunctionsLocal.jar")
INSTALL_PATH_KMS_BINARY_PATTERN = os.path.join(INSTALL_DIR_KMS, "local-kms.<arch>.bin")
INSTALL_PATH_ELASTICMQ_JAR = os.path.join(INSTALL_DIR_ELASTICMQ, "elasticmq-server.jar")
INSTALL_PATH_KINESALITE_CLI = os.path.join(INSTALL_DIR_NPM, "kinesalite", "cli.js")
INSTALL_PATH_KINESIS_MOCK = os.path.join(dirs.static_libs, "kinesis-mock")
URL_LOCALSTACK_FAT_JAR = (
"https://repo1.maven.org/maven2/"
+ "cloud/localstack/localstack-utils/{v}/localstack-utils-{v}-fat.jar"
).format(v=LOCALSTACK_MAVEN_VERSION)
MARKER_FILE_LIGHT_VERSION = "%s/.light-version" % dirs.static_libs
IMAGE_NAME_SFN_LOCAL = "amazon/aws-stepfunctions-local"
ARTIFACTS_REPO = "https://github.com/localstack/localstack-artifacts"
SFN_PATCH_URL_PREFIX = (
f"{ARTIFACTS_REPO}/raw/047cc6dcd2e31f5ff3ec52d293c61b875f606958/stepfunctions-local-patch"
)
SFN_PATCH_CLASS1 = "com/amazonaws/stepfunctions/local/runtime/Config.class"
SFN_PATCH_CLASS2 = (
"com/amazonaws/stepfunctions/local/runtime/executors/task/LambdaTaskStateExecutor.class"
)
SFN_PATCH_CLASS_STARTER = "cloud/localstack/StepFunctionsStarter.class"
SFN_PATCH_CLASS_REGION = "cloud/localstack/RegionAspect.class"
SFN_PATCH_FILE_METAINF = "META-INF/aop.xml"
# additional JAR libs required for multi-region and persistence (PRO only) support
MAVEN_REPO = "https://repo1.maven.org/maven2"
URL_ASPECTJRT = f"{MAVEN_REPO}/org/aspectj/aspectjrt/1.9.7/aspectjrt-1.9.7.jar"
URL_ASPECTJWEAVER = f"{MAVEN_REPO}/org/aspectj/aspectjweaver/1.9.7/aspectjweaver-1.9.7.jar"
JAR_URLS = [URL_ASPECTJRT, URL_ASPECTJWEAVER]
# kinesis-mock version
KINESIS_MOCK_VERSION = os.environ.get("KINESIS_MOCK_VERSION") or "0.2.0"
KINESIS_MOCK_RELEASE_URL = (
"https://api.github.com/repos/etspaceman/kinesis-mock/releases/tags/" + KINESIS_MOCK_VERSION
)
# debugpy module
DEBUGPY_MODULE = "debugpy"
DEBUGPY_DEPENDENCIES = ["gcc", "python3-dev", "musl-dev"]
# Target version for javac, to ensure compatibility with earlier JREs
JAVAC_TARGET_VERSION = "1.8"
# SQS backend implementation provider - either "moto" or "elasticmq"
SQS_BACKEND_IMPL = os.environ.get("SQS_PROVIDER") or "moto"
# GO Lambda runtime
GO_RUNTIME_VERSION = "0.4.0"
GO_RUNTIME_DOWNLOAD_URL_TEMPLATE = "https://github.com/localstack/awslamba-go-runtime/releases/download/v{version}/awslamba-go-runtime-{version}-{os}-{arch}.tar.gz"
GO_INSTALL_FOLDER = os.path.join(config.dirs.var_libs, "awslamba-go-runtime")
GO_LAMBDA_RUNTIME = os.path.join(GO_INSTALL_FOLDER, "aws-lambda-mock")
GO_LAMBDA_MOCKSERVER = os.path.join(GO_INSTALL_FOLDER, "mockserver")
# Terraform (used for tests, whose templates require TF < 0.14.0 )
TERRAFORM_VERSION = "0.13.7"
TERRAFORM_URL_TEMPLATE = (
"https://releases.hashicorp.com/terraform/{version}/terraform_{version}_{os}_{arch}.zip"
)
TERRAFORM_BIN = os.path.join(dirs.static_libs, f"terraform-{TERRAFORM_VERSION}", "terraform")
# Java Test Jar Download (used for tests)
TEST_LAMBDA_JAVA = os.path.join(config.dirs.var_libs, "localstack-utils-tests.jar")
MAVEN_BASE_URL = "https://repo.maven.apache.org/maven2"
TEST_LAMBDA_JAR_URL = "{url}/cloud/localstack/{name}/{version}/{name}-{version}-tests.jar".format(
version=LOCALSTACK_MAVEN_VERSION, url=MAVEN_BASE_URL, name="localstack-utils"
)
def get_elasticsearch_install_version(version: str) -> str:
    """Resolve the Elasticsearch version that should actually be installed."""
    from localstack.services.es import versions

    # with downloads disabled, always stick to the bundled default version
    skip_downloads = config.SKIP_INFRA_DOWNLOADS
    return ELASTICSEARCH_DEFAULT_VERSION if skip_downloads else versions.get_install_version(version)
def get_elasticsearch_install_dir(version: str) -> str:
    """Return the directory the given Elasticsearch version is installed into."""
    version = get_elasticsearch_install_version(version)
    is_default = version == ELASTICSEARCH_DEFAULT_VERSION
    if is_default and not os.path.exists(MARKER_FILE_LIGHT_VERSION):
        # the default version lives in a subfolder of the code base
        return os.path.join(dirs.static_libs, "elasticsearch")
    # every other version is kept under the temp folder
    return os.path.join(config.dirs.tmp, "elasticsearch", version)
def install_elasticsearch(version=None):
    """Download and install the given Elasticsearch version (idempotent).

    Also installs the default plugin list, prunes unused modules to save
    space, and comments out hardcoded JVM heap settings so they can be
    controlled at runtime.
    """
    from localstack.services.es import versions

    if not version:
        version = ELASTICSEARCH_DEFAULT_VERSION
    version = get_elasticsearch_install_version(version)
    install_dir = get_elasticsearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "elasticsearch")
    if not os.path.exists(installed_executable):
        log_install_msg("Elasticsearch (%s)" % version)
        es_url = versions.get_download_url(version)
        install_dir_parent = os.path.dirname(install_dir)
        mkdir(install_dir_parent)
        # download and extract archive
        tmp_archive = os.path.join(config.dirs.tmp, "localstack.%s" % os.path.basename(es_url))
        download_and_extract_with_retry(es_url, tmp_archive, install_dir_parent)
        # the archive extracts into a versioned folder name - move it into place
        elasticsearch_dir = glob.glob(os.path.join(install_dir_parent, "elasticsearch*"))
        if not elasticsearch_dir:
            raise Exception("Unable to find Elasticsearch folder in %s" % install_dir_parent)
        shutil.move(elasticsearch_dir[0], install_dir)

        # pre-create runtime directories with permissive modes
        for dir_name in ("data", "logs", "modules", "plugins", "config/scripts"):
            dir_path = os.path.join(install_dir, dir_name)
            mkdir(dir_path)
            chmod_r(dir_path, 0o777)

        # install default plugins
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            plugin_binary = os.path.join(install_dir, "bin", "elasticsearch-plugin")
            plugin_dir = os.path.join(install_dir, "plugins", plugin)
            if not os.path.exists(plugin_dir):
                LOG.info("Installing Elasticsearch plugin %s", plugin)

                def try_install():
                    safe_run([plugin_binary, "install", "-b", plugin])

                # We're occasionally seeing javax.net.ssl.SSLHandshakeException -> add download retries
                download_attempts = 3
                try:
                    retry(try_install, retries=download_attempts - 1, sleep=2)
                except Exception:
                    LOG.warning(
                        "Unable to download Elasticsearch plugin '%s' after %s attempts",
                        plugin,
                        download_attempts,
                    )
                    if not os.environ.get("IGNORE_ES_DOWNLOAD_ERRORS"):
                        raise

        # delete some plugins to free up space
        for plugin in ELASTICSEARCH_DELETE_MODULES:
            module_dir = os.path.join(install_dir, "modules", plugin)
            rm_rf(module_dir)

        # disable x-pack-ml plugin (not working on Alpine)
        xpack_dir = os.path.join(install_dir, "modules", "x-pack-ml", "platform")
        rm_rf(xpack_dir)

        # patch JVM options file - replace hardcoded heap size settings
        jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
        if os.path.exists(jvm_options_file):
            jvm_options = load_file(jvm_options_file)
            jvm_options_replaced = re.sub(
                r"(^-Xm[sx][a-zA-Z0-9\.]+$)", r"# \1", jvm_options, flags=re.MULTILINE
            )
            if jvm_options != jvm_options_replaced:
                save_file(jvm_options_file, jvm_options_replaced)
def install_sqs_provider():
    """Install backing dependencies for the configured SQS provider.

    The moto backend needs nothing extra; ElasticMQ requires a JAR download.
    """
    if SQS_BACKEND_IMPL != "elasticmq":
        return
    install_elasticmq()
def install_elasticmq():
    """Download the ElasticMQ server JAR if it is not installed yet."""
    # TODO remove this function if we stop using ElasticMQ entirely
    if os.path.exists(INSTALL_PATH_ELASTICMQ_JAR):
        return
    log_install_msg("ElasticMQ")
    mkdir(INSTALL_DIR_ELASTICMQ)
    # download into the temp folder first, then copy into place
    jar_download = os.path.join(config.dirs.tmp, "elasticmq-server.jar")
    if not os.path.exists(jar_download):
        download(ELASTICMQ_JAR_URL, jar_download)
    shutil.copy(jar_download, INSTALL_DIR_ELASTICMQ)
def install_kinesis():
    """Install whichever Kinesis provider is configured (kinesalite / kinesis-mock)."""
    provider = config.KINESIS_PROVIDER
    if provider == "kinesalite":
        return install_kinesalite()
    if provider == "kinesis-mock":
        return install_kinesis_mock()
    raise ValueError("unknown kinesis provider %s" % provider)
def install_kinesalite():
    """Install kinesalite via npm into the module's node_modules folder."""
    if os.path.exists(INSTALL_PATH_KINESALITE_CLI):
        return
    log_install_msg("Kinesis")
    run('cd "%s" && npm install' % MODULE_MAIN_PATH)
def install_kinesis_mock():
    """Download the kinesis-mock binary matching the current platform.

    Prefers a native binary on x86_64 Linux/macOS/Windows; falls back to the
    JAR distribution otherwise (including when KINESIS_MOCK_FORCE_JAVA is set,
    or on what looks like Apple Silicon). Returns the path to the binary.
    """
    target_dir = INSTALL_PATH_KINESIS_MOCK

    machine = platform.machine().lower()
    system = platform.system().lower()
    version = platform.version().lower()

    # heuristic: assumes Apple Silicon reports "arm64" in the kernel version
    # string - TODO confirm on newer macOS releases
    is_probably_m1 = system == "darwin" and ("arm64" in version or "arm32" in version)

    LOG.debug("getting kinesis-mock for %s %s", system, machine)

    if is_env_true("KINESIS_MOCK_FORCE_JAVA"):
        # sometimes the static binaries may have problems, and we want to fall back to Java
        bin_file = "kinesis-mock.jar"
    elif (machine == "x86_64" or machine == "amd64") and not is_probably_m1:
        if system == "windows":
            bin_file = "kinesis-mock-mostly-static.exe"
        elif system == "linux":
            bin_file = "kinesis-mock-linux-amd64-static"
        elif system == "darwin":
            bin_file = "kinesis-mock-macos-amd64-dynamic"
        else:
            bin_file = "kinesis-mock.jar"
    else:
        bin_file = "kinesis-mock.jar"

    bin_file_path = os.path.join(target_dir, bin_file)
    if os.path.exists(bin_file_path):
        LOG.debug("kinesis-mock found at %s", bin_file_path)
        return bin_file_path

    # resolve the download URL of the selected asset from the GitHub release
    response = requests.get(KINESIS_MOCK_RELEASE_URL)
    if not response.ok:
        raise ValueError(
            "Could not get list of releases from %s: %s" % (KINESIS_MOCK_RELEASE_URL, response.text)
        )

    github_release = response.json()
    download_url = None
    for asset in github_release.get("assets", []):
        # find the correct binary in the release
        if asset["name"] == bin_file:
            download_url = asset["browser_download_url"]
            break

    if download_url is None:
        raise ValueError(
            "could not find required binary %s in release %s" % (bin_file, KINESIS_MOCK_RELEASE_URL)
        )

    mkdir(target_dir)
    LOG.info("downloading kinesis-mock binary from %s", download_url)
    download(download_url, bin_file_path)
    chmod_r(bin_file_path, 0o777)
    return bin_file_path
def install_local_kms():
    """Download the local-kms binary for the current OS if it is missing."""
    local_arch = get_os()
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace("<arch>", local_arch)
    if os.path.exists(binary_path):
        return
    log_install_msg("KMS")
    mkdir(INSTALL_DIR_KMS)
    # TODO ARM download platform specific binary
    kms_url = KMS_URL_PATTERN.replace("<arch>", local_arch)
    download(kms_url, binary_path)
    chmod_r(binary_path, 0o777)
def install_stepfunctions_local():
    """Install the StepFunctionsLocal JAR and apply LocalStack patch classes.

    The JAR is extracted from the official Docker image; patch classes are
    then zipped on top of it and the manifest's Main-Class/Class-Path are
    rewritten to point at the LocalStack starter.
    """
    if not os.path.exists(INSTALL_PATH_STEPFUNCTIONS_JAR):
        # pull the JAR file from the Docker image, which is more up-to-date than the downloadable JAR file
        # TODO: works only when running on the host, outside of Docker -> add a fallback if running in Docker?
        log_install_msg("Step Functions")
        mkdir(INSTALL_DIR_STEPFUNCTIONS)
        DOCKER_CLIENT.pull_image(IMAGE_NAME_SFN_LOCAL)
        docker_name = "tmp-ls-sfn"
        # start a short-lived container just to copy files out of it
        DOCKER_CLIENT.run_container(
            IMAGE_NAME_SFN_LOCAL,
            remove=True,
            entrypoint="",
            name=docker_name,
            detach=True,
            command=["sleep", "15"],
        )
        time.sleep(5)
        DOCKER_CLIENT.copy_from_container(
            docker_name, local_path=dirs.static_libs, container_path="/home/stepfunctionslocal/"
        )

        # move the extracted JARs into the install dir, then clean up
        path = Path(f"{dirs.static_libs}/stepfunctionslocal/")
        for file in path.glob("*.jar"):
            file.rename(Path(INSTALL_DIR_STEPFUNCTIONS) / file.name)
        rm_rf("%s/stepfunctionslocal" % dirs.static_libs)

    # overlay the LocalStack patch classes into the JAR
    classes = [
        SFN_PATCH_CLASS1,
        SFN_PATCH_CLASS2,
        SFN_PATCH_CLASS_REGION,
        SFN_PATCH_CLASS_STARTER,
        SFN_PATCH_FILE_METAINF,
    ]
    for patch_class in classes:
        patch_url = f"{SFN_PATCH_URL_PREFIX}/{patch_class}"
        add_file_to_jar(patch_class, patch_url, target_jar=INSTALL_PATH_STEPFUNCTIONS_JAR)

    # special case for Manifest file - extract first, replace content, then update in JAR file
    manifest_file = os.path.join(INSTALL_DIR_STEPFUNCTIONS, "META-INF", "MANIFEST.MF")
    if not os.path.exists(manifest_file):
        content = run(["unzip", "-p", INSTALL_PATH_STEPFUNCTIONS_JAR, "META-INF/MANIFEST.MF"])
        content = re.sub(
            "Main-Class: .+", "Main-Class: cloud.localstack.StepFunctionsStarter", content
        )
        classpath = " ".join([os.path.basename(jar) for jar in JAR_URLS])
        content = re.sub(r"Class-Path: \. ", f"Class-Path: {classpath} . ", content)
        save_file(manifest_file, content)
        run(
            ["zip", INSTALL_PATH_STEPFUNCTIONS_JAR, "META-INF/MANIFEST.MF"],
            cwd=INSTALL_DIR_STEPFUNCTIONS,
        )

    # download additional jar libs
    for jar_url in JAR_URLS:
        target = os.path.join(INSTALL_DIR_STEPFUNCTIONS, os.path.basename(jar_url))
        if not file_exists_not_empty(target):
            download(jar_url, target)
def add_file_to_jar(class_file, class_url, target_jar, base_dir=None):
    """Download ``class_file`` from ``class_url`` (if absent) and zip it into ``target_jar``."""
    base_dir = base_dir or os.path.dirname(target_jar)
    local_class_file = os.path.join(base_dir, class_file)
    if not os.path.exists(local_class_file):
        download(class_url, local_class_file)
        run(["zip", target_jar, class_file], cwd=base_dir)
def install_dynamodb_local():
    """Download DynamoDBLocal and patch its log4j2 config to WARN level."""
    if not os.path.exists(INSTALL_PATH_DDB_JAR):
        log_install_msg("DynamoDB")
        # download and extract archive
        tmp_archive = os.path.join(tempfile.gettempdir(), "localstack.ddb.zip")
        download_and_extract_with_retry(DYNAMODB_JAR_URL, tmp_archive, INSTALL_DIR_DDB)

    # fix logging configuration for DynamoDBLocal
    log4j2_config = """<Configuration status="WARN">
      <Appenders>
        <Console name="Console" target="SYSTEM_OUT">
          <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
        </Console>
      </Appenders>
      <Loggers>
        <Root level="WARN"><AppenderRef ref="Console"/></Root>
      </Loggers>
    </Configuration>"""
    log4j2_file = os.path.join(INSTALL_DIR_DDB, "log4j2.xml")
    save_file(log4j2_file, log4j2_config)
    # update the config inside the JAR; "|| true" makes the zip update best-effort
    run('cd "%s" && zip -u DynamoDBLocal.jar log4j2.xml || true' % INSTALL_DIR_DDB)
def install_amazon_kinesis_client_libs():
    """Install the AWS STS JAR for the KCL and compile the Java helper classes."""
    # install KCL/STS JAR files
    if not os.path.exists(INSTALL_PATH_KCL_JAR):
        mkdir(INSTALL_DIR_KCL)
        tmp_archive = os.path.join(tempfile.gettempdir(), "aws-java-sdk-sts.jar")
        if not os.path.exists(tmp_archive):
            download(STS_JAR_URL, tmp_archive)
        shutil.copy(tmp_archive, INSTALL_DIR_KCL)

    # Compile Java files
    from localstack.utils.kinesis import kclipy_helper

    classpath = kclipy_helper.get_kcl_classpath()

    if is_windows():
        # rewrite ':' classpath separators to ';' for Windows javac
        classpath = re.sub(r":([^\\])", r";\1", classpath)
    java_files = "%s/utils/kinesis/java/cloud/localstack/*.java" % MODULE_MAIN_PATH
    class_files = "%s/utils/kinesis/java/cloud/localstack/*.class" % MODULE_MAIN_PATH
    # only compile if no .class files exist yet
    if not glob.glob(class_files):
        run(
            'javac -source %s -target %s -cp "%s" %s'
            % (JAVAC_TARGET_VERSION, JAVAC_TARGET_VERSION, classpath, java_files)
        )
def install_lambda_java_libs():
    """Fetch the LocalStack "fat" JAR (bundles all Java dependencies)."""
    if os.path.exists(INSTALL_PATH_LOCALSTACK_FAT_JAR):
        return
    log_install_msg("LocalStack Java libraries", verbatim=True)
    download(URL_LOCALSTACK_FAT_JAR, INSTALL_PATH_LOCALSTACK_FAT_JAR)
def install_lambda_java_testlibs():
    """Download the LocalStack utils test JAR from the Maven repository."""
    if os.path.exists(TEST_LAMBDA_JAVA):
        return
    mkdir(os.path.dirname(TEST_LAMBDA_JAVA))
    download(TEST_LAMBDA_JAR_URL, TEST_LAMBDA_JAVA)
def install_go_lambda_runtime():
    """Install the Go Lambda runtime and mockserver binaries (Linux amd64/arm64 only)."""
    if os.path.isfile(GO_LAMBDA_RUNTIME):
        return

    log_install_msg("Installing golang runtime")

    system = platform.system().lower()
    arch = get_arch()

    if system not in ["linux"]:
        raise ValueError("unsupported os %s for awslambda-go-runtime" % system)
    if arch not in ["amd64", "arm64"]:
        raise ValueError("unsupported arch %s for awslambda-go-runtime" % arch)

    url = GO_RUNTIME_DOWNLOAD_URL_TEMPLATE.format(
        version=GO_RUNTIME_VERSION,
        os=system,
        arch=arch,
    )
    download_and_extract(url, GO_INSTALL_FOLDER)

    # mark both extracted binaries as executable for all users
    for binary in (GO_LAMBDA_RUNTIME, GO_LAMBDA_MOCKSERVER):
        mode = os.stat(binary).st_mode
        os.chmod(binary, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
def install_cloudformation_libs():
    """Pre-download artifacts needed by the CloudFormation provider."""
    from localstack.services.cloudformation import deployment_utils

    # trigger download of CF module file
    deployment_utils.get_cfn_response_mod_file()
def install_terraform() -> str:
    """Download the pinned Terraform release and return the binary path (idempotent)."""
    if os.path.isfile(TERRAFORM_BIN):
        return TERRAFORM_BIN

    log_install_msg(f"Installing terraform {TERRAFORM_VERSION}")

    platform_os = platform.system().lower()
    cpu_arch = get_arch()
    download_url = TERRAFORM_URL_TEMPLATE.format(
        version=TERRAFORM_VERSION, os=platform_os, arch=cpu_arch
    )
    download_and_extract(download_url, os.path.dirname(TERRAFORM_BIN))
    chmod_r(TERRAFORM_BIN, 0o777)

    return TERRAFORM_BIN
def get_terraform_binary() -> str:
    """Return the path to the Terraform binary, installing it on first use.

    ``install_terraform`` already returns immediately when the binary exists,
    so the previous duplicate existence check here was redundant.
    """
    return install_terraform()
def install_component(name):
    """Run the installer registered for ``name``; unknown names are a no-op."""
    installer = installers.get(name)
    if installer is not None:
        installer()
def install_components(names):
    # Install all requested components concurrently; the Java Lambda libs are
    # shared across services and always installed afterwards.
    parallelize(install_component, names)
    install_lambda_java_libs()
def install_all_components():
    """Install all default services, then run the registered install hooks."""
    # install dependencies - make sure that install_components(..) is called before hooks.install below!
    install_components(DEFAULT_SERVICE_PORTS.keys())
    hooks.install.run()
def install_debugpy_and_dependencies():
    """Ensure the debugpy module is importable, installing it if necessary.

    Previously this invoked ``pip.main`` (or the private ``pip._internal.main``)
    in-process, which is unsupported by pip and can leave the running
    interpreter in an inconsistent state; the documented approach is to run
    ``python -m pip`` in a subprocess.
    """
    import subprocess

    try:
        import debugpy

        assert debugpy
        logging.debug("Debugpy module already Installed")
    except ModuleNotFoundError:
        logging.debug("Installing Debugpy module")
        subprocess.check_call([sys.executable, "-m", "pip", "install", DEBUGPY_MODULE])
# -----------------
# HELPER FUNCTIONS
# -----------------
def log_install_msg(component, verbatim=False):
    """Log the standard download/install message for ``component``.

    Unless ``verbatim`` is set, the component name is wrapped as
    "local <component> server".
    """
    label = component if verbatim else "local %s server" % component
    LOG.info("Downloading and installing %s. This may take some time.", label)
def download_and_extract(archive_url, target_dir, retries=0, sleep=3, tmp_archive=None):
    """Download ``archive_url`` (with retries) and extract it into ``target_dir``.

    The archive is cached at ``tmp_archive`` (a fresh temp file if not given)
    and re-used on subsequent calls. Supported formats: .zip, .tar.gz, .tar.bz2.

    Fix over the previous version: if every download attempt fails, the last
    download error is re-raised instead of silently falling through and
    attempting to extract an empty placeholder file.
    """
    mkdir(target_dir)

    # derive the archive format from the explicit tmp path, if provided
    _, ext = os.path.splitext(tmp_archive or archive_url)
    tmp_archive = tmp_archive or new_tmp_file()
    if not os.path.exists(tmp_archive) or os.path.getsize(tmp_archive) <= 0:
        # create temporary placeholder file, to avoid duplicate parallel downloads
        save_file(tmp_archive, "")
        last_error = None
        for i in range(retries + 1):
            try:
                download(archive_url, tmp_archive)
                last_error = None
                break
            except Exception as e:
                last_error = e
                time.sleep(sleep)
        if last_error is not None:
            raise last_error

    if ext == ".zip":
        unzip(tmp_archive, target_dir)
    elif ext in (".gz", ".bz2"):
        untar(tmp_archive, target_dir)
    else:
        raise Exception("Unsupported archive format: %s" % ext)
def download_and_extract_with_retry(archive_url, tmp_archive, target_dir):
    """Download/extract once; on failure, discard the cached archive and retry once."""
    try:
        download_and_extract(archive_url, target_dir, tmp_archive=tmp_archive)
    except Exception as e:
        # try deleting and re-downloading the zip file
        LOG.info("Unable to extract file, re-downloading ZIP archive %s: %s", tmp_archive, e)
        rm_rf(tmp_archive)
        download_and_extract(archive_url, target_dir, tmp_archive=tmp_archive)
# kept here for backwards compatibility (installed on "make init" - TODO should be removed)
# maps a service/component name to its installer callable
installers = {
    "cloudformation": install_cloudformation_libs,
    "dynamodb": install_dynamodb_local,
    "kinesis": install_kinesis,
    "kms": install_local_kms,
    "sqs": install_sqs_provider,
    "stepfunctions": install_stepfunctions_local,
}
# an installer is a (name, install function) pair
Installer = Tuple[str, Callable]


class InstallerRepository(Plugin):
    """Plugin interface that exposes a set of named installers."""

    # plugin namespace under which repositories are discovered
    namespace = "localstack.installer"

    def get_installer(self) -> List[Installer]:
        raise NotImplementedError
class CommunityInstallerRepository(InstallerRepository):
    """Installer repository for the community (open-source) edition."""

    name = "community"

    def get_installer(self) -> List[Installer]:
        return [
            ("awslamba-go-runtime", install_go_lambda_runtime),
            ("cloudformation-libs", install_cloudformation_libs),
            ("dynamodb-local", install_dynamodb_local),
            ("elasticmq", install_elasticmq),
            ("elasticsearch", install_elasticsearch),
            ("kinesalite", install_kinesalite),
            ("kinesis-client-libs", install_amazon_kinesis_client_libs),
            ("kinesis-mock", install_kinesis_mock),
            ("lambda-java-libs", install_lambda_java_libs),
            ("local-kms", install_local_kms),
            ("stepfunctions-local", install_stepfunctions_local),
            ("terraform", install_terraform),
        ]
class InstallerManager:
    """Discovers InstallerRepository plugins and dispatches install requests."""

    def __init__(self):
        self.repositories: PluginManager[InstallerRepository] = PluginManager(
            InstallerRepository.namespace
        )

    @functools.lru_cache()
    def get_installers(self) -> Dict[str, Callable]:
        # NOTE(review): lru_cache on an instance method keeps ``self`` alive for
        # the cache's lifetime; fine if this manager is used as a singleton -
        # confirm before reusing elsewhere.
        installer: List[Installer] = []

        for repo in self.repositories.load_all():
            installer.extend(repo.get_installer())

        return dict(installer)

    def install(self, package: str, *args, **kwargs):
        installer = self.get_installers().get(package)

        if not installer:
            raise ValueError("no installer for package %s" % package)

        return installer(*args, **kwargs)
def main():
    """CLI entry point: ``python install.py libs`` or ``... testlibs``."""
    if len(sys.argv) > 1:
        # set test API key so pro install hooks are called
        os.environ["LOCALSTACK_API_KEY"] = os.environ.get("LOCALSTACK_API_KEY") or "test"
        if sys.argv[1] == "libs":
            print("Initializing installation.")
            logging.basicConfig(level=logging.INFO)
            logging.getLogger("requests").setLevel(logging.WARNING)
            install_all_components()
        if sys.argv[1] in ("libs", "testlibs"):
            # Install additional libraries for testing
            install_amazon_kinesis_client_libs()
            install_lambda_java_testlibs()
        print("Done.")


if __name__ == "__main__":
    main()
| 36.251108 | 164 | 0.691223 |
import functools
import glob
import logging
import os
import platform
import re
import shutil
import stat
import sys
import tempfile
import time
from pathlib import Path
from typing import Callable, Dict, List, Tuple
import requests
from plugin import Plugin, PluginManager
from localstack import config
from localstack.config import dirs, is_env_true
from localstack.constants import (
DEFAULT_SERVICE_PORTS,
DYNAMODB_JAR_URL,
ELASTICMQ_JAR_URL,
ELASTICSEARCH_DEFAULT_VERSION,
ELASTICSEARCH_DELETE_MODULES,
ELASTICSEARCH_PLUGIN_LIST,
KMS_URL_PATTERN,
LOCALSTACK_MAVEN_VERSION,
MODULE_MAIN_PATH,
STS_JAR_URL,
)
from localstack.runtime import hooks
from localstack.utils.common import (
chmod_r,
download,
file_exists_not_empty,
get_arch,
get_os,
is_windows,
load_file,
mkdir,
new_tmp_file,
parallelize,
retry,
rm_rf,
run,
safe_run,
save_file,
untar,
unzip,
)
from localstack.utils.docker_utils import DOCKER_CLIENT
LOG = logging.getLogger(__name__)
INSTALL_DIR_NPM = "%s/node_modules" % MODULE_MAIN_PATH
INSTALL_DIR_DDB = "%s/dynamodb" % dirs.static_libs
INSTALL_DIR_KCL = "%s/amazon-kinesis-client" % dirs.static_libs
INSTALL_DIR_STEPFUNCTIONS = "%s/stepfunctions" % dirs.static_libs
INSTALL_DIR_KMS = "%s/kms" % dirs.static_libs
INSTALL_DIR_ELASTICMQ = "%s/elasticmq" % dirs.static_libs
INSTALL_PATH_LOCALSTACK_FAT_JAR = "%s/localstack-utils-fat.jar" % dirs.static_libs
INSTALL_PATH_DDB_JAR = os.path.join(INSTALL_DIR_DDB, "DynamoDBLocal.jar")
INSTALL_PATH_KCL_JAR = os.path.join(INSTALL_DIR_KCL, "aws-java-sdk-sts.jar")
INSTALL_PATH_STEPFUNCTIONS_JAR = os.path.join(INSTALL_DIR_STEPFUNCTIONS, "StepFunctionsLocal.jar")
INSTALL_PATH_KMS_BINARY_PATTERN = os.path.join(INSTALL_DIR_KMS, "local-kms.<arch>.bin")
INSTALL_PATH_ELASTICMQ_JAR = os.path.join(INSTALL_DIR_ELASTICMQ, "elasticmq-server.jar")
INSTALL_PATH_KINESALITE_CLI = os.path.join(INSTALL_DIR_NPM, "kinesalite", "cli.js")
INSTALL_PATH_KINESIS_MOCK = os.path.join(dirs.static_libs, "kinesis-mock")
URL_LOCALSTACK_FAT_JAR = (
"https://repo1.maven.org/maven2/"
+ "cloud/localstack/localstack-utils/{v}/localstack-utils-{v}-fat.jar"
).format(v=LOCALSTACK_MAVEN_VERSION)
MARKER_FILE_LIGHT_VERSION = "%s/.light-version" % dirs.static_libs
IMAGE_NAME_SFN_LOCAL = "amazon/aws-stepfunctions-local"
ARTIFACTS_REPO = "https://github.com/localstack/localstack-artifacts"
SFN_PATCH_URL_PREFIX = (
f"{ARTIFACTS_REPO}/raw/047cc6dcd2e31f5ff3ec52d293c61b875f606958/stepfunctions-local-patch"
)
SFN_PATCH_CLASS1 = "com/amazonaws/stepfunctions/local/runtime/Config.class"
SFN_PATCH_CLASS2 = (
"com/amazonaws/stepfunctions/local/runtime/executors/task/LambdaTaskStateExecutor.class"
)
SFN_PATCH_CLASS_STARTER = "cloud/localstack/StepFunctionsStarter.class"
SFN_PATCH_CLASS_REGION = "cloud/localstack/RegionAspect.class"
SFN_PATCH_FILE_METAINF = "META-INF/aop.xml"
MAVEN_REPO = "https://repo1.maven.org/maven2"
URL_ASPECTJRT = f"{MAVEN_REPO}/org/aspectj/aspectjrt/1.9.7/aspectjrt-1.9.7.jar"
URL_ASPECTJWEAVER = f"{MAVEN_REPO}/org/aspectj/aspectjweaver/1.9.7/aspectjweaver-1.9.7.jar"
JAR_URLS = [URL_ASPECTJRT, URL_ASPECTJWEAVER]
KINESIS_MOCK_VERSION = os.environ.get("KINESIS_MOCK_VERSION") or "0.2.0"
KINESIS_MOCK_RELEASE_URL = (
"https://api.github.com/repos/etspaceman/kinesis-mock/releases/tags/" + KINESIS_MOCK_VERSION
)
DEBUGPY_MODULE = "debugpy"
DEBUGPY_DEPENDENCIES = ["gcc", "python3-dev", "musl-dev"]
JAVAC_TARGET_VERSION = "1.8"
SQS_BACKEND_IMPL = os.environ.get("SQS_PROVIDER") or "moto"
GO_RUNTIME_VERSION = "0.4.0"
GO_RUNTIME_DOWNLOAD_URL_TEMPLATE = "https://github.com/localstack/awslamba-go-runtime/releases/download/v{version}/awslamba-go-runtime-{version}-{os}-{arch}.tar.gz"
GO_INSTALL_FOLDER = os.path.join(config.dirs.var_libs, "awslamba-go-runtime")
GO_LAMBDA_RUNTIME = os.path.join(GO_INSTALL_FOLDER, "aws-lambda-mock")
GO_LAMBDA_MOCKSERVER = os.path.join(GO_INSTALL_FOLDER, "mockserver")
TERRAFORM_VERSION = "0.13.7"
TERRAFORM_URL_TEMPLATE = (
"https://releases.hashicorp.com/terraform/{version}/terraform_{version}_{os}_{arch}.zip"
)
TERRAFORM_BIN = os.path.join(dirs.static_libs, f"terraform-{TERRAFORM_VERSION}", "terraform")
TEST_LAMBDA_JAVA = os.path.join(config.dirs.var_libs, "localstack-utils-tests.jar")
MAVEN_BASE_URL = "https://repo.maven.apache.org/maven2"
TEST_LAMBDA_JAR_URL = "{url}/cloud/localstack/{name}/{version}/{name}-{version}-tests.jar".format(
version=LOCALSTACK_MAVEN_VERSION, url=MAVEN_BASE_URL, name="localstack-utils"
)
def get_elasticsearch_install_version(version: str) -> str:
from localstack.services.es import versions
if config.SKIP_INFRA_DOWNLOADS:
return ELASTICSEARCH_DEFAULT_VERSION
return versions.get_install_version(version)
def get_elasticsearch_install_dir(version: str) -> str:
version = get_elasticsearch_install_version(version)
if version == ELASTICSEARCH_DEFAULT_VERSION and not os.path.exists(MARKER_FILE_LIGHT_VERSION):
install_dir = os.path.join(dirs.static_libs, "elasticsearch")
else:
install_dir = os.path.join(config.dirs.tmp, "elasticsearch", version)
return install_dir
def install_elasticsearch(version=None):
    """Download and install Elasticsearch (plus default plugins) if missing.

    :param version: requested version; falls back to ELASTICSEARCH_DEFAULT_VERSION
        and is normalized via get_elasticsearch_install_version()
    """
    from localstack.services.es import versions

    if not version:
        version = ELASTICSEARCH_DEFAULT_VERSION
    version = get_elasticsearch_install_version(version)
    install_dir = get_elasticsearch_install_dir(version)
    installed_executable = os.path.join(install_dir, "bin", "elasticsearch")
    if not os.path.exists(installed_executable):
        log_install_msg("Elasticsearch (%s)" % version)
        es_url = versions.get_download_url(version)
        install_dir_parent = os.path.dirname(install_dir)
        mkdir(install_dir_parent)
        # download and extract the archive next to the final install dir
        tmp_archive = os.path.join(config.dirs.tmp, "localstack.%s" % os.path.basename(es_url))
        download_and_extract_with_retry(es_url, tmp_archive, install_dir_parent)
        # the archive extracts into a versioned "elasticsearch*" folder; move it into place
        elasticsearch_dir = glob.glob(os.path.join(install_dir_parent, "elasticsearch*"))
        if not elasticsearch_dir:
            raise Exception("Unable to find Elasticsearch folder in %s" % install_dir_parent)
        shutil.move(elasticsearch_dir[0], install_dir)

        # pre-create writable runtime folders (ES runs as a non-root user)
        for dir_name in ("data", "logs", "modules", "plugins", "config/scripts"):
            dir_path = os.path.join(install_dir, dir_name)
            mkdir(dir_path)
            chmod_r(dir_path, 0o777)

        # install the default plugin set; each install is retried a few times
        for plugin in ELASTICSEARCH_PLUGIN_LIST:
            plugin_binary = os.path.join(install_dir, "bin", "elasticsearch-plugin")
            plugin_dir = os.path.join(install_dir, "plugins", plugin)
            if not os.path.exists(plugin_dir):
                LOG.info("Installing Elasticsearch plugin %s", plugin)

                def try_install():
                    safe_run([plugin_binary, "install", "-b", plugin])

                download_attempts = 3
                try:
                    retry(try_install, retries=download_attempts - 1, sleep=2)
                except Exception:
                    # best-effort: only fail hard if IGNORE_ES_DOWNLOAD_ERRORS is unset
                    LOG.warning(
                        "Unable to download Elasticsearch plugin '%s' after %s attempts",
                        plugin,
                        download_attempts,
                    )
                    if not os.environ.get("IGNORE_ES_DOWNLOAD_ERRORS"):
                        raise

    # delete some plugins to free up space
    for plugin in ELASTICSEARCH_DELETE_MODULES:
        module_dir = os.path.join(install_dir, "modules", plugin)
        rm_rf(module_dir)

    # disable x-pack-ml plugin (not working on Alpine)
    xpack_dir = os.path.join(install_dir, "modules", "x-pack-ml", "platform")
    rm_rf(xpack_dir)

    # patch JVM options file - replace hardcoded heap size settings
    jvm_options_file = os.path.join(install_dir, "config", "jvm.options")
    if os.path.exists(jvm_options_file):
        jvm_options = load_file(jvm_options_file)
        # comment out any -Xms/-Xmx lines so heap size can be set via env
        jvm_options_replaced = re.sub(
            r"(^-Xm[sx][a-zA-Z0-9\.]+$)", r"# \1", jvm_options, flags=re.MULTILINE
        )
        if jvm_options != jvm_options_replaced:
            save_file(jvm_options_file, jvm_options_replaced)
def install_sqs_provider():
    """Install the configured SQS backend (only ElasticMQ requires a download)."""
    if SQS_BACKEND_IMPL != "elasticmq":
        return
    install_elasticmq()
def install_elasticmq():
    """Download the ElasticMQ server JAR if it is not installed yet."""
    # TODO remove this function if we stop using ElasticMQ entirely
    if os.path.exists(INSTALL_PATH_ELASTICMQ_JAR):
        return
    log_install_msg("ElasticMQ")
    mkdir(INSTALL_DIR_ELASTICMQ)
    # fetch the JAR into the tmp dir first (cache), then copy it into place
    tmp_archive = os.path.join(config.dirs.tmp, "elasticmq-server.jar")
    if not os.path.exists(tmp_archive):
        download(ELASTICMQ_JAR_URL, tmp_archive)
    shutil.copy(tmp_archive, INSTALL_DIR_ELASTICMQ)
def install_kinesis():
    """Install the Kinesis provider selected via config.KINESIS_PROVIDER."""
    provider = config.KINESIS_PROVIDER
    if provider == "kinesalite":
        return install_kinesalite()
    if provider == "kinesis-mock":
        return install_kinesis_mock()
    raise ValueError("unknown kinesis provider %s" % provider)
def install_kinesalite():
    """Install kinesalite via npm (no-op if its CLI already exists)."""
    if os.path.exists(INSTALL_PATH_KINESALITE_CLI):
        return
    log_install_msg("Kinesis")
    run('cd "%s" && npm install' % MODULE_MAIN_PATH)
def install_kinesis_mock():
    """Download the kinesis-mock binary matching the current platform.

    Prefers a native binary for amd64 Windows/Linux/macOS and falls back to
    the JAR everywhere else (or when KINESIS_MOCK_FORCE_JAVA is set). The
    download URL is resolved from the GitHub release metadata.

    :return: path to the installed binary/JAR
    :raises ValueError: if the release metadata cannot be fetched or does not
        contain the expected asset
    """
    target_dir = INSTALL_PATH_KINESIS_MOCK

    machine = platform.machine().lower()
    system = platform.system().lower()
    version = platform.version().lower()

    # NOTE(review): Apple-Silicon detection relies on platform.version()
    # containing "arm64"/"arm32" - confirm this heuristic on newer macOS
    is_probably_m1 = system == "darwin" and ("arm64" in version or "arm32" in version)

    LOG.debug("getting kinesis-mock for %s %s", system, machine)

    if is_env_true("KINESIS_MOCK_FORCE_JAVA"):
        # sometimes the static binaries may have problems, and we want to fall back to Java
        bin_file = "kinesis-mock.jar"
    elif (machine == "x86_64" or machine == "amd64") and not is_probably_m1:
        if system == "windows":
            bin_file = "kinesis-mock-mostly-static.exe"
        elif system == "linux":
            bin_file = "kinesis-mock-linux-amd64-static"
        elif system == "darwin":
            bin_file = "kinesis-mock-macos-amd64-dynamic"
        else:
            bin_file = "kinesis-mock.jar"
    else:
        bin_file = "kinesis-mock.jar"

    bin_file_path = os.path.join(target_dir, bin_file)
    if os.path.exists(bin_file_path):
        LOG.debug("kinesis-mock found at %s", bin_file_path)
        return bin_file_path

    # ask the GitHub API for the assets of the pinned release
    response = requests.get(KINESIS_MOCK_RELEASE_URL)
    if not response.ok:
        raise ValueError(
            "Could not get list of releases from %s: %s" % (KINESIS_MOCK_RELEASE_URL, response.text)
        )

    github_release = response.json()
    download_url = None
    for asset in github_release.get("assets", []):
        # find the correct binary in the release
        if asset["name"] == bin_file:
            download_url = asset["browser_download_url"]
            break

    if download_url is None:
        raise ValueError(
            "could not find required binary %s in release %s" % (bin_file, KINESIS_MOCK_RELEASE_URL)
        )

    mkdir(target_dir)
    LOG.info("downloading kinesis-mock binary from %s", download_url)
    download(download_url, bin_file_path)
    chmod_r(bin_file_path, 0o777)
    return bin_file_path
def install_local_kms():
    """Download the local-KMS binary for the current OS if missing."""
    os_name = get_os()
    binary_path = INSTALL_PATH_KMS_BINARY_PATTERN.replace("<arch>", os_name)
    if os.path.exists(binary_path):
        return
    log_install_msg("KMS")
    mkdir(INSTALL_DIR_KMS)
    # TODO ARM download platform specific binary
    download(KMS_URL_PATTERN.replace("<arch>", os_name), binary_path)
    chmod_r(binary_path, 0o777)
def install_stepfunctions_local():
    """Install the StepFunctions-local JAR and apply the LocalStack patches.

    The JAR is extracted from the official Docker image, LocalStack patch
    classes are zipped into it, its manifest is rewritten to use the
    LocalStack starter class, and additional AspectJ JARs are downloaded
    next to it.
    """
    if not os.path.exists(INSTALL_PATH_STEPFUNCTIONS_JAR):
        # pull the JAR file from the Docker image, which is more up-to-date than the downloadable JAR file
        # TODO: works only when running on the host, outside of Docker -> add a fallback if running in Docker?
        log_install_msg("Step Functions")
        mkdir(INSTALL_DIR_STEPFUNCTIONS)
        DOCKER_CLIENT.pull_image(IMAGE_NAME_SFN_LOCAL)
        docker_name = "tmp-ls-sfn"
        # start a short-lived container so the JAR files can be copied out of it
        DOCKER_CLIENT.run_container(
            IMAGE_NAME_SFN_LOCAL,
            remove=True,
            entrypoint="",
            name=docker_name,
            detach=True,
            command=["sleep", "15"],
        )
        time.sleep(5)
        DOCKER_CLIENT.copy_from_container(
            docker_name, local_path=dirs.static_libs, container_path="/home/stepfunctionslocal/"
        )

        # move the extracted JARs into the install dir, then clean up
        path = Path(f"{dirs.static_libs}/stepfunctionslocal/")
        for file in path.glob("*.jar"):
            file.rename(Path(INSTALL_DIR_STEPFUNCTIONS) / file.name)
        rm_rf("%s/stepfunctionslocal" % dirs.static_libs)

    # zip the LocalStack patch class files into the JAR (skips already-downloaded files)
    classes = [
        SFN_PATCH_CLASS1,
        SFN_PATCH_CLASS2,
        SFN_PATCH_CLASS_REGION,
        SFN_PATCH_CLASS_STARTER,
        SFN_PATCH_FILE_METAINF,
    ]
    for patch_class in classes:
        patch_url = f"{SFN_PATCH_URL_PREFIX}/{patch_class}"
        add_file_to_jar(patch_class, patch_url, target_jar=INSTALL_PATH_STEPFUNCTIONS_JAR)

    # special case for Manifest file - extract first, replace content, then update in JAR file
    manifest_file = os.path.join(INSTALL_DIR_STEPFUNCTIONS, "META-INF", "MANIFEST.MF")
    if not os.path.exists(manifest_file):
        content = run(["unzip", "-p", INSTALL_PATH_STEPFUNCTIONS_JAR, "META-INF/MANIFEST.MF"])
        content = re.sub(
            "Main-Class: .+", "Main-Class: cloud.localstack.StepFunctionsStarter", content
        )
        # prepend the AspectJ JARs to the Class-Path entry
        classpath = " ".join([os.path.basename(jar) for jar in JAR_URLS])
        content = re.sub(r"Class-Path: \. ", f"Class-Path: {classpath} . ", content)
        save_file(manifest_file, content)
        run(
            ["zip", INSTALL_PATH_STEPFUNCTIONS_JAR, "META-INF/MANIFEST.MF"],
            cwd=INSTALL_DIR_STEPFUNCTIONS,
        )

    # download additional jar libs
    for jar_url in JAR_URLS:
        target = os.path.join(INSTALL_DIR_STEPFUNCTIONS, os.path.basename(jar_url))
        if not file_exists_not_empty(target):
            download(jar_url, target)
def add_file_to_jar(class_file, class_url, target_jar, base_dir=None):
    """Download `class_file` from `class_url` (if absent locally) and zip it
    into `target_jar`, preserving its relative path inside the archive."""
    work_dir = base_dir or os.path.dirname(target_jar)
    local_file = os.path.join(work_dir, class_file)
    if not os.path.exists(local_file):
        download(class_url, local_file)
    # always (re-)add the file to the JAR, as in the original behavior
    run(["zip", target_jar, class_file], cwd=work_dir)
def install_dynamodb_local():
    """Download DynamoDBLocal and quieten its default log4j2 configuration."""
    if not os.path.exists(INSTALL_PATH_DDB_JAR):
        log_install_msg("DynamoDB")
        # download and extract archive
        tmp_archive = os.path.join(tempfile.gettempdir(), "localstack.ddb.zip")
        download_and_extract_with_retry(DYNAMODB_JAR_URL, tmp_archive, INSTALL_DIR_DDB)

    # fix logging configuration for DynamoDBLocal
    log4j2_config = """<Configuration status="WARN">
      <Appenders>
        <Console name="Console" target="SYSTEM_OUT">
          <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
        </Console>
      </Appenders>
      <Loggers>
        <Root level="WARN"><AppenderRef ref="Console"/></Root>
      </Loggers>
    </Configuration>"""
    log4j2_file = os.path.join(INSTALL_DIR_DDB, "log4j2.xml")
    save_file(log4j2_file, log4j2_config)
    # update the config inside the JAR; "|| true" makes this best-effort
    run('cd "%s" && zip -u DynamoDBLocal.jar log4j2.xml || true' % INSTALL_DIR_DDB)
def install_amazon_kinesis_client_libs():
    """Install the KCL/STS JAR files and compile the bundled Java helpers."""
    # install KCL/STS JAR files
    if not os.path.exists(INSTALL_PATH_KCL_JAR):
        mkdir(INSTALL_DIR_KCL)
        tmp_archive = os.path.join(tempfile.gettempdir(), "aws-java-sdk-sts.jar")
        if not os.path.exists(tmp_archive):
            download(STS_JAR_URL, tmp_archive)
        shutil.copy(tmp_archive, INSTALL_DIR_KCL)

    # Compile Java files
    from localstack.utils.kinesis import kclipy_helper

    classpath = kclipy_helper.get_kcl_classpath()

    if is_windows():
        # Windows uses ';' as the classpath separator instead of ':'
        classpath = re.sub(r":([^\\])", r";\1", classpath)
    java_files = "%s/utils/kinesis/java/cloud/localstack/*.java" % MODULE_MAIN_PATH
    class_files = "%s/utils/kinesis/java/cloud/localstack/*.class" % MODULE_MAIN_PATH
    if not glob.glob(class_files):
        # only compile if no .class files exist yet
        run(
            'javac -source %s -target %s -cp "%s" %s'
            % (JAVAC_TARGET_VERSION, JAVAC_TARGET_VERSION, classpath, java_files)
        )
def install_lambda_java_libs():
    """Fetch the LocalStack "fat" JAR (bundles all Java dependencies)."""
    if os.path.exists(INSTALL_PATH_LOCALSTACK_FAT_JAR):
        return
    log_install_msg("LocalStack Java libraries", verbatim=True)
    download(URL_LOCALSTACK_FAT_JAR, INSTALL_PATH_LOCALSTACK_FAT_JAR)
def install_lambda_java_testlibs():
    """Fetch the LocalStack utils test JAR from the Maven repository."""
    if os.path.exists(TEST_LAMBDA_JAVA):
        return
    mkdir(os.path.dirname(TEST_LAMBDA_JAVA))
    download(TEST_LAMBDA_JAR_URL, TEST_LAMBDA_JAVA)
def install_go_lambda_runtime():
    """Download and install the Go Lambda runtime and mock server binaries.

    :raises ValueError: on unsupported OS (only Linux) or CPU architecture
    """
    if os.path.isfile(GO_LAMBDA_RUNTIME):
        return  # already installed

    log_install_msg("Installing golang runtime")

    system = platform.system().lower()
    arch = get_arch()
    if system != "linux":
        raise ValueError("unsupported os %s for awslambda-go-runtime" % system)
    if arch not in ("amd64", "arm64"):
        raise ValueError("unsupported arch %s for awslambda-go-runtime" % arch)

    url = GO_RUNTIME_DOWNLOAD_URL_TEMPLATE.format(
        version=GO_RUNTIME_VERSION, os=system, arch=arch
    )
    download_and_extract(url, GO_INSTALL_FOLDER)

    # mark both extracted binaries as executable for user/group/others
    exec_bits = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    for binary in (GO_LAMBDA_RUNTIME, GO_LAMBDA_MOCKSERVER):
        os.chmod(binary, os.stat(binary).st_mode | exec_bits)
def install_cloudformation_libs():
    """Ensure the CloudFormation response module file has been downloaded."""
    from localstack.services.cloudformation import deployment_utils

    # trigger download of CF module file
    deployment_utils.get_cfn_response_mod_file()
def install_terraform() -> str:
    """Install the pinned Terraform release and return the binary path.

    Idempotent: returns immediately if the binary is already present.
    """
    if os.path.isfile(TERRAFORM_BIN):
        return TERRAFORM_BIN

    log_install_msg(f"Installing terraform {TERRAFORM_VERSION}")
    download_url = TERRAFORM_URL_TEMPLATE.format(
        version=TERRAFORM_VERSION, os=platform.system().lower(), arch=get_arch()
    )
    download_and_extract(download_url, os.path.dirname(TERRAFORM_BIN))
    chmod_r(TERRAFORM_BIN, 0o777)
    return TERRAFORM_BIN
def get_terraform_binary() -> str:
    """Return the path to the Terraform binary, installing it on first use.

    install_terraform() is already idempotent (it returns immediately when
    the binary exists and returns its path), so we delegate instead of
    duplicating the existence check here.
    """
    return install_terraform()
def install_component(name):
    """Run the installer registered for `name`; silently ignore unknown names."""
    installer = installers.get(name)
    if installer is None:
        return
    installer()
def install_components(names):
    """Install the given named components in parallel, then the Java lambda libs."""
    parallelize(install_component, names)
    install_lambda_java_libs()
def install_all_components():
    """Install components for all default services, then run the install hooks."""
    # install dependencies - make sure that install_components(..) is called before hooks.install below!
    install_components(DEFAULT_SERVICE_PORTS.keys())
    hooks.install.run()
def install_debugpy_and_dependencies():
    """Ensure the `debugpy` module is importable, installing it if needed."""
    try:
        import debugpy

        assert debugpy
        logging.debug("Debugpy module already Installed")
    except ModuleNotFoundError:
        logging.debug("Installing Debugpy module")
        import subprocess
        import sys

        # pip's in-process API (pip.main / pip._internal.main) is explicitly
        # unsupported and breaks across pip versions; the pip documentation
        # recommends invoking pip as a subprocess of the current interpreter.
        subprocess.check_call([sys.executable, "-m", "pip", "install", DEBUGPY_MODULE])
# -----------------
# HELPER FUNCTIONS
# -----------------
def log_install_msg(component, verbatim=False):
    """Log a standard "downloading and installing" message.

    Unless `verbatim` is set, the component name is wrapped as
    "local <component> server".
    """
    if not verbatim:
        component = "local %s server" % component
    LOG.info("Downloading and installing %s. This may take some time.", component)
def download_and_extract(archive_url, target_dir, retries=0, sleep=3, tmp_archive=None):
    """Download `archive_url` (unless already cached) and extract it into `target_dir`.

    :param retries: number of *additional* download attempts after the first
    :param sleep: seconds to wait between attempts
    :param tmp_archive: optional path of the downloaded archive; a fresh temp
        file is used when not given
    :raises Exception: if the download fails on every attempt, or the archive
        extension is not one of .zip/.gz/.bz2
    """
    mkdir(target_dir)

    # derive the archive type from the local file name if given, else the URL
    _, ext = os.path.splitext(tmp_archive or archive_url)

    tmp_archive = tmp_archive or new_tmp_file()
    if not os.path.exists(tmp_archive) or os.path.getsize(tmp_archive) <= 0:
        # create temporary placeholder file, to avoid duplicate parallel downloads
        save_file(tmp_archive, "")
        last_error = None
        for _ in range(retries + 1):
            try:
                download(archive_url, tmp_archive)
                last_error = None
                break
            except Exception as e:
                last_error = e
                time.sleep(sleep)
        if last_error is not None:
            # previously a failed download was silently ignored, which left an
            # empty placeholder file behind and later produced a confusing
            # extraction error - re-raise the real download failure instead
            raise last_error

    if ext == ".zip":
        unzip(tmp_archive, target_dir)
    elif ext in (".gz", ".bz2"):
        untar(tmp_archive, target_dir)
    else:
        raise Exception("Unsupported archive format: %s" % ext)
def download_and_extract_with_retry(archive_url, tmp_archive, target_dir):
    """Download and extract, re-downloading the archive once on failure.

    A corrupt cached archive is the common failure mode, so the cached file
    is deleted before the second attempt.
    """
    try:
        download_and_extract(archive_url, target_dir, tmp_archive=tmp_archive)
    except Exception as e:
        # try deleting and re-downloading the zip file
        LOG.info("Unable to extract file, re-downloading ZIP archive %s: %s", tmp_archive, e)
        rm_rf(tmp_archive)
        download_and_extract(archive_url, target_dir, tmp_archive=tmp_archive)
# kept here for backwards compatibility (installed on "make init" - TODO should be removed)
# maps a component name (as used by install_component) to its installer function
installers = {
    "cloudformation": install_cloudformation_libs,
    "dynamodb": install_dynamodb_local,
    "kinesis": install_kinesis,
    "kms": install_local_kms,
    "sqs": install_sqs_provider,
    "stepfunctions": install_stepfunctions_local,
}
# an installer is a (name, install-callable) pair
Installer = Tuple[str, Callable]


class InstallerRepository(Plugin):
    """Plugin interface that exposes a list of available installers."""

    namespace = "localstack.installer"

    def get_installer(self) -> List[Installer]:
        """Return the installers provided by this repository."""
        raise NotImplementedError
class CommunityInstallerRepository(InstallerRepository):
    """Installer repository shipping with the community (open-source) edition."""

    name = "community"

    def get_installer(self) -> List[Installer]:
        # static list of all installers available in the community edition
        return [
            ("awslamba-go-runtime", install_go_lambda_runtime),
            ("cloudformation-libs", install_cloudformation_libs),
            ("dynamodb-local", install_dynamodb_local),
            ("elasticmq", install_elasticmq),
            ("elasticsearch", install_elasticsearch),
            ("kinesalite", install_kinesalite),
            ("kinesis-client-libs", install_amazon_kinesis_client_libs),
            ("kinesis-mock", install_kinesis_mock),
            ("lambda-java-libs", install_lambda_java_libs),
            ("local-kms", install_local_kms),
            ("stepfunctions-local", install_stepfunctions_local),
            ("terraform", install_terraform),
        ]
class InstallerManager:
    """Discovers installers from all registered repositories and runs them."""

    def __init__(self):
        self.repositories: PluginManager[InstallerRepository] = PluginManager(
            InstallerRepository.namespace
        )
        # lazily-built cache of {package name -> installer callable}
        self._installers = None

    def get_installers(self) -> Dict[str, Callable]:
        """Return the merged installer mapping from all loaded repositories.

        The mapping is computed once and cached on the instance. (The previous
        implementation used functools.lru_cache on the method, which keys the
        cache on `self` and keeps every instance alive for the lifetime of
        the cache - see the functools docs / ruff rule B019.)
        """
        if self._installers is None:
            found: List[Installer] = []
            for repo in self.repositories.load_all():
                found.extend(repo.get_installer())
            self._installers = dict(found)
        return self._installers

    def install(self, package: str, *args, **kwargs):
        """Run the installer for `package`, forwarding extra arguments.

        :raises ValueError: if no installer is registered for `package`
        """
        installer = self.get_installers().get(package)
        if not installer:
            raise ValueError("no installer for package %s" % package)
        return installer(*args, **kwargs)
def main():
    """CLI entry point: `libs` installs all components, `testlibs` only test libs."""
    if len(sys.argv) <= 1:
        return
    # set test API key so pro install hooks are called
    os.environ["LOCALSTACK_API_KEY"] = os.environ.get("LOCALSTACK_API_KEY") or "test"
    command = sys.argv[1]
    if command == "libs":
        print("Initializing installation.")
        logging.basicConfig(level=logging.INFO)
        logging.getLogger("requests").setLevel(logging.WARNING)
        install_all_components()
    if command in ("libs", "testlibs"):
        # Install additional libraries for testing
        install_amazon_kinesis_client_libs()
        install_lambda_java_testlibs()
    print("Done.")


if __name__ == "__main__":
    main()
| true | true |
f713a54b6e20ca35a6b59abf3d1037510ba24f26 | 4,880 | py | Python | script_projet/MainAnnotator.py | JyLIU-emma/Complexit-_recettes | 9ae6db442cbf3f10460a5e1b6e2d7d33fdf8f8f3 | [
"CC0-1.0"
] | null | null | null | script_projet/MainAnnotator.py | JyLIU-emma/Complexit-_recettes | 9ae6db442cbf3f10460a5e1b6e2d7d33fdf8f8f3 | [
"CC0-1.0"
] | null | null | null | script_projet/MainAnnotator.py | JyLIU-emma/Complexit-_recettes | 9ae6db442cbf3f10460a5e1b6e2d7d33fdf8f8f3 | [
"CC0-1.0"
] | null | null | null | """
le script principale sert à annoter un répertoire de fichiers xml de recettes
"""
import glob
import re
import os
from oper_utils import xml_to_recipe_annotated
from Ner_classifieur_annote import load_crf_model, predict_text, transform_to_xml_annote
from NER_ingredient_detector import get_content_from_xmlfile
from ComplexCalculator import ComplexCalculator
# path to the pre-trained CRF NER model; loaded once at import time
modelpath = "../ml_models/model-20210515.pkl"
ner_clf = load_crf_model(modelpath)
def annote_with_crf(filename, ner_clf):
    """
    Annotate the recipe XML file with the CRF model; returns the recipe
    text as a string with XML annotations inserted.
    """
    ingredients, text_recette = get_content_from_xmlfile(filename)
    liste = predict_text(text_recette,ner_clf)
    text_after = transform_to_xml_annote(liste)
    return text_after
def transform_doc_to_xml(doc):
    """Rebuild the text of a spaCy `doc` with entity spans wrapped in XML tags.

    Each IOB-tagged entity becomes <LABEL id="...">...</LABEL>, where the id
    concatenates the token's ent_kb_id_ and ent_id_. Spacing around
    apostrophes and punctuation is normalized at the end.
    """
    text_after = []
    for token in doc:
        if token.ent_iob_ == "O":
            # token outside any entity: emit as-is
            text_after.append(token.text)
        elif token.ent_iob_ == "B" and token.i == doc[-1].i:
            # single-token entity at the very end of the doc
            text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text + f"</{token.ent_type_}>")
        elif token.ent_iob_ == "B" and doc[token.i+1].ent_iob_ == "I":
            # entity start continued by the next token: open the tag only
            text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text)
        elif token.ent_iob_ == "B" and doc[token.i+1].ent_iob_ != "I":
            # single-token entity in the middle of the doc
            text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text + f"</{token.ent_type_}>")
        elif token.ent_iob_ == "I" and token.i == doc[-1].i:
            # entity continuation ending the doc: close the tag
            text_after.append(token.text + f"</{token.ent_type_}>")
        elif token.ent_iob_ == "I" and doc[token.i+1].ent_iob_ == "I":
            # middle of a multi-token entity
            text_after.append(token.text)
        elif token.ent_iob_ == "I" and doc[token.i+1].ent_iob_ != "I":
            # last token of a multi-token entity: close the tag
            text_after.append(token.text + f"</{token.ent_type_}>")
    text_after = " ".join(text_after)
    # tighten spaces introduced by the join (after apostrophes, before , and .)
    text_after = re.sub("' ", "'", text_after)
    text_after = re.sub(r" (,|\.)", "\\1", text_after)
    return text_after
def parcours_corpus_annote(corpus_path, output_dir, liste=False):
    """Annotate every recipe XML file of a corpus and write augmented copies.

    :param corpus_path: directory containing .xml files (or a list of file
        paths when `liste` is True)
    :param output_dir: directory where annotated files are written
    :param liste: when True, `corpus_path` is already a list of file paths
    """
    if not liste:
        # use os.path.join so the glob works on every platform - the previous
        # pattern hard-coded a Windows backslash (f"{corpus_path}\*.xml")
        fics = glob.glob(os.path.join(corpus_path, "*.xml"))
    else:
        fics = corpus_path
    for fic in fics:
        try:
            fic_name = os.path.basename(fic)
            # two independent annotations: CRF model and symbolic rules
            recette_annote_crf = annote_with_crf(fic, ner_clf)
            recette_doc_spacy, dico_ingreds, dico_opers = xml_to_recipe_annotated(fic)
            recette_annote_rules = transform_doc_to_xml(recette_doc_spacy)
            # time/space complexity metrics of the recipe
            calculator = ComplexCalculator(dico_ingreds, dico_opers)
            complex_temps = calculator.get_O_temps()
            complex_espace = calculator.O_espace_f()
            ingreds = dico_ingreds_to_xml(dico_ingreds)
            opers = dico_opers_to_xml(dico_opers)
            ## add to xmlfile
            with open(fic,encoding="utf8") as f:
                xml_text = f.read()
            recette_xml_rules = '\n <annotation methode="symbolique">\n '+ recette_annote_rules + '\n </annotation>'
            recette_xml_crf = '\n <annotation methode="crf">\n '+ recette_annote_crf + '\n </annotation>'
            complexite_t = '\n <complexite>\n <temps>' + complex_temps + '</temps>\n <complexite>'
            complexite_e = '\n <complexite>\n <espace>' + complex_espace + '</espace>\n <complexite>'
            # insert all generated sections right after the </preparation> tag
            xml_text = re.sub("(</preparation>)", "\\1" + recette_xml_rules + recette_xml_crf + complexite_t + complexite_e + ingreds + opers, xml_text)
            with open(output_dir + os.sep + fic_name, "w", encoding="utf8") as f:
                f.write(xml_text)
        except Exception:
            # best-effort batch processing: report the failing file and continue
            print(f"Rencontrer problème pour: {fic}")
def dico_ingreds_to_xml(dico_ingreds):
    """Serialize the detected ingredients into a CDATA block for the XML output."""
    lines = [
        f'ingredient:{info["ingredient"]}\t id:{info["id"]}\t quantité:{info["quantite"]}\t unité:{info["unit"]}\t denombrable:{info["denombrable"]}\t recipient:{info["recipient"]}\n'
        for info in dico_ingreds.values()
    ]
    body = "".join(lines)
    return "\n<ingredients_trouve>\n<![CDATA[\n" + body + "]]>\n</ingredients_trouve>"
def dico_opers_to_xml(dico_opers):
    """Serialize the detected operations into a CDATA block for the XML output."""
    lines = [
        f'operation:{oper["action"]}\t id:{oper_id}\t ingrédients_ralatifs:{oper["ingreds"]}\t nombre_opération_atomique:{oper["nb_oper"]}\t temps:{oper["temps"]}\t recipient:{oper["recipient"]}\n'
        for oper_id, oper in dico_opers.items()
    ]
    body = "".join(lines)
    return "\n<operation_trouve>\n<![CDATA[\n" + body + "]]>\n</operation_trouve>"
if __name__ == "__main__":
corpus_path = "../corpus_recettes/corpus_for_final"
output = "../corpus_recettes/out_put"
parcours_corpus_annote(corpus_path, output) | 43.963964 | 207 | 0.647541 |
import glob
import re
import os
from oper_utils import xml_to_recipe_annotated
from Ner_classifieur_annote import load_crf_model, predict_text, transform_to_xml_annote
from NER_ingredient_detector import get_content_from_xmlfile
from ComplexCalculator import ComplexCalculator
modelpath = "../ml_models/model-20210515.pkl"
ner_clf = load_crf_model(modelpath)
def annote_with_crf(filename, ner_clf):
ingredients, text_recette = get_content_from_xmlfile(filename)
liste = predict_text(text_recette,ner_clf)
text_after = transform_to_xml_annote(liste)
return text_after
def transform_doc_to_xml(doc):
text_after = []
for token in doc:
if token.ent_iob_ == "O":
text_after.append(token.text)
elif token.ent_iob_ == "B" and token.i == doc[-1].i:
text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text + f"</{token.ent_type_}>")
elif token.ent_iob_ == "B" and doc[token.i+1].ent_iob_ == "I":
text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text)
elif token.ent_iob_ == "B" and doc[token.i+1].ent_iob_ != "I":
text_after.append(f'<{token.ent_type_} id="{token.ent_kb_id_ + token.ent_id_}">' + token.text + f"</{token.ent_type_}>")
elif token.ent_iob_ == "I" and token.i == doc[-1].i:
text_after.append(token.text + f"</{token.ent_type_}>")
elif token.ent_iob_ == "I" and doc[token.i+1].ent_iob_ == "I":
text_after.append(token.text)
elif token.ent_iob_ == "I" and doc[token.i+1].ent_iob_ != "I":
text_after.append(token.text + f"</{token.ent_type_}>")
text_after = " ".join(text_after)
text_after = re.sub("' ", "'", text_after)
text_after = re.sub(r" (,|\.)", "\\1", text_after)
return text_after
def parcours_corpus_annote(corpus_path, output_dir, liste=False):
if not liste:
fics = glob.glob(f"{corpus_path}\*.xml")
else:
fics = corpus_path
for fic in fics:
try:
fic_name = fic.split(f'{os.sep}')[-1]
recette_annote_crf = annote_with_crf(fic, ner_clf)
recette_doc_spacy, dico_ingreds, dico_opers = xml_to_recipe_annotated(fic)
recette_annote_rules = transform_doc_to_xml(recette_doc_spacy)
calculator = ComplexCalculator(dico_ingreds, dico_opers)
complex_temps = calculator.get_O_temps()
complex_espace = calculator.O_espace_f()
ingreds = dico_ingreds_to_xml(dico_ingreds)
opers = dico_opers_to_xml(dico_opers)
h open(fic,encoding="utf8") as f:
xml_text = f.read()
recette_xml_rules = '\n <annotation methode="symbolique">\n '+ recette_annote_rules + '\n </annotation>'
recette_xml_crf = '\n <annotation methode="crf">\n '+ recette_annote_crf + '\n </annotation>'
complexite_t = '\n <complexite>\n <temps>' + complex_temps + '</temps>\n <complexite>'
complexite_e = '\n <complexite>\n <espace>' + complex_espace + '</espace>\n <complexite>'
xml_text = re.sub("(</preparation>)", "\\1" + recette_xml_rules + recette_xml_crf + complexite_t + complexite_e + ingreds + opers, xml_text)
with open(output_dir + os.sep + fic_name, "w", encoding="utf8") as f:
f.write(xml_text)
except Exception:
print(f"Rencontrer problème pour: {fic}")
def dico_ingreds_to_xml(dico_ingreds):
liste = []
for ingred in dico_ingreds.values():
formate = f'ingredient:{ingred["ingredient"]}\t id:{ingred["id"]}\t quantité:{ingred["quantite"]}\t unité:{ingred["unit"]}\t denombrable:{ingred["denombrable"]}\t recipient:{ingred["recipient"]}\n'
liste.append(formate)
liste = "".join(liste)
liste = "\n<ingredients_trouve>\n<![CDATA[\n" + liste + "]]>\n</ingredients_trouve>"
return liste
def dico_opers_to_xml(dico_opers):
liste = []
for oper_id,oper in dico_opers.items():
formate = f'operation:{oper["action"]}\t id:{oper_id}\t ingrédients_ralatifs:{oper["ingreds"]}\t nombre_opération_atomique:{oper["nb_oper"]}\t temps:{oper["temps"]}\t recipient:{oper["recipient"]}\n'
liste.append(formate)
liste = "".join(liste)
liste = "\n<operation_trouve>\n<![CDATA[\n" + liste + "]]>\n</operation_trouve>"
return liste
if __name__ == "__main__":
corpus_path = "../corpus_recettes/corpus_for_final"
output = "../corpus_recettes/out_put"
parcours_corpus_annote(corpus_path, output) | true | true |
f713a559516860b1f3ff1a93a7e37ed2fca2b47d | 526 | py | Python | lognotify/setup.py | shashank-ssriva/lognotify | 800cb76c55d01d8fafde214486059794846887df | [
"MIT"
] | null | null | null | lognotify/setup.py | shashank-ssriva/lognotify | 800cb76c55d01d8fafde214486059794846887df | [
"MIT"
] | null | null | null | lognotify/setup.py | shashank-ssriva/lognotify | 800cb76c55d01d8fafde214486059794846887df | [
"MIT"
] | null | null | null | from setuptools import setup
# Packaging metadata for the lognotify utility (single-module distribution).
setup(name='lognotify',
      version='0.1',
      py_modules = ['lognotify'],
      description='A real-time log monitoring & notification utility which pops up a notification (while running your application) whenever it sees an error in log-file.',
      url='http://github.com/shashank-ssriva',
      author='Shashank Srivastava',
      license='MIT',
      # expose the `lognotify` console command, mapped to lognotify.app:main
      entry_points={
          'console_scripts':[
              'lognotify = lognotify.app:main'
          ]
      },
      zip_safe=False)
| 32.875 | 171 | 0.63308 | from setuptools import setup
setup(name='lognotify',
version='0.1',
py_modules = ['lognotify'],
description='A real-time log monitoring & notification utility which pops up a notification (while running your application) whenever it sees an error in log-file.',
url='http://github.com/shashank-ssriva',
author='Shashank Srivastava',
license='MIT',
entry_points={
'console_scripts':[
'lognotify = lognotify.app:main'
]
},
zip_safe=False)
| true | true |
f713a65884e06087a7d7534a6c0f82513c734815 | 1,428 | py | Python | 60824855-centroid-arcLength-approxPolyDP/centroid_room.py | nathancy/stackoverflow | e9e2e2b8fba61e41526638a13ac7ada6de2d7560 | [
"MIT"
] | 3 | 2019-09-18T10:45:20.000Z | 2021-09-18T08:36:49.000Z | 60824855-centroid-arcLength-approxPolyDP/centroid_room.py | nathancy/stackoverflow | e9e2e2b8fba61e41526638a13ac7ada6de2d7560 | [
"MIT"
] | 1 | 2020-03-19T15:49:31.000Z | 2020-03-30T14:54:03.000Z | 60824855-centroid-arcLength-approxPolyDP/centroid_room.py | nathancy/stackoverflow | e9e2e2b8fba61e41526638a13ac7ada6de2d7560 | [
"MIT"
] | 1 | 2021-04-08T19:30:42.000Z | 2021-04-08T19:30:42.000Z | import cv2
import numpy as np
# Load image, grayscale, Otsu's threshold
image = cv2.imread('1.png')
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)[1]
# Remove text
cnts = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
for c in cnts:
area = cv2.contourArea(c)
if area < 1000:
cv2.drawContours(thresh, [c], -1, 0, -1)
thresh = 255 - thresh
result = cv2.cvtColor(thresh, cv2.COLOR_GRAY2BGR)
coordinates = []
# Find rectangular boxes and obtain centroid coordinates
cnts = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
for c in cnts:
area = cv2.contourArea(c)
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, 0.05 * peri, True)
if len(approx) == 4 and area < 100000:
# cv2.drawContours(result, [c], -1, (36,255,12), 1)
M = cv2.moments(c)
cx = int(M['m10']/M['m00'])
cy = int(M['m01']/M['m00'])
coordinates.append((cx, cy))
cv2.circle(result, (cx, cy), 3, (36,255,12), -1)
cv2.putText(result, '({}, {})'.format(int(cx), int(cy)), (int(cx) -40, int(cy) -10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (36,255,12), 2)
print(coordinates)
cv2.imshow('thresh', thresh)
cv2.imshow('image', image)
cv2.imshow('result', result)
cv2.waitKey()
| 34 | 139 | 0.648459 | import cv2
import numpy as np
image = cv2.imread('1.png')
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)[1]
# Remove text
cnts = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
for c in cnts:
area = cv2.contourArea(c)
if area < 1000:
cv2.drawContours(thresh, [c], -1, 0, -1)
thresh = 255 - thresh
result = cv2.cvtColor(thresh, cv2.COLOR_GRAY2BGR)
coordinates = []
# Find rectangular boxes and obtain centroid coordinates
cnts = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cnts = cnts[0] if len(cnts) == 2 else cnts[1]
for c in cnts:
area = cv2.contourArea(c)
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, 0.05 * peri, True)
if len(approx) == 4 and area < 100000:
# cv2.drawContours(result, [c], -1, (36,255,12), 1)
M = cv2.moments(c)
cx = int(M['m10']/M['m00'])
cy = int(M['m01']/M['m00'])
coordinates.append((cx, cy))
cv2.circle(result, (cx, cy), 3, (36,255,12), -1)
cv2.putText(result, '({}, {})'.format(int(cx), int(cy)), (int(cx) -40, int(cy) -10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (36,255,12), 2)
print(coordinates)
cv2.imshow('thresh', thresh)
cv2.imshow('image', image)
cv2.imshow('result', result)
cv2.waitKey()
| true | true |
f713a67c0c5b77aacca20f819144495e8969b5c4 | 45,738 | py | Python | conans/client/conan_api.py | kolrami/conan | 93e4191e970d7ea464901c62d680d76517bb76ca | [
"MIT"
] | null | null | null | conans/client/conan_api.py | kolrami/conan | 93e4191e970d7ea464901c62d680d76517bb76ca | [
"MIT"
] | null | null | null | conans/client/conan_api.py | kolrami/conan | 93e4191e970d7ea464901c62d680d76517bb76ca | [
"MIT"
] | null | null | null | import os
import sys
import requests
from collections import OrderedDict
import conans
from conans import __version__ as client_version
from conans.client.cmd.create import create
from conans.client.hook_manager import HookManager
from conans.client.recorder.action_recorder import ActionRecorder
from conans.client.client_cache import ClientCache
from conans.client.conf import MIN_SERVER_COMPATIBLE_VERSION, ConanClientConfigParser
from conans.client.manager import ConanManager
from conans.client.migrations import ClientMigrator
from conans.client.output import ConanOutput, ScopedOutput
from conans.client.profile_loader import read_profile, profile_from_args, \
read_conaninfo_profile
from conans.client.recorder.search_recorder import SearchRecorder
from conans.client.recorder.upload_recoder import UploadRecorder
from conans.client.remote_manager import RemoteManager
from conans.client.remote_registry import RemoteRegistry
from conans.client.rest.auth_manager import ConanApiAuthManager
from conans.client.rest.rest_client import RestApiClient
from conans.client.rest.conan_requester import ConanRequester
from conans.client.rest.version_checker import VersionCheckerRequester
from conans.client.runner import ConanRunner
from conans.client.store.localdb import LocalDB
from conans.client.cmd.test import PackageTester
from conans.client.userio import UserIO
from conans.errors import ConanException
from conans.model.ref import ConanFileReference, PackageReference, check_valid_ref
from conans.model.version import Version
from conans.paths import get_conan_user_home, CONANINFO, BUILD_INFO
from conans.util.env_reader import get_env
from conans.util.files import save_files, exception_message_safe, mkdir
from conans.util.log import configure_logger
from conans.util.tracer import log_command, log_exception
from conans.tools import set_global_instances
from conans.client.cmd.uploader import CmdUpload
from conans.client.cmd.profile import cmd_profile_update, cmd_profile_get,\
cmd_profile_delete_key, cmd_profile_create, cmd_profile_list
from conans.client.cmd.search import Search
from conans.client.cmd.user import users_clean, users_list, user_set
from conans.client.importer import undo_imports, run_imports
from conans.client.cmd.export import cmd_export, export_alias, export_source, export_recipe
from conans.unicode import get_cwd
from conans.client.remover import ConanRemover
from conans.client.cmd.download import download
from conans.model.workspace import Workspace
from conans.client.graph.graph_manager import GraphManager
from conans.client.loader import ConanFileLoader
from conans.client.graph.proxy import ConanProxy
from conans.client.graph.python_requires import ConanPythonRequire
from conans.client.graph.range_resolver import RangeResolver
from conans.client import packager
from conans.client.source import config_source_local
from conans.client.cmd.build import build
from conans.client.cmd.export_pkg import export_pkg
from conans.client import tools
# Default directory name where package manifests are stored/checked when the
# user enables manifest verification without giving an explicit folder.
default_manifest_folder = '.conan_manifests'
def get_request_timeout():
    """Read the HTTP request timeout from the CONAN_REQUEST_TIMEOUT env var.

    :return: the timeout in seconds as a float, or None when the variable
             is unset.
    :raises ConanException: when the value is not a valid number.
    """
    raw = os.getenv("CONAN_REQUEST_TIMEOUT")
    if raw is None:
        return None
    try:
        return float(raw)
    except ValueError:
        raise ConanException("Specify a numeric parameter for 'request_timeout'")
def get_basic_requester(client_cache):
    """Create the ConanRequester used for all HTTP traffic.

    Wraps a plain requests.Session; the wrapper manages SSL verification,
    client certificates and proxy setup from the client configuration.
    """
    timeout = get_request_timeout()
    return ConanRequester(requests.Session(), client_cache, timeout)
def api_method(f):
    """Decorator applied to every public ConanAPIV1 method.

    It logs the command invocation, runs the call with the configuration
    environment variables applied, logs any raised exception (best effort)
    and always restores the original working directory afterwards.
    """
    from functools import wraps

    @wraps(f)  # preserve the wrapped function's __name__/__doc__ for callers and logs
    def wrapper(*args, **kwargs):
        the_self = args[0]
        # Capture the cwd BEFORE entering the try block: if get_cwd() itself
        # failed inside the try, the finally clause would reference an
        # unbound 'curdir' (UnboundLocalError) and mask the real error.
        curdir = get_cwd()
        try:
            log_command(f.__name__, kwargs)
            with tools.environment_append(the_self._client_cache.conan_config.env_vars):
                # Patch the globals in tools
                return f(*args, **kwargs)
        except Exception as exc:
            msg = exception_message_safe(exc)
            try:
                log_exception(exc, msg)
            except BaseException:
                # Logging must never replace the original exception
                pass
            raise
        finally:
            os.chdir(curdir)
    return wrapper
def _make_abs_path(path, cwd=None, default=None):
"""convert 'path' to absolute if necessary (could be already absolute)
if not defined (empty, or None), will return 'default' one or 'cwd'
"""
cwd = cwd or get_cwd()
if not path:
abs_path = default or cwd
elif os.path.isabs(path):
abs_path = path
else:
abs_path = os.path.normpath(os.path.join(cwd, path))
return abs_path
def _get_conanfile_path(path, cwd, py):
    """
    param py= True: Must be .py, False: Must be .txt, None: Try .py, then .txt
    """
    tried = list()
    path = _make_abs_path(path, cwd)
    if os.path.isdir(path): # Can be a folder
        if py:
            path = os.path.join(path, "conanfile.py")
            tried.append(path)
        elif py is False:
            path = os.path.join(path, "conanfile.txt")
            tried.append(path)
        else:
            # Prefer conanfile.py; fall back to conanfile.txt when missing
            py_candidate = os.path.join(path, "conanfile.py")
            tried.append(py_candidate)
            if os.path.exists(py_candidate):
                path = py_candidate
            else:
                path = os.path.join(path, "conanfile.txt")
                tried.append(path)
    else:
        tried.append(path)
    if not os.path.isfile(path): # Must exist
        raise ConanException("Conanfile not found at %s" % " or ".join(tried))
    if py and not path.endswith(".py"):
        raise ConanException("A conanfile.py is needed, " + path + " is not acceptable")
    return path
class ConanAPIV1(object):
    """Conan client public API (v1): roughly one method per conan command.

    Public methods are decorated with @api_method, which logs the call,
    applies the configured environment variables and restores the original
    working directory when the call finishes.
    """
    @staticmethod
    def instance_remote_manager(requester, client_cache, user_io, _client_version,
                                min_server_compatible_version, hook_manager):
        """Wire up the remote-connection stack.

        :return: tuple (localdb, rest_api_client, remote_manager)
        """
        # Verify client version against remotes
        version_checker_req = VersionCheckerRequester(requester, _client_version,
                                                      min_server_compatible_version,
                                                      user_io.out)
        # To handle remote connections
        put_headers = client_cache.read_put_headers()
        rest_api_client = RestApiClient(user_io.out, requester=version_checker_req,
                                        put_headers=put_headers)
        # To store user and token
        localdb = LocalDB(client_cache.localdb)
        # Wraps RestApiClient to add authentication support (same interface)
        auth_manager = ConanApiAuthManager(rest_api_client, user_io, localdb)
        # Handle remote connections
        remote_manager = RemoteManager(client_cache, auth_manager, user_io.out, hook_manager)
        return localdb, rest_api_client, remote_manager
    @staticmethod
    def factory(interactive=None):
        """Factory"""
        # Respect color env setting or check tty if unset
        color_set = "CONAN_COLOR_DISPLAY" in os.environ
        if ((color_set and get_env("CONAN_COLOR_DISPLAY", 1))
                or (not color_set
                    and hasattr(sys.stdout, "isatty")
                    and sys.stdout.isatty())):
            import colorama
            if get_env("PYCHARM_HOSTED"): # in PyCharm disable convert/strip
                colorama.init(convert=False, strip=False)
            else:
                colorama.init()
            color = True
        else:
            color = False
        out = ConanOutput(sys.stdout, color)
        user_io = UserIO(out=out)
        try:
            user_home = get_conan_user_home()
            client_cache = migrate_and_get_client_cache(user_home, out)
            # Allow user-provided python modules under ~/.conan/python
            sys.path.append(os.path.join(user_home, "python"))
        except Exception as e:
            out.error(str(e))
            raise
        with tools.environment_append(client_cache.conan_config.env_vars):
            # Adjust CONAN_LOGGING_LEVEL with the env readed
            conans.util.log.logger = configure_logger()
            # Create Hook Manager
            hook_manager = HookManager(client_cache.hooks_path, get_env("CONAN_HOOKS", list()),
                                       user_io.out)
            # Get the new command instance after migrations have been done
            requester = get_basic_requester(client_cache)
            _, _, remote_manager = ConanAPIV1.instance_remote_manager(
                requester,
                client_cache, user_io,
                Version(client_version),
                Version(MIN_SERVER_COMPATIBLE_VERSION),
                hook_manager)
            # Adjust global tool variables
            set_global_instances(out, requester)
            # Settings preprocessor
            if interactive is None:
                interactive = not get_env("CONAN_NON_INTERACTIVE", False)
            conan = ConanAPIV1(client_cache, user_io, get_conan_runner(), remote_manager,
                               hook_manager, interactive=interactive)
        return conan, client_cache, user_io
    def __init__(self, client_cache, user_io, runner, remote_manager, hook_manager,
                 interactive=True):
        """Store the collaborators and build the loader/proxy/graph machinery."""
        assert isinstance(user_io, UserIO)
        assert isinstance(client_cache, ClientCache)
        self._client_cache = client_cache
        self._user_io = user_io
        self._runner = runner
        self._remote_manager = remote_manager
        self._registry = RemoteRegistry(self._client_cache.registry, self._user_io.out)
        if not interactive:
            self._user_io.disable_input()
        self._proxy = ConanProxy(client_cache, self._user_io.out, remote_manager,
                                 registry=self._registry)
        resolver = RangeResolver(self._user_io.out, client_cache, self._proxy)
        python_requires = ConanPythonRequire(self._proxy, resolver)
        self._loader = ConanFileLoader(self._runner, self._user_io.out, python_requires)
        self._graph_manager = GraphManager(self._user_io.out, self._client_cache, self._registry,
                                           self._remote_manager, self._loader, self._proxy,
                                           resolver)
        self._hook_manager = hook_manager
    def _init_manager(self, action_recorder):
        """Every api call gets a new recorder and new manager"""
        return ConanManager(self._client_cache, self._user_io,
                            self._remote_manager, action_recorder, self._registry,
                            self._graph_manager, self._hook_manager)
    @api_method
    def new(self, name, header=False, pure_c=False, test=False, exports_sources=False, bare=False,
            cwd=None, visual_versions=None, linux_gcc_versions=None, linux_clang_versions=None,
            osx_clang_versions=None, shared=None, upload_url=None, gitignore=None,
            gitlab_gcc_versions=None, gitlab_clang_versions=None,
            circleci_gcc_versions=None, circleci_clang_versions=None, circleci_osx_versions=None):
        """Create a new recipe (and optionally CI scripts) in 'cwd' from templates."""
        from conans.client.cmd.new import cmd_new
        cwd = os.path.abspath(cwd or get_cwd())
        files = cmd_new(name, header=header, pure_c=pure_c, test=test,
                        exports_sources=exports_sources, bare=bare,
                        visual_versions=visual_versions,
                        linux_gcc_versions=linux_gcc_versions,
                        linux_clang_versions=linux_clang_versions,
                        osx_clang_versions=osx_clang_versions, shared=shared,
                        upload_url=upload_url, gitignore=gitignore,
                        gitlab_gcc_versions=gitlab_gcc_versions,
                        gitlab_clang_versions=gitlab_clang_versions,
                        circleci_gcc_versions=circleci_gcc_versions,
                        circleci_clang_versions=circleci_clang_versions,
                        circleci_osx_versions=circleci_osx_versions)
        save_files(cwd, files)
        for f in sorted(files):
            self._user_io.out.success("File saved: %s" % f)
    @api_method
    def inspect(self, path, attributes, remote_name=None):
        """Return an OrderedDict with the requested conanfile attributes.

        'path' may be a full reference (the recipe is fetched through the
        proxy) or a local path to a conanfile.py. If 'attributes' is empty,
        a default list of recipe attributes is reported.
        """
        try:
            reference = ConanFileReference.loads(path)
        except ConanException:
            # Not a reference: treat 'path' as a local conanfile.py path
            reference = None
            cwd = get_cwd()
            conanfile_path = _get_conanfile_path(path, cwd, py=True)
        else:
            update = True if remote_name else False
            result = self._proxy.get_recipe(reference, update, update, remote_name,
                                            ActionRecorder())
            conanfile_path, _, _, reference = result
        conanfile = self._loader.load_basic(conanfile_path, self._user_io.out)
        result = OrderedDict()
        if not attributes:
            attributes = ['name', 'version', 'url', 'homepage', 'license', 'author',
                          'description', 'topics', 'generators', 'exports', 'exports_sources',
                          'short_paths', 'apply_env', 'build_policy', 'settings', 'options',
                          'default_options']
        for attribute in attributes:
            try:
                attr = getattr(conanfile, attribute)
                result[attribute] = attr
            except AttributeError as e:
                raise ConanException(str(e))
        return result
    @api_method
    def test(self, path, reference, profile_name=None, settings=None, options=None, env=None,
             remote_name=None, update=False, build_modes=None, cwd=None, test_build_folder=None):
        """Install 'reference' and run the test_package conanfile at 'path' against it."""
        settings = settings or []
        options = options or []
        env = env or []
        conanfile_path = _get_conanfile_path(path, cwd, py=True)
        cwd = cwd or get_cwd()
        profile = profile_from_args(profile_name, settings, options, env, cwd,
                                    self._client_cache)
        reference = ConanFileReference.loads(reference)
        recorder = ActionRecorder()
        manager = self._init_manager(recorder)
        pt = PackageTester(manager, self._user_io)
        pt.install_build_and_test(conanfile_path, reference, profile, remote_name,
                                  update, build_modes=build_modes,
                                  test_build_folder=test_build_folder)
    @api_method
    def create(self, conanfile_path, name=None, version=None, user=None, channel=None,
               profile_name=None, settings=None,
               options=None, env=None, test_folder=None, not_export=False,
               build_modes=None,
               keep_source=False, keep_build=False, verify=None,
               manifests=None, manifests_interactive=None,
               remote_name=None, update=False, cwd=None, test_build_folder=None):
        """
        API method to create a conan package
        :param test_folder: default None - looks for default 'test' or 'test_package' folder),
        string - test_folder path
        False - disabling tests
        """
        settings = settings or []
        options = options or []
        env = env or []
        try:
            cwd = cwd or os.getcwd()
            recorder = ActionRecorder()
            conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
            reference, conanfile = self._loader.load_export(conanfile_path, name, version, user,
                                                            channel)
            # Make sure keep_source is set for keep_build
            keep_source = keep_source or keep_build
            # Forcing an export!
            if not not_export:
                cmd_export(conanfile_path, conanfile, reference, keep_source, self._user_io.out,
                           self._client_cache, self._hook_manager)
                recorder.recipe_exported(reference)
            if build_modes is None: # Not specified, force build the tested library
                build_modes = [conanfile.name]
            manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
            manifest_folder, manifest_interactive, manifest_verify = manifests
            profile = profile_from_args(profile_name, settings, options, env,
                                        cwd, self._client_cache)
            manager = self._init_manager(recorder)
            recorder.add_recipe_being_developed(reference)
            create(reference, manager, self._user_io, profile, remote_name, update, build_modes,
                   manifest_folder, manifest_verify, manifest_interactive, keep_build,
                   test_build_folder, test_folder, conanfile_path)
            return recorder.get_info()
        except ConanException as exc:
            # Attach the recorded trace to the exception for the caller/CLI
            recorder.error = True
            exc.info = recorder.get_info()
            raise
    @api_method
    def export_pkg(self, conanfile_path, name, channel, source_folder=None, build_folder=None,
                   package_folder=None, install_folder=None, profile_name=None, settings=None,
                   options=None, env=None, force=False, user=None, version=None, cwd=None):
        """Export the recipe and create the binary package from local folders
        (no build), equivalent to the 'conan export-pkg' command."""
        settings = settings or []
        options = options or []
        env = env or []
        cwd = cwd or get_cwd()
        try:
            recorder = ActionRecorder()
            # Checks that info files exists if the install folder is specified
            if install_folder and not existing_info_files(_make_abs_path(install_folder, cwd)):
                raise ConanException("The specified install folder doesn't contain '%s' and '%s' "
                                     "files" % (CONANINFO, BUILD_INFO))
            conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
            if package_folder:
                if build_folder or source_folder:
                    raise ConanException("package folder definition incompatible with build "
                                         "and source folders")
                package_folder = _make_abs_path(package_folder, cwd)
            build_folder = _make_abs_path(build_folder, cwd)
            install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
            source_folder = _make_abs_path(source_folder, cwd,
                                           default=os.path.dirname(conanfile_path))
            # Checks that no both settings and info files are specified
            infos_present = existing_info_files(install_folder)
            if profile_name or settings or options or env or not infos_present:
                profile = profile_from_args(profile_name, settings, options, env=env,
                                            cwd=cwd, client_cache=self._client_cache)
            else:
                profile = read_conaninfo_profile(install_folder)
            reference, conanfile = self._loader.load_export(conanfile_path, name, version, user,
                                                            channel)
            recorder.recipe_exported(reference)
            recorder.add_recipe_being_developed(reference)
            cmd_export(conanfile_path, conanfile, reference, False, self._user_io.out,
                       self._client_cache, self._hook_manager)
            export_pkg(self._client_cache, self._graph_manager, self._hook_manager, recorder,
                       self._user_io.out,
                       reference, source_folder=source_folder, build_folder=build_folder,
                       package_folder=package_folder, install_folder=install_folder,
                       profile=profile, force=force)
            return recorder.get_info()
        except ConanException as exc:
            recorder.error = True
            exc.info = recorder.get_info()
            raise
    @api_method
    def download(self, reference, remote_name=None, package=None, recipe=False):
        """Download a recipe (and optionally binary packages) from a remote.

        :param package: list of package IDs to download, or None
        :param recipe: True to download only the recipe
        """
        if package and recipe:
            raise ConanException("recipe parameter cannot be used together with package")
        # Install packages without settings (fixed ids or all)
        conan_ref = ConanFileReference.loads(reference)
        if check_valid_ref(conan_ref, allow_pattern=False):
            recorder = ActionRecorder()
            download(conan_ref, package, remote_name, recipe, self._registry, self._remote_manager,
                     self._client_cache, self._user_io.out, recorder, self._loader,
                     self._hook_manager)
        else:
            raise ConanException("Provide a valid full reference without wildcards.")
    @api_method
    def install_reference(self, reference, settings=None, options=None, env=None,
                          remote_name=None, verify=None, manifests=None,
                          manifests_interactive=None, build=None, profile_name=None,
                          update=False, generators=None, install_folder=None, cwd=None):
        """Install a package by reference ('conan install <ref>')."""
        try:
            recorder = ActionRecorder()
            cwd = cwd or os.getcwd()
            install_folder = _make_abs_path(install_folder, cwd)
            manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
            manifest_folder, manifest_interactive, manifest_verify = manifests
            profile = profile_from_args(profile_name, settings, options, env, cwd,
                                        self._client_cache)
            if not generators: # We don't want the default txt
                generators = False
            mkdir(install_folder)
            manager = self._init_manager(recorder)
            manager.install(reference=reference, install_folder=install_folder,
                            remote_name=remote_name, profile=profile, build_modes=build,
                            update=update, manifest_folder=manifest_folder,
                            manifest_verify=manifest_verify,
                            manifest_interactive=manifest_interactive,
                            generators=generators)
            return recorder.get_info()
        except ConanException as exc:
            recorder.error = True
            exc.info = recorder.get_info()
            raise
    @api_method
    def install(self, path="", settings=None, options=None, env=None,
                remote_name=None, verify=None, manifests=None,
                manifests_interactive=None, build=None, profile_name=None,
                update=False, generators=None, no_imports=False, install_folder=None, cwd=None):
        """Install the dependencies of a local conanfile (or a workspace)."""
        try:
            recorder = ActionRecorder()
            cwd = cwd or os.getcwd()
            manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
            manifest_folder, manifest_interactive, manifest_verify = manifests
            profile = profile_from_args(profile_name, settings, options, env, cwd,
                                        self._client_cache)
            wspath = _make_abs_path(path, cwd)
            if install_folder:
                if os.path.isabs(install_folder):
                    wsinstall_folder = install_folder
                else:
                    wsinstall_folder = os.path.join(cwd, install_folder)
            else:
                wsinstall_folder = None
            # A conanws.yml at 'wspath' switches to workspace mode
            workspace = Workspace.get_workspace(wspath, wsinstall_folder)
            if workspace:
                self._user_io.out.success("Using conanws.yml file from %s" % workspace._base_folder)
                manager = self._init_manager(recorder)
                manager.install_workspace(profile, workspace, remote_name, build, update)
                return
            install_folder = _make_abs_path(install_folder, cwd)
            conanfile_path = _get_conanfile_path(path, cwd, py=None)
            manager = self._init_manager(recorder)
            manager.install(reference=conanfile_path,
                            install_folder=install_folder,
                            remote_name=remote_name,
                            profile=profile,
                            build_modes=build,
                            update=update,
                            manifest_folder=manifest_folder,
                            manifest_verify=manifest_verify,
                            manifest_interactive=manifest_interactive,
                            generators=generators,
                            no_imports=no_imports)
            return recorder.get_info()
        except ConanException as exc:
            recorder.error = True
            exc.info = recorder.get_info()
            raise
    @api_method
    def config_get(self, item):
        """Print and return a conan.conf configuration value."""
        config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
        self._user_io.out.info(config_parser.get_item(item))
        return config_parser.get_item(item)
    @api_method
    def config_set(self, item, value):
        """Set a conan.conf configuration value and invalidate the cached config."""
        config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
        config_parser.set_item(item, value)
        self._client_cache.invalidate()
    @api_method
    def config_rm(self, item):
        """Remove a conan.conf configuration entry and invalidate the cached config."""
        config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
        config_parser.rm_item(item)
        self._client_cache.invalidate()
    @api_method
    def config_install(self, item, verify_ssl, config_type=None, args=None):
        """Install configuration files from a URL, git repo or local path ('item')."""
        # _make_abs_path, but could be not a path at all
        if item is not None and os.path.exists(item) and not os.path.isabs(item):
            item = os.path.abspath(item)
        from conans.client.conf.config_installer import configuration_install
        return configuration_install(item, self._client_cache, self._user_io.out, verify_ssl,
                                     requester=self._remote_manager._auth_manager._rest_client.requester, # FIXME: Look out!
                                     config_type=config_type, args=args)
    def _info_get_profile(self, reference, install_folder, profile_name, settings, options, env):
        """Resolve the reference (or conanfile path) and profile for the info_* methods.

        :return: tuple (reference_or_path, profile)
        """
        cwd = get_cwd()
        try:
            reference = ConanFileReference.loads(reference)
        except ConanException:
            reference = _get_conanfile_path(reference, cwd=None, py=None)
        if install_folder or not (profile_name or settings or options or env):
            # When not install folder is specified but neither any setting, we try to read the
            # info from cwd
            install_folder = _make_abs_path(install_folder, cwd)
            if existing_info_files(install_folder):
                return reference, read_conaninfo_profile(install_folder)
        return reference, profile_from_args(profile_name, settings, options, env=env,
                                            cwd=cwd, client_cache=self._client_cache)
    @api_method
    def info_build_order(self, reference, settings=None, options=None, env=None,
                         profile_name=None, remote_name=None, build_order=None, check_updates=None,
                         install_folder=None):
        """Compute the build order of the dependency graph for 'reference'."""
        reference, profile = self._info_get_profile(reference, install_folder, profile_name,
                                                    settings, options, env)
        recorder = ActionRecorder()
        deps_graph, _, _ = self._graph_manager.load_graph(reference, None, profile, ["missing"],
                                                          check_updates, False, remote_name,
                                                          recorder, workspace=None)
        return deps_graph.build_order(build_order)
    @api_method
    def info_nodes_to_build(self, reference, build_modes, settings=None, options=None, env=None,
                            profile_name=None, remote_name=None, check_updates=None,
                            install_folder=None):
        """Return (nodes_to_build, conanfile) for the graph of 'reference'."""
        reference, profile = self._info_get_profile(reference, install_folder, profile_name,
                                                    settings, options, env)
        recorder = ActionRecorder()
        deps_graph, conanfile, _ = self._graph_manager.load_graph(reference, None, profile,
                                                                  build_modes, check_updates,
                                                                  False, remote_name, recorder,
                                                                  workspace=None)
        nodes_to_build = deps_graph.nodes_to_build()
        return nodes_to_build, conanfile
    @api_method
    def info(self, reference, remote_name=None, settings=None, options=None, env=None,
             profile_name=None, update=False, install_folder=None, build=None):
        """Load and return (deps_graph, conanfile) for 'reference' ('conan info')."""
        reference, profile = self._info_get_profile(reference, install_folder, profile_name,
                                                    settings, options, env)
        recorder = ActionRecorder()
        deps_graph, conanfile, _ = self._graph_manager.load_graph(reference, None, profile, build,
                                                                  update, False, remote_name,
                                                                  recorder, workspace=None)
        return deps_graph, conanfile
    @api_method
    def build(self, conanfile_path, source_folder=None, package_folder=None, build_folder=None,
              install_folder=None, should_configure=True, should_build=True, should_install=True,
              should_test=True, cwd=None):
        """Run the build() method of a local conanfile ('conan build')."""
        cwd = cwd or get_cwd()
        conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
        build_folder = _make_abs_path(build_folder, cwd)
        install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
        source_folder = _make_abs_path(source_folder, cwd, default=os.path.dirname(conanfile_path))
        default_pkg_folder = os.path.join(build_folder, "package")
        package_folder = _make_abs_path(package_folder, cwd, default=default_pkg_folder)
        build(self._graph_manager, self._hook_manager, conanfile_path, self._user_io.out,
              source_folder, build_folder, package_folder, install_folder,
              should_configure=should_configure, should_build=should_build,
              should_install=should_install, should_test=should_test)
    @api_method
    def package(self, path, build_folder, package_folder, source_folder=None, install_folder=None,
                cwd=None):
        """Run the package() method of a local conanfile ('conan package')."""
        cwd = cwd or get_cwd()
        conanfile_path = _get_conanfile_path(path, cwd, py=True)
        build_folder = _make_abs_path(build_folder, cwd)
        install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
        source_folder = _make_abs_path(source_folder, cwd, default=os.path.dirname(conanfile_path))
        default_pkg_folder = os.path.join(build_folder, "package")
        package_folder = _make_abs_path(package_folder, cwd, default=default_pkg_folder)
        if package_folder == build_folder:
            raise ConanException("Cannot 'conan package' to the build folder. "
                                 "--build-folder and package folder can't be the same")
        output = ScopedOutput("PROJECT", self._user_io.out)
        conanfile = self._graph_manager.load_consumer_conanfile(conanfile_path, install_folder,
                                                                output, deps_info_required=True)
        packager.create_package(conanfile, None, source_folder, build_folder, package_folder,
                                install_folder, output, self._hook_manager, conanfile_path, None,
                                local=True, copy_info=True)
    @api_method
    def source(self, path, source_folder=None, info_folder=None, cwd=None):
        """Run the source() method of a local conanfile ('conan source')."""
        cwd = cwd or get_cwd()
        conanfile_path = _get_conanfile_path(path, cwd, py=True)
        source_folder = _make_abs_path(source_folder, cwd)
        info_folder = _make_abs_path(info_folder, cwd)
        mkdir(source_folder)
        if not os.path.exists(info_folder):
            raise ConanException("Specified info-folder doesn't exist")
        output = ScopedOutput("PROJECT", self._user_io.out)
        # only infos if exist
        conanfile = self._graph_manager.load_consumer_conanfile(conanfile_path, info_folder, output)
        conanfile_folder = os.path.dirname(conanfile_path)
        if conanfile_folder != source_folder:
            output.info("Executing exports to: %s" % source_folder)
            export_recipe(conanfile, conanfile_folder, source_folder, output)
            export_source(conanfile, conanfile_folder, source_folder, output)
        config_source_local(source_folder, conanfile, output, conanfile_path,
                            self._hook_manager)
    @api_method
    def imports(self, path, dest=None, info_folder=None, cwd=None):
        """
        :param path: Path to the conanfile
        :param dest: Dir to put the imported files. (Abs path or relative to cwd)
        :param info_folder: Dir where the conaninfo.txt and conanbuildinfo.txt files are
        :param cwd: Current working directory
        :return: None
        """
        cwd = cwd or get_cwd()
        info_folder = _make_abs_path(info_folder, cwd)
        dest = _make_abs_path(dest, cwd)
        mkdir(dest)
        conanfile_abs_path = _get_conanfile_path(path, cwd, py=None)
        output = ScopedOutput("PROJECT", self._user_io.out)
        conanfile = self._graph_manager.load_consumer_conanfile(conanfile_abs_path, info_folder,
                                                                output, deps_info_required=True)
        run_imports(conanfile, dest, output)
    @api_method
    def imports_undo(self, manifest_path):
        """Remove the files listed in an imports manifest ('conan imports --undo')."""
        cwd = get_cwd()
        manifest_path = _make_abs_path(manifest_path, cwd)
        undo_imports(manifest_path, self._user_io.out)
    @api_method
    def export(self, path, name, version, user, channel, keep_source=False, cwd=None):
        """Export a recipe to the local cache ('conan export')."""
        conanfile_path = _get_conanfile_path(path, cwd, py=True)
        reference, conanfile = self._loader.load_export(conanfile_path, name, version, user,
                                                        channel)
        cmd_export(conanfile_path, conanfile, reference, keep_source, self._user_io.out,
                   self._client_cache, self._hook_manager)
    @api_method
    def remove(self, pattern, query=None, packages=None, builds=None, src=False, force=False,
               remote_name=None, outdated=False):
        """Remove recipes/packages matching 'pattern' from the cache or a remote."""
        remover = ConanRemover(self._client_cache, self._remote_manager, self._user_io,
                               self._registry)
        remover.remove(pattern, remote_name, src, builds, packages, force=force,
                       packages_query=query, outdated=outdated)
    @api_method
    def copy(self, reference, user_channel, force=False, packages=None):
        """
        param packages: None=No binaries, True=All binaries, else list of IDs
        """
        from conans.client.cmd.copy import cmd_copy
        # FIXME: conan copy does not support short-paths in Windows
        reference = ConanFileReference.loads(str(reference))
        cmd_copy(reference, user_channel, packages, self._client_cache,
                 self._user_io, self._remote_manager, self._registry, self._loader, force=force)
    @api_method
    def authenticate(self, name, password, remote_name):
        """Authenticate 'name' against a remote; returns (remote_name, prev_user, user)."""
        remote = self.get_remote_by_name(remote_name)
        _, remote_name, prev_user, user = self._remote_manager.authenticate(remote, name, password)
        return remote_name, prev_user, user
    @api_method
    def user_set(self, user, remote_name=None):
        """Set the logged user for a remote (default remote when unspecified)."""
        remote = (self.get_default_remote() if not remote_name
                  else self.get_remote_by_name(remote_name))
        return user_set(self._client_cache.localdb, user, remote)
    @api_method
    def users_clean(self):
        """Remove all stored users/tokens from the local database."""
        users_clean(self._client_cache.localdb)
    @api_method
    def users_list(self, remote_name=None):
        """List the authenticated users per remote; returns an info dict."""
        info = {"error": False, "remotes": []}
        remotes = [self.get_remote_by_name(remote_name)] if remote_name else self.remote_list()
        try:
            info["remotes"] = users_list(self._client_cache.localdb, remotes)
            return info
        except ConanException as exc:
            info["error"] = True
            exc.info = info
            raise
    @api_method
    def search_recipes(self, pattern, remote_name=None, case_sensitive=False):
        """Search recipes matching 'pattern' locally or in a remote."""
        recorder = SearchRecorder()
        search = Search(self._client_cache, self._remote_manager, self._registry)
        try:
            references = search.search_recipes(pattern, remote_name, case_sensitive)
        except ConanException as exc:
            recorder.error = True
            exc.info = recorder.get_info()
            raise
        for remote_name, refs in references.items():
            for ref in refs:
                recorder.add_recipe(remote_name, ref, with_packages=False)
        return recorder.get_info()
    @api_method
    def search_packages(self, reference, query=None, remote_name=None, outdated=False):
        """Search binary packages of 'reference', optionally filtered by 'query'."""
        recorder = SearchRecorder()
        search = Search(self._client_cache, self._remote_manager, self._registry)
        try:
            reference = ConanFileReference.loads(str(reference))
            references = search.search_packages(reference, remote_name, query=query,
                                                outdated=outdated)
        except ConanException as exc:
            recorder.error = True
            exc.info = recorder.get_info()
            raise
        for remote_name, remote_ref in references.items():
            recorder.add_recipe(remote_name, reference)
            if remote_ref.ordered_packages:
                for package_id, properties in remote_ref.ordered_packages.items():
                    package_recipe_hash = properties.get("recipe_hash", None)
                    recorder.add_package(remote_name, reference,
                                         package_id, properties.get("options", []),
                                         properties.get("settings", []),
                                         properties.get("full_requires", []),
                                         remote_ref.recipe_hash != package_recipe_hash)
        return recorder.get_info()
    @api_method
    def upload(self, pattern, package=None, remote_name=None, all_packages=False, confirm=False,
               retry=2, retry_wait=5, integrity_check=False, policy=None, query=None):
        """ Uploads a package recipe and the generated binary packages to a specified remote
        """
        recorder = UploadRecorder()
        uploader = CmdUpload(self._client_cache, self._user_io, self._remote_manager,
                             self._registry, self._loader, self._hook_manager)
        try:
            uploader.upload(recorder, pattern, package, all_packages, confirm, retry,
                            retry_wait, integrity_check, policy, remote_name, query=query)
            return recorder.get_info()
        except ConanException as exc:
            recorder.error = True
            exc.info = recorder.get_info()
            raise
    @api_method
    def remote_list(self):
        """Return the list of configured remotes."""
        return self._registry.remotes.list
    @api_method
    def remote_add(self, remote_name, url, verify_ssl=True, insert=None, force=None):
        """Add a remote to the registry."""
        return self._registry.remotes.add(remote_name, url, verify_ssl, insert, force)
    @api_method
    def remote_remove(self, remote_name):
        """Remove a remote from the registry."""
        return self._registry.remotes.remove(remote_name)
    @api_method
    def remote_update(self, remote_name, url, verify_ssl=True, insert=None):
        """Update the URL/SSL settings of an existing remote."""
        return self._registry.remotes.update(remote_name, url, verify_ssl, insert)
    @api_method
    def remote_rename(self, remote_name, new_new_remote):
        """Rename a remote in the registry."""
        return self._registry.remotes.rename(remote_name, new_new_remote)
    @api_method
    def remote_list_ref(self):
        """Return {reference: remote_name} associations for recipes."""
        return {r: remote_name for r, remote_name in self._registry.refs.list.items()}
    @api_method
    def remote_add_ref(self, reference, remote_name):
        """Associate a recipe reference with a remote (fails if already set)."""
        reference = ConanFileReference.loads(str(reference), validate=True)
        return self._registry.refs.set(reference, remote_name, check_exists=True)
    @api_method
    def remote_remove_ref(self, reference):
        """Remove the remote association of a recipe reference."""
        reference = ConanFileReference.loads(str(reference), validate=True)
        return self._registry.refs.remove(reference)
    @api_method
    def remote_update_ref(self, reference, remote_name):
        """Update the remote associated with a recipe reference."""
        reference = ConanFileReference.loads(str(reference), validate=True)
        return self._registry.refs.update(reference, remote_name)
    @api_method
    def remote_list_pref(self, reference):
        """Return {package_reference: remote_name} for the packages of 'reference'."""
        reference = ConanFileReference.loads(str(reference), validate=True)
        ret = {}
        tmp = self._registry.prefs.list
        for r, remote in tmp.items():
            pref = PackageReference.loads(r)
            if pref.conan == reference:
                ret[pref.full_repr()] = remote
        return ret
    @api_method
    def remote_add_pref(self, package_reference, remote_name):
        """Associate a package reference with a remote (fails if already set)."""
        p_reference = PackageReference.loads(str(package_reference), validate=True)
        return self._registry.prefs.set(p_reference, remote_name, check_exists=True)
    @api_method
    def remote_remove_pref(self, package_reference):
        """Remove the remote association of a package reference."""
        p_reference = PackageReference.loads(str(package_reference), validate=True)
        return self._registry.prefs.remove(p_reference)
    @api_method
    def remote_update_pref(self, package_reference, remote_name):
        """Update the remote associated with a package reference."""
        p_reference = PackageReference.loads(str(package_reference), validate=True)
        return self._registry.prefs.update(p_reference, remote_name)
    def remote_clean(self):
        """Remove all remotes from the registry."""
        # NOTE(review): not decorated with @api_method, unlike the other
        # public methods -- confirm this is intentional.
        return self._registry.remotes.clean()
    @api_method
    def profile_list(self):
        """List the available profile files."""
        return cmd_profile_list(self._client_cache.profiles_path, self._user_io.out)
    @api_method
    def create_profile(self, profile_name, detect=False):
        """Create a new profile, optionally auto-detecting the current settings."""
        return cmd_profile_create(profile_name, self._client_cache.profiles_path,
                                  self._user_io.out, detect)
    @api_method
    def update_profile(self, profile_name, key, value):
        """Set 'key' to 'value' inside a profile file."""
        return cmd_profile_update(profile_name, key, value, self._client_cache.profiles_path)
    @api_method
    def get_profile_key(self, profile_name, key):
        """Return the value of 'key' from a profile file."""
        return cmd_profile_get(profile_name, key, self._client_cache.profiles_path)
    @api_method
    def delete_profile_key(self, profile_name, key):
        """Delete 'key' from a profile file."""
        return cmd_profile_delete_key(profile_name, key, self._client_cache.profiles_path)
    @api_method
    def read_profile(self, profile=None):
        """Load and return a Profile object (default profile when unspecified)."""
        p, _ = read_profile(profile, get_cwd(), self._client_cache.profiles_path)
        return p
    @api_method
    def get_path(self, reference, package_id=None, path=None, remote_name=None):
        """Return (content, path) of a file inside a recipe or package folder.

        Defaults to conanfile.py for recipes and conaninfo.txt for packages.
        """
        from conans.client.local_file_getter import get_path
        reference = ConanFileReference.loads(reference)
        if not path:
            path = "conanfile.py" if not package_id else "conaninfo.txt"
        if not remote_name:
            return get_path(self._client_cache, reference, package_id, path), path
        else:
            remote = self.get_remote_by_name(remote_name)
            return self._remote_manager.get_path(reference, package_id, path, remote), path
    @api_method
    def export_alias(self, reference, target_reference):
        """Create an alias recipe 'reference' pointing at 'target_reference'."""
        reference = ConanFileReference.loads(reference)
        target_reference = ConanFileReference.loads(target_reference)
        return export_alias(reference, target_reference, self._client_cache)
    @api_method
    def get_default_remote(self):
        """Return the default remote from the registry."""
        return self._registry.remotes.default
    @api_method
    def get_remote_by_name(self, remote_name):
        """Return the remote named 'remote_name' from the registry."""
        return self._registry.remotes.get(remote_name)
# Public alias: 'Conan' is the documented entry point name for the client API.
Conan = ConanAPIV1
def _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd):
if manifests and manifests_interactive:
raise ConanException("Do not specify both manifests and "
"manifests-interactive arguments")
if verify and (manifests or manifests_interactive):
raise ConanException("Do not specify both 'verify' and "
"'manifests' or 'manifests-interactive' arguments")
manifest_folder = verify or manifests or manifests_interactive
if manifest_folder:
if not os.path.isabs(manifest_folder):
if not cwd:
raise ConanException("'cwd' should be defined if the manifest folder is relative.")
manifest_folder = os.path.join(cwd, manifest_folder)
manifest_verify = verify is not None
manifest_interactive = manifests_interactive is not None
else:
manifest_verify = manifest_interactive = False
return manifest_folder, manifest_interactive, manifest_verify
def existing_info_files(folder):
    """Return True when *folder* contains both the conaninfo and build-info files."""
    required = (CONANINFO, BUILD_INFO)
    return all(os.path.exists(os.path.join(folder, name)) for name in required)
def get_conan_runner():
    """Build a ConanRunner configured from the CONAN_* run/log env variables."""
    return ConanRunner(get_env("CONAN_PRINT_RUN_COMMANDS", False),
                       get_env("CONAN_LOG_RUN_TO_FILE", False),
                       get_env("CONAN_LOG_RUN_TO_OUTPUT", True))
def migrate_and_get_client_cache(base_folder, out, storage_folder=None):
    """Create the ClientCache for *base_folder* and run pending migrations on it."""
    cache = ClientCache(base_folder, storage_folder, out)
    # Apply any on-disk migrations for the current client version before use
    ClientMigrator(cache, Version(client_version), out).migrate()
    return cache
import os
import sys
import requests
from collections import OrderedDict
import conans
from conans import __version__ as client_version
from conans.client.cmd.create import create
from conans.client.hook_manager import HookManager
from conans.client.recorder.action_recorder import ActionRecorder
from conans.client.client_cache import ClientCache
from conans.client.conf import MIN_SERVER_COMPATIBLE_VERSION, ConanClientConfigParser
from conans.client.manager import ConanManager
from conans.client.migrations import ClientMigrator
from conans.client.output import ConanOutput, ScopedOutput
from conans.client.profile_loader import read_profile, profile_from_args, \
read_conaninfo_profile
from conans.client.recorder.search_recorder import SearchRecorder
from conans.client.recorder.upload_recoder import UploadRecorder
from conans.client.remote_manager import RemoteManager
from conans.client.remote_registry import RemoteRegistry
from conans.client.rest.auth_manager import ConanApiAuthManager
from conans.client.rest.rest_client import RestApiClient
from conans.client.rest.conan_requester import ConanRequester
from conans.client.rest.version_checker import VersionCheckerRequester
from conans.client.runner import ConanRunner
from conans.client.store.localdb import LocalDB
from conans.client.cmd.test import PackageTester
from conans.client.userio import UserIO
from conans.errors import ConanException
from conans.model.ref import ConanFileReference, PackageReference, check_valid_ref
from conans.model.version import Version
from conans.paths import get_conan_user_home, CONANINFO, BUILD_INFO
from conans.util.env_reader import get_env
from conans.util.files import save_files, exception_message_safe, mkdir
from conans.util.log import configure_logger
from conans.util.tracer import log_command, log_exception
from conans.tools import set_global_instances
from conans.client.cmd.uploader import CmdUpload
from conans.client.cmd.profile import cmd_profile_update, cmd_profile_get,\
cmd_profile_delete_key, cmd_profile_create, cmd_profile_list
from conans.client.cmd.search import Search
from conans.client.cmd.user import users_clean, users_list, user_set
from conans.client.importer import undo_imports, run_imports
from conans.client.cmd.export import cmd_export, export_alias, export_source, export_recipe
from conans.unicode import get_cwd
from conans.client.remover import ConanRemover
from conans.client.cmd.download import download
from conans.model.workspace import Workspace
from conans.client.graph.graph_manager import GraphManager
from conans.client.loader import ConanFileLoader
from conans.client.graph.proxy import ConanProxy
from conans.client.graph.python_requires import ConanPythonRequire
from conans.client.graph.range_resolver import RangeResolver
from conans.client import packager
from conans.client.source import config_source_local
from conans.client.cmd.build import build
from conans.client.cmd.export_pkg import export_pkg
from conans.client import tools
default_manifest_folder = '.conan_manifests'
def get_request_timeout():
    """Read CONAN_REQUEST_TIMEOUT from the environment as a float, or None.

    Raises ConanException when the variable is set to a non-numeric value.
    """
    raw = os.getenv("CONAN_REQUEST_TIMEOUT")
    if raw is None:
        return None
    try:
        return float(raw)
    except ValueError:
        raise ConanException("Specify a numeric parameter for 'request_timeout'")
def get_basic_requester(client_cache):
    """Wrap a fresh requests.Session in a ConanRequester bound to *client_cache*."""
    session = requests.Session()
    return ConanRequester(session, client_cache, get_request_timeout())
def api_method(f):
    """Decorator applied to every public ConanAPIV1 entry point.

    It logs the command invocation, applies the environment variables
    configured in the client cache while the call runs, logs (best-effort)
    any raised exception before re-raising it, and always restores the
    original working directory afterwards.
    """
    from functools import wraps

    @wraps(f)  # preserve f's name/docstring for introspection and log_command
    def wrapper(*args, **kwargs):
        the_self = args[0]
        # Capture the cwd *before* entering the try block: if it were taken
        # inside and get_cwd() failed, the finally clause would raise an
        # UnboundLocalError that masks the original problem.
        curdir = get_cwd()
        try:
            log_command(f.__name__, kwargs)
            with tools.environment_append(the_self._client_cache.conan_config.env_vars):
                return f(*args, **kwargs)
        except Exception as exc:
            msg = exception_message_safe(exc)
            try:
                log_exception(exc, msg)
            except BaseException:
                # Logging must never replace the original error
                pass
            raise
        finally:
            # The wrapped call may chdir internally; always restore
            os.chdir(curdir)
    return wrapper
def _make_abs_path(path, cwd=None, default=None):
cwd = cwd or get_cwd()
if not path:
abs_path = default or cwd
elif os.path.isabs(path):
abs_path = path
else:
abs_path = os.path.normpath(os.path.join(cwd, path))
return abs_path
def _get_conanfile_path(path, cwd, py):
    """Resolve *path* to a concrete conanfile and return its absolute path.

    :param path: file or directory, absolute or relative to *cwd*
    :param cwd: base directory used to absolutize a relative *path*
    :param py: True  -> require conanfile.py
               False -> require conanfile.txt
               None  -> prefer conanfile.py, fall back to conanfile.txt
    :raises ConanException: when no candidate file exists, or when *py* is
        truthy and the resolved file is not a .py conanfile
    """
    # Every path we try is remembered so the error message can list them all
    candidate_paths = list()
    path = _make_abs_path(path, cwd)
    if os.path.isdir(path):
        if py:
            path = os.path.join(path, "conanfile.py")
            candidate_paths.append(path)
        elif py is False:
            path = os.path.join(path, "conanfile.txt")
            candidate_paths.append(path)
        else:
            # py is None: try conanfile.py first, then conanfile.txt
            path_py = os.path.join(path, "conanfile.py")
            candidate_paths.append(path_py)
            if os.path.exists(path_py):
                path = path_py
            else:
                path = os.path.join(path, "conanfile.txt")
                candidate_paths.append(path)
    else:
        # A file was given directly; it is the only candidate
        candidate_paths.append(path)
    if not os.path.isfile(path):
        raise ConanException("Conanfile not found at %s" % " or ".join(candidate_paths))
    if py and not path.endswith(".py"):
        raise ConanException("A conanfile.py is needed, " + path + " is not acceptable")
    return path
class ConanAPIV1(object):
@staticmethod
def instance_remote_manager(requester, client_cache, user_io, _client_version,
min_server_compatible_version, hook_manager):
version_checker_req = VersionCheckerRequester(requester, _client_version,
min_server_compatible_version,
user_io.out)
put_headers = client_cache.read_put_headers()
rest_api_client = RestApiClient(user_io.out, requester=version_checker_req,
put_headers=put_headers)
localdb = LocalDB(client_cache.localdb)
auth_manager = ConanApiAuthManager(rest_api_client, user_io, localdb)
remote_manager = RemoteManager(client_cache, auth_manager, user_io.out, hook_manager)
return localdb, rest_api_client, remote_manager
@staticmethod
def factory(interactive=None):
color_set = "CONAN_COLOR_DISPLAY" in os.environ
if ((color_set and get_env("CONAN_COLOR_DISPLAY", 1))
or (not color_set
and hasattr(sys.stdout, "isatty")
and sys.stdout.isatty())):
import colorama
if get_env("PYCHARM_HOSTED"):
colorama.init(convert=False, strip=False)
else:
colorama.init()
color = True
else:
color = False
out = ConanOutput(sys.stdout, color)
user_io = UserIO(out=out)
try:
user_home = get_conan_user_home()
client_cache = migrate_and_get_client_cache(user_home, out)
sys.path.append(os.path.join(user_home, "python"))
except Exception as e:
out.error(str(e))
raise
with tools.environment_append(client_cache.conan_config.env_vars):
conans.util.log.logger = configure_logger()
hook_manager = HookManager(client_cache.hooks_path, get_env("CONAN_HOOKS", list()),
user_io.out)
requester = get_basic_requester(client_cache)
_, _, remote_manager = ConanAPIV1.instance_remote_manager(
requester,
client_cache, user_io,
Version(client_version),
Version(MIN_SERVER_COMPATIBLE_VERSION),
hook_manager)
set_global_instances(out, requester)
if interactive is None:
interactive = not get_env("CONAN_NON_INTERACTIVE", False)
conan = ConanAPIV1(client_cache, user_io, get_conan_runner(), remote_manager,
hook_manager, interactive=interactive)
return conan, client_cache, user_io
def __init__(self, client_cache, user_io, runner, remote_manager, hook_manager,
interactive=True):
assert isinstance(user_io, UserIO)
assert isinstance(client_cache, ClientCache)
self._client_cache = client_cache
self._user_io = user_io
self._runner = runner
self._remote_manager = remote_manager
self._registry = RemoteRegistry(self._client_cache.registry, self._user_io.out)
if not interactive:
self._user_io.disable_input()
self._proxy = ConanProxy(client_cache, self._user_io.out, remote_manager,
registry=self._registry)
resolver = RangeResolver(self._user_io.out, client_cache, self._proxy)
python_requires = ConanPythonRequire(self._proxy, resolver)
self._loader = ConanFileLoader(self._runner, self._user_io.out, python_requires)
self._graph_manager = GraphManager(self._user_io.out, self._client_cache, self._registry,
self._remote_manager, self._loader, self._proxy,
resolver)
self._hook_manager = hook_manager
def _init_manager(self, action_recorder):
return ConanManager(self._client_cache, self._user_io,
self._remote_manager, action_recorder, self._registry,
self._graph_manager, self._hook_manager)
@api_method
def new(self, name, header=False, pure_c=False, test=False, exports_sources=False, bare=False,
cwd=None, visual_versions=None, linux_gcc_versions=None, linux_clang_versions=None,
osx_clang_versions=None, shared=None, upload_url=None, gitignore=None,
gitlab_gcc_versions=None, gitlab_clang_versions=None,
circleci_gcc_versions=None, circleci_clang_versions=None, circleci_osx_versions=None):
from conans.client.cmd.new import cmd_new
cwd = os.path.abspath(cwd or get_cwd())
files = cmd_new(name, header=header, pure_c=pure_c, test=test,
exports_sources=exports_sources, bare=bare,
visual_versions=visual_versions,
linux_gcc_versions=linux_gcc_versions,
linux_clang_versions=linux_clang_versions,
osx_clang_versions=osx_clang_versions, shared=shared,
upload_url=upload_url, gitignore=gitignore,
gitlab_gcc_versions=gitlab_gcc_versions,
gitlab_clang_versions=gitlab_clang_versions,
circleci_gcc_versions=circleci_gcc_versions,
circleci_clang_versions=circleci_clang_versions,
circleci_osx_versions=circleci_osx_versions)
save_files(cwd, files)
for f in sorted(files):
self._user_io.out.success("File saved: %s" % f)
@api_method
def inspect(self, path, attributes, remote_name=None):
try:
reference = ConanFileReference.loads(path)
except ConanException:
reference = None
cwd = get_cwd()
conanfile_path = _get_conanfile_path(path, cwd, py=True)
else:
update = True if remote_name else False
result = self._proxy.get_recipe(reference, update, update, remote_name,
ActionRecorder())
conanfile_path, _, _, reference = result
conanfile = self._loader.load_basic(conanfile_path, self._user_io.out)
result = OrderedDict()
if not attributes:
attributes = ['name', 'version', 'url', 'homepage', 'license', 'author',
'description', 'topics', 'generators', 'exports', 'exports_sources',
'short_paths', 'apply_env', 'build_policy', 'settings', 'options',
'default_options']
for attribute in attributes:
try:
attr = getattr(conanfile, attribute)
result[attribute] = attr
except AttributeError as e:
raise ConanException(str(e))
return result
@api_method
def test(self, path, reference, profile_name=None, settings=None, options=None, env=None,
remote_name=None, update=False, build_modes=None, cwd=None, test_build_folder=None):
settings = settings or []
options = options or []
env = env or []
conanfile_path = _get_conanfile_path(path, cwd, py=True)
cwd = cwd or get_cwd()
profile = profile_from_args(profile_name, settings, options, env, cwd,
self._client_cache)
reference = ConanFileReference.loads(reference)
recorder = ActionRecorder()
manager = self._init_manager(recorder)
pt = PackageTester(manager, self._user_io)
pt.install_build_and_test(conanfile_path, reference, profile, remote_name,
update, build_modes=build_modes,
test_build_folder=test_build_folder)
@api_method
def create(self, conanfile_path, name=None, version=None, user=None, channel=None,
profile_name=None, settings=None,
options=None, env=None, test_folder=None, not_export=False,
build_modes=None,
keep_source=False, keep_build=False, verify=None,
manifests=None, manifests_interactive=None,
remote_name=None, update=False, cwd=None, test_build_folder=None):
settings = settings or []
options = options or []
env = env or []
try:
cwd = cwd or os.getcwd()
recorder = ActionRecorder()
conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
reference, conanfile = self._loader.load_export(conanfile_path, name, version, user,
channel)
keep_source = keep_source or keep_build
if not not_export:
cmd_export(conanfile_path, conanfile, reference, keep_source, self._user_io.out,
self._client_cache, self._hook_manager)
recorder.recipe_exported(reference)
if build_modes is None:
build_modes = [conanfile.name]
manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
manifest_folder, manifest_interactive, manifest_verify = manifests
profile = profile_from_args(profile_name, settings, options, env,
cwd, self._client_cache)
manager = self._init_manager(recorder)
recorder.add_recipe_being_developed(reference)
create(reference, manager, self._user_io, profile, remote_name, update, build_modes,
manifest_folder, manifest_verify, manifest_interactive, keep_build,
test_build_folder, test_folder, conanfile_path)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def export_pkg(self, conanfile_path, name, channel, source_folder=None, build_folder=None,
package_folder=None, install_folder=None, profile_name=None, settings=None,
options=None, env=None, force=False, user=None, version=None, cwd=None):
settings = settings or []
options = options or []
env = env or []
cwd = cwd or get_cwd()
try:
recorder = ActionRecorder()
if install_folder and not existing_info_files(_make_abs_path(install_folder, cwd)):
raise ConanException("The specified install folder doesn't contain '%s' and '%s' "
"files" % (CONANINFO, BUILD_INFO))
conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
if package_folder:
if build_folder or source_folder:
raise ConanException("package folder definition incompatible with build "
"and source folders")
package_folder = _make_abs_path(package_folder, cwd)
build_folder = _make_abs_path(build_folder, cwd)
install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
source_folder = _make_abs_path(source_folder, cwd,
default=os.path.dirname(conanfile_path))
# Checks that no both settings and info files are specified
infos_present = existing_info_files(install_folder)
if profile_name or settings or options or env or not infos_present:
profile = profile_from_args(profile_name, settings, options, env=env,
cwd=cwd, client_cache=self._client_cache)
else:
profile = read_conaninfo_profile(install_folder)
reference, conanfile = self._loader.load_export(conanfile_path, name, version, user,
channel)
recorder.recipe_exported(reference)
recorder.add_recipe_being_developed(reference)
cmd_export(conanfile_path, conanfile, reference, False, self._user_io.out,
self._client_cache, self._hook_manager)
export_pkg(self._client_cache, self._graph_manager, self._hook_manager, recorder,
self._user_io.out,
reference, source_folder=source_folder, build_folder=build_folder,
package_folder=package_folder, install_folder=install_folder,
profile=profile, force=force)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
    @api_method
    def download(self, reference, remote_name=None, package=None, recipe=False):
        """Download a recipe, and optionally packages, from a remote.

        :param reference: full, wildcard-free reference string
        :param remote_name: remote to download from -- presumably the default
            remote when None; TODO confirm against the download() helper
        :param package: package id(s) to download; incompatible with *recipe*
        :param recipe: when True, download only the recipe
        :raises ConanException: on conflicting arguments or invalid reference
        """
        if package and recipe:
            raise ConanException("recipe parameter cannot be used together with package")
        # Install packages without settings (fixed ids or all)
        conan_ref = ConanFileReference.loads(reference)
        if check_valid_ref(conan_ref, allow_pattern=False):
            recorder = ActionRecorder()
            download(conan_ref, package, remote_name, recipe, self._registry, self._remote_manager,
                     self._client_cache, self._user_io.out, recorder, self._loader,
                     self._hook_manager)
        else:
            raise ConanException("Provide a valid full reference without wildcards.")
@api_method
def install_reference(self, reference, settings=None, options=None, env=None,
remote_name=None, verify=None, manifests=None,
manifests_interactive=None, build=None, profile_name=None,
update=False, generators=None, install_folder=None, cwd=None):
try:
recorder = ActionRecorder()
cwd = cwd or os.getcwd()
install_folder = _make_abs_path(install_folder, cwd)
manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
manifest_folder, manifest_interactive, manifest_verify = manifests
profile = profile_from_args(profile_name, settings, options, env, cwd,
self._client_cache)
if not generators: # We don't want the default txt
generators = False
mkdir(install_folder)
manager = self._init_manager(recorder)
manager.install(reference=reference, install_folder=install_folder,
remote_name=remote_name, profile=profile, build_modes=build,
update=update, manifest_folder=manifest_folder,
manifest_verify=manifest_verify,
manifest_interactive=manifest_interactive,
generators=generators)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def install(self, path="", settings=None, options=None, env=None,
remote_name=None, verify=None, manifests=None,
manifests_interactive=None, build=None, profile_name=None,
update=False, generators=None, no_imports=False, install_folder=None, cwd=None):
try:
recorder = ActionRecorder()
cwd = cwd or os.getcwd()
manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
manifest_folder, manifest_interactive, manifest_verify = manifests
profile = profile_from_args(profile_name, settings, options, env, cwd,
self._client_cache)
wspath = _make_abs_path(path, cwd)
if install_folder:
if os.path.isabs(install_folder):
wsinstall_folder = install_folder
else:
wsinstall_folder = os.path.join(cwd, install_folder)
else:
wsinstall_folder = None
workspace = Workspace.get_workspace(wspath, wsinstall_folder)
if workspace:
self._user_io.out.success("Using conanws.yml file from %s" % workspace._base_folder)
manager = self._init_manager(recorder)
manager.install_workspace(profile, workspace, remote_name, build, update)
return
install_folder = _make_abs_path(install_folder, cwd)
conanfile_path = _get_conanfile_path(path, cwd, py=None)
manager = self._init_manager(recorder)
manager.install(reference=conanfile_path,
install_folder=install_folder,
remote_name=remote_name,
profile=profile,
build_modes=build,
update=update,
manifest_folder=manifest_folder,
manifest_verify=manifest_verify,
manifest_interactive=manifest_interactive,
generators=generators,
no_imports=no_imports)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def config_get(self, item):
config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
self._user_io.out.info(config_parser.get_item(item))
return config_parser.get_item(item)
@api_method
def config_set(self, item, value):
config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
config_parser.set_item(item, value)
self._client_cache.invalidate()
@api_method
def config_rm(self, item):
config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
config_parser.rm_item(item)
self._client_cache.invalidate()
    @api_method
    def config_install(self, item, verify_ssl, config_type=None, args=None):
        """Install configuration from *item* into the client cache.

        An existing relative path is absolutized up front, before any later
        directory change could invalidate it.
        """
        if item is not None and os.path.exists(item) and not os.path.isabs(item):
            item = os.path.abspath(item)
        from conans.client.conf.config_installer import configuration_install
        # NOTE(review): reaches through private attributes to reuse the shared
        # requester; consider exposing it on RemoteManager instead
        return configuration_install(item, self._client_cache, self._user_io.out, verify_ssl,
                                     requester=self._remote_manager._auth_manager._rest_client.requester,
                                     config_type=config_type, args=args)
    def _info_get_profile(self, reference, install_folder, profile_name, settings, options, env):
        """Resolve the (reference, profile) pair used by the info_* commands.

        *reference* may be a recipe reference string or a path to a local
        conanfile. The profile comes from an existing install folder when one
        is usable; otherwise it is built from the explicit arguments.
        """
        cwd = get_cwd()
        try:
            # First interpretation: a full recipe reference string
            reference = ConanFileReference.loads(reference)
        except ConanException:
            # Fallback: treat it as a path to a local conanfile
            reference = _get_conanfile_path(reference, cwd=None, py=None)
        if install_folder or not (profile_name or settings or options or env):
            # Explicit install folder, or no profile arguments at all:
            # prefer the profile recorded by a previous install, if present
            install_folder = _make_abs_path(install_folder, cwd)
            if existing_info_files(install_folder):
                return reference, read_conaninfo_profile(install_folder)
        return reference, profile_from_args(profile_name, settings, options, env=env,
                                            cwd=cwd, client_cache=self._client_cache)
@api_method
def info_build_order(self, reference, settings=None, options=None, env=None,
profile_name=None, remote_name=None, build_order=None, check_updates=None,
install_folder=None):
reference, profile = self._info_get_profile(reference, install_folder, profile_name,
settings, options, env)
recorder = ActionRecorder()
deps_graph, _, _ = self._graph_manager.load_graph(reference, None, profile, ["missing"],
check_updates, False, remote_name,
recorder, workspace=None)
return deps_graph.build_order(build_order)
@api_method
def info_nodes_to_build(self, reference, build_modes, settings=None, options=None, env=None,
profile_name=None, remote_name=None, check_updates=None,
install_folder=None):
reference, profile = self._info_get_profile(reference, install_folder, profile_name,
settings, options, env)
recorder = ActionRecorder()
deps_graph, conanfile, _ = self._graph_manager.load_graph(reference, None, profile,
build_modes, check_updates,
False, remote_name, recorder,
workspace=None)
nodes_to_build = deps_graph.nodes_to_build()
return nodes_to_build, conanfile
@api_method
def info(self, reference, remote_name=None, settings=None, options=None, env=None,
profile_name=None, update=False, install_folder=None, build=None):
reference, profile = self._info_get_profile(reference, install_folder, profile_name,
settings, options, env)
recorder = ActionRecorder()
deps_graph, conanfile, _ = self._graph_manager.load_graph(reference, None, profile, build,
update, False, remote_name,
recorder, workspace=None)
return deps_graph, conanfile
@api_method
def build(self, conanfile_path, source_folder=None, package_folder=None, build_folder=None,
install_folder=None, should_configure=True, should_build=True, should_install=True,
should_test=True, cwd=None):
cwd = cwd or get_cwd()
conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
build_folder = _make_abs_path(build_folder, cwd)
install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
source_folder = _make_abs_path(source_folder, cwd, default=os.path.dirname(conanfile_path))
default_pkg_folder = os.path.join(build_folder, "package")
package_folder = _make_abs_path(package_folder, cwd, default=default_pkg_folder)
build(self._graph_manager, self._hook_manager, conanfile_path, self._user_io.out,
source_folder, build_folder, package_folder, install_folder,
should_configure=should_configure, should_build=should_build,
should_install=should_install, should_test=should_test)
@api_method
def package(self, path, build_folder, package_folder, source_folder=None, install_folder=None,
cwd=None):
cwd = cwd or get_cwd()
conanfile_path = _get_conanfile_path(path, cwd, py=True)
build_folder = _make_abs_path(build_folder, cwd)
install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
source_folder = _make_abs_path(source_folder, cwd, default=os.path.dirname(conanfile_path))
default_pkg_folder = os.path.join(build_folder, "package")
package_folder = _make_abs_path(package_folder, cwd, default=default_pkg_folder)
if package_folder == build_folder:
raise ConanException("Cannot 'conan package' to the build folder. "
"--build-folder and package folder can't be the same")
output = ScopedOutput("PROJECT", self._user_io.out)
conanfile = self._graph_manager.load_consumer_conanfile(conanfile_path, install_folder,
output, deps_info_required=True)
packager.create_package(conanfile, None, source_folder, build_folder, package_folder,
install_folder, output, self._hook_manager, conanfile_path, None,
local=True, copy_info=True)
@api_method
def source(self, path, source_folder=None, info_folder=None, cwd=None):
cwd = cwd or get_cwd()
conanfile_path = _get_conanfile_path(path, cwd, py=True)
source_folder = _make_abs_path(source_folder, cwd)
info_folder = _make_abs_path(info_folder, cwd)
mkdir(source_folder)
if not os.path.exists(info_folder):
raise ConanException("Specified info-folder doesn't exist")
output = ScopedOutput("PROJECT", self._user_io.out)
conanfile = self._graph_manager.load_consumer_conanfile(conanfile_path, info_folder, output)
conanfile_folder = os.path.dirname(conanfile_path)
if conanfile_folder != source_folder:
output.info("Executing exports to: %s" % source_folder)
export_recipe(conanfile, conanfile_folder, source_folder, output)
export_source(conanfile, conanfile_folder, source_folder, output)
config_source_local(source_folder, conanfile, output, conanfile_path,
self._hook_manager)
@api_method
def imports(self, path, dest=None, info_folder=None, cwd=None):
cwd = cwd or get_cwd()
info_folder = _make_abs_path(info_folder, cwd)
dest = _make_abs_path(dest, cwd)
mkdir(dest)
conanfile_abs_path = _get_conanfile_path(path, cwd, py=None)
output = ScopedOutput("PROJECT", self._user_io.out)
conanfile = self._graph_manager.load_consumer_conanfile(conanfile_abs_path, info_folder,
output, deps_info_required=True)
run_imports(conanfile, dest, output)
@api_method
def imports_undo(self, manifest_path):
cwd = get_cwd()
manifest_path = _make_abs_path(manifest_path, cwd)
undo_imports(manifest_path, self._user_io.out)
@api_method
def export(self, path, name, version, user, channel, keep_source=False, cwd=None):
conanfile_path = _get_conanfile_path(path, cwd, py=True)
reference, conanfile = self._loader.load_export(conanfile_path, name, version, user,
channel)
cmd_export(conanfile_path, conanfile, reference, keep_source, self._user_io.out,
self._client_cache, self._hook_manager)
@api_method
def remove(self, pattern, query=None, packages=None, builds=None, src=False, force=False,
remote_name=None, outdated=False):
remover = ConanRemover(self._client_cache, self._remote_manager, self._user_io,
self._registry)
remover.remove(pattern, remote_name, src, builds, packages, force=force,
packages_query=query, outdated=outdated)
@api_method
def copy(self, reference, user_channel, force=False, packages=None):
from conans.client.cmd.copy import cmd_copy
reference = ConanFileReference.loads(str(reference))
cmd_copy(reference, user_channel, packages, self._client_cache,
self._user_io, self._remote_manager, self._registry, self._loader, force=force)
@api_method
def authenticate(self, name, password, remote_name):
remote = self.get_remote_by_name(remote_name)
_, remote_name, prev_user, user = self._remote_manager.authenticate(remote, name, password)
return remote_name, prev_user, user
@api_method
def user_set(self, user, remote_name=None):
remote = (self.get_default_remote() if not remote_name
else self.get_remote_by_name(remote_name))
return user_set(self._client_cache.localdb, user, remote)
@api_method
def users_clean(self):
users_clean(self._client_cache.localdb)
@api_method
def users_list(self, remote_name=None):
info = {"error": False, "remotes": []}
remotes = [self.get_remote_by_name(remote_name)] if remote_name else self.remote_list()
try:
info["remotes"] = users_list(self._client_cache.localdb, remotes)
return info
except ConanException as exc:
info["error"] = True
exc.info = info
raise
@api_method
def search_recipes(self, pattern, remote_name=None, case_sensitive=False):
recorder = SearchRecorder()
search = Search(self._client_cache, self._remote_manager, self._registry)
try:
references = search.search_recipes(pattern, remote_name, case_sensitive)
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
for remote_name, refs in references.items():
for ref in refs:
recorder.add_recipe(remote_name, ref, with_packages=False)
return recorder.get_info()
@api_method
def search_packages(self, reference, query=None, remote_name=None, outdated=False):
recorder = SearchRecorder()
search = Search(self._client_cache, self._remote_manager, self._registry)
try:
reference = ConanFileReference.loads(str(reference))
references = search.search_packages(reference, remote_name, query=query,
outdated=outdated)
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
for remote_name, remote_ref in references.items():
recorder.add_recipe(remote_name, reference)
if remote_ref.ordered_packages:
for package_id, properties in remote_ref.ordered_packages.items():
package_recipe_hash = properties.get("recipe_hash", None)
recorder.add_package(remote_name, reference,
package_id, properties.get("options", []),
properties.get("settings", []),
properties.get("full_requires", []),
remote_ref.recipe_hash != package_recipe_hash)
return recorder.get_info()
@api_method
def upload(self, pattern, package=None, remote_name=None, all_packages=False, confirm=False,
retry=2, retry_wait=5, integrity_check=False, policy=None, query=None):
recorder = UploadRecorder()
uploader = CmdUpload(self._client_cache, self._user_io, self._remote_manager,
self._registry, self._loader, self._hook_manager)
try:
uploader.upload(recorder, pattern, package, all_packages, confirm, retry,
retry_wait, integrity_check, policy, remote_name, query=query)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def remote_list(self):
return self._registry.remotes.list
@api_method
def remote_add(self, remote_name, url, verify_ssl=True, insert=None, force=None):
return self._registry.remotes.add(remote_name, url, verify_ssl, insert, force)
@api_method
def remote_remove(self, remote_name):
return self._registry.remotes.remove(remote_name)
@api_method
def remote_update(self, remote_name, url, verify_ssl=True, insert=None):
return self._registry.remotes.update(remote_name, url, verify_ssl, insert)
@api_method
def remote_rename(self, remote_name, new_new_remote):
return self._registry.remotes.rename(remote_name, new_new_remote)
@api_method
def remote_list_ref(self):
return {r: remote_name for r, remote_name in self._registry.refs.list.items()}
@api_method
def remote_add_ref(self, reference, remote_name):
reference = ConanFileReference.loads(str(reference), validate=True)
return self._registry.refs.set(reference, remote_name, check_exists=True)
@api_method
def remote_remove_ref(self, reference):
reference = ConanFileReference.loads(str(reference), validate=True)
return self._registry.refs.remove(reference)
@api_method
def remote_update_ref(self, reference, remote_name):
reference = ConanFileReference.loads(str(reference), validate=True)
return self._registry.refs.update(reference, remote_name)
@api_method
def remote_list_pref(self, reference):
reference = ConanFileReference.loads(str(reference), validate=True)
ret = {}
tmp = self._registry.prefs.list
for r, remote in tmp.items():
pref = PackageReference.loads(r)
if pref.conan == reference:
ret[pref.full_repr()] = remote
return ret
@api_method
def remote_add_pref(self, package_reference, remote_name):
p_reference = PackageReference.loads(str(package_reference), validate=True)
return self._registry.prefs.set(p_reference, remote_name, check_exists=True)
@api_method
def remote_remove_pref(self, package_reference):
p_reference = PackageReference.loads(str(package_reference), validate=True)
return self._registry.prefs.remove(p_reference)
@api_method
def remote_update_pref(self, package_reference, remote_name):
p_reference = PackageReference.loads(str(package_reference), validate=True)
return self._registry.prefs.update(p_reference, remote_name)
    # NOTE(review): unlike the sibling remote_* methods, remote_clean is not
    # decorated with @api_method -- confirm whether that is intentional.
    def remote_clean(self):
        """Remove every configured remote from the registry."""
        return self._registry.remotes.clean()
@api_method
def profile_list(self):
return cmd_profile_list(self._client_cache.profiles_path, self._user_io.out)
@api_method
def create_profile(self, profile_name, detect=False):
return cmd_profile_create(profile_name, self._client_cache.profiles_path,
self._user_io.out, detect)
@api_method
def update_profile(self, profile_name, key, value):
return cmd_profile_update(profile_name, key, value, self._client_cache.profiles_path)
@api_method
def get_profile_key(self, profile_name, key):
return cmd_profile_get(profile_name, key, self._client_cache.profiles_path)
@api_method
def delete_profile_key(self, profile_name, key):
return cmd_profile_delete_key(profile_name, key, self._client_cache.profiles_path)
@api_method
def read_profile(self, profile=None):
p, _ = read_profile(profile, get_cwd(), self._client_cache.profiles_path)
return p
@api_method
def get_path(self, reference, package_id=None, path=None, remote_name=None):
from conans.client.local_file_getter import get_path
reference = ConanFileReference.loads(reference)
if not path:
path = "conanfile.py" if not package_id else "conaninfo.txt"
if not remote_name:
return get_path(self._client_cache, reference, package_id, path), path
else:
remote = self.get_remote_by_name(remote_name)
return self._remote_manager.get_path(reference, package_id, path, remote), path
@api_method
def export_alias(self, reference, target_reference):
reference = ConanFileReference.loads(reference)
target_reference = ConanFileReference.loads(target_reference)
return export_alias(reference, target_reference, self._client_cache)
@api_method
def get_default_remote(self):
return self._registry.remotes.default
@api_method
def get_remote_by_name(self, remote_name):
return self._registry.remotes.get(remote_name)
Conan = ConanAPIV1
def _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd):
if manifests and manifests_interactive:
raise ConanException("Do not specify both manifests and "
"manifests-interactive arguments")
if verify and (manifests or manifests_interactive):
raise ConanException("Do not specify both 'verify' and "
"'manifests' or 'manifests-interactive' arguments")
manifest_folder = verify or manifests or manifests_interactive
if manifest_folder:
if not os.path.isabs(manifest_folder):
if not cwd:
raise ConanException("'cwd' should be defined if the manifest folder is relative.")
manifest_folder = os.path.join(cwd, manifest_folder)
manifest_verify = verify is not None
manifest_interactive = manifests_interactive is not None
else:
manifest_verify = manifest_interactive = False
return manifest_folder, manifest_interactive, manifest_verify
def existing_info_files(folder):
    """Return True when *folder* already contains both the conaninfo and build-info files."""
    conaninfo = os.path.join(folder, CONANINFO)
    build_info = os.path.join(folder, BUILD_INFO)
    return os.path.exists(conaninfo) and os.path.exists(build_info)
def get_conan_runner():
    """Build a ConanRunner configured from the CONAN_* environment flags."""
    return ConanRunner(
        get_env("CONAN_PRINT_RUN_COMMANDS", False),
        get_env("CONAN_LOG_RUN_TO_FILE", False),
        get_env("CONAN_LOG_RUN_TO_OUTPUT", True),
    )
def migrate_and_get_client_cache(base_folder, out, storage_folder=None):
    """Create the client cache for *base_folder*, running pending migrations first."""
    cache = ClientCache(base_folder, storage_folder, out)
    ClientMigrator(cache, Version(client_version), out).migrate()
    return cache
| true | true |
f713a7ea6776cb4b67b41e994ff550f530ea010a | 2,778 | py | Python | leaf/selling/order/manager.py | guiqiqi/leaf | 79e34f4b8fba8c6fd208b5a3049103dca2064ab5 | [
"Apache-2.0"
] | 119 | 2020-01-30T04:25:03.000Z | 2022-03-27T07:15:45.000Z | leaf/selling/order/manager.py | guiqiqi/leaf | 79e34f4b8fba8c6fd208b5a3049103dca2064ab5 | [
"Apache-2.0"
] | 8 | 2020-02-02T05:49:47.000Z | 2021-01-25T03:31:09.000Z | leaf/selling/order/manager.py | guiqiqi/leaf | 79e34f4b8fba8c6fd208b5a3049103dca2064ab5 | [
"Apache-2.0"
] | 11 | 2020-01-31T15:07:11.000Z | 2021-03-24T03:47:48.000Z | """
Automatically schedules order status transitions
using a finite state machine built from the
pre-defined events and statuses.
"""
# pylint: disable=arguments-differ
from typing import NoReturn
from ...core.tools import web
from ...core.algorithm import fsm
from . import events
from . import status
from . import settings
# Status-transfer table: rows of (source status, triggering event, target
# status); each row is registered into every StatusManager instance.
_TransferTable = (
    (status.Created, events.Confirm, status.Confirmed),
    (status.Created, events.UserClose, status.Closed),
    (status.Created, events.OrderTimedOut, status.Closed),
    (status.Confirmed, events.Paying, status.Paying),
    (status.Confirmed, events.UserClose, status.Closed),
    (status.Confirmed, events.OrderTimedOut, status.Closed),
    (status.Paying, events.PayingSuccess, status.Paid),
    (status.Paying, events.PayingFailed, status.PayFailed),
    (status.PayFailed, events.OrderTimedOut, status.Closed),
    (status.PayFailed, events.OrderRetry, status.Created),
    (status.Paid, events.Shipped, status.Shipping),
    (status.Shipping, events.Delieverd, status.Delieverd),
    (status.Delieverd, events.Recieved, status.Completed),
    (status.Delieverd, events.RecieveTimingExcced, status.Completed),
    (status.Completed, events.RequestRefund, status.RefundReviewing),
    (status.RefundReviewing, events.RefundApproved, status.Refunding),
    (status.RefundReviewing, events.RefundDenied, status.Completed),
    (status.Refunding, events.RefundSuccess, status.Closed),
    (status.Refunding, events.RefundFailed, status.Completed)
)
class StatusManager(fsm.Machine):
    """Finite-state-machine based order status manager."""

    # NOTE: the original annotated these methods "-> NoReturn", which means
    # "never returns"; they all return normally, so the annotations were
    # corrected to "-> None".
    def __init__(self, orderid: str) -> None:
        """Build a manager for a single order.

        0. Name the machine after the order id.
        1. Register the status-transfer table.
        2. The initial state is entered later via ``start``.
        """
        super().__init__(str(orderid))

        # Register every (source, event, target) transition.
        for record in _TransferTable:
            self.add(*record)

    def start(self) -> None:
        """Start the machine from the order-created state."""
        super().start(status.Created)

    def json(self) -> str:
        """Export the current state and the event history as a JSON string."""
        if self.current is not None:
            # Dict form of the current state.
            current = {
                settings.Status.Key.Code: self.current.code,
                settings.Status.Key.Description: self.current.description,
                settings.Status.Key.Extra: self.current.extra
            }
        else:
            current = None

        # Serialize the recorded events.
        mapping = map(lambda event: {
            settings.Events.Key.Time: event.time,
            settings.Events.Key.OperationCode: event.opcode,
            settings.Events.Key.Extra: event.extra,
            settings.Events.Key.Description: event.description
        }, self.events)

        return web.JSONcreater({
            settings.Manager.Key.Name: self.name,
            settings.Manager.Key.CurrentStat: current,
            settings.Manager.Key.EventsRecorder: list(mapping)
        })
| 33.46988 | 74 | 0.658747 |
from typing import NoReturn
from ...core.tools import web
from ...core.algorithm import fsm
from . import events
from . import status
from . import settings
_TransferTable = (
(status.Created, events.Confirm, status.Confirmed),
(status.Created, events.UserClose, status.Closed),
(status.Created, events.OrderTimedOut, status.Closed),
(status.Confirmed, events.Paying, status.Paying),
(status.Confirmed, events.UserClose, status.Closed),
(status.Confirmed, events.OrderTimedOut, status.Closed),
(status.Paying, events.PayingSuccess, status.Paid),
(status.Paying, events.PayingFailed, status.PayFailed),
(status.PayFailed, events.OrderTimedOut, status.Closed),
(status.PayFailed, events.OrderRetry, status.Created),
(status.Paid, events.Shipped, status.Shipping),
(status.Shipping, events.Delieverd, status.Delieverd),
(status.Delieverd, events.Recieved, status.Completed),
(status.Delieverd, events.RecieveTimingExcced, status.Completed),
(status.Completed, events.RequestRefund, status.RefundReviewing),
(status.RefundReviewing, events.RefundApproved, status.Refunding),
(status.RefundReviewing, events.RefundDenied, status.Completed),
(status.Refunding, events.RefundSuccess, status.Closed),
(status.Refunding, events.RefundFailed, status.Completed)
)
class StatusManager(fsm.Machine):
def __init__(self, orderid: str) -> NoReturn:
super().__init__(str(orderid))
for record in _TransferTable:
self.add(*record)
def start(self) -> NoReturn:
super().start(status.Created)
def json(self) -> str:
if not self.current is None:
current = {
settings.Status.Key.Code: self.current.code,
settings.Status.Key.Description: self.current.description,
settings.Status.Key.Extra: self.current.extra
}
else:
current = None
mapping = map(lambda event: {
settings.Events.Key.Time: event.time,
settings.Events.Key.OperationCode: event.opcode,
settings.Events.Key.Extra: event.extra,
settings.Events.Key.Description: event.description
}, self.events)
return web.JSONcreater({
settings.Manager.Key.Name: self.name,
settings.Manager.Key.CurrentStat: current,
settings.Manager.Key.EventsRecorder: list(mapping)
})
| true | true |
f713a8a1a1549acb0e7bad080484cb845b0706a9 | 1,917 | py | Python | numpy/ma/tests/test_regression.py | WeatherGod/numpy | 5be45b280b258e158b93163b937f8f9c08d30393 | [
"BSD-3-Clause"
] | null | null | null | numpy/ma/tests/test_regression.py | WeatherGod/numpy | 5be45b280b258e158b93163b937f8f9c08d30393 | [
"BSD-3-Clause"
] | null | null | null | numpy/ma/tests/test_regression.py | WeatherGod/numpy | 5be45b280b258e158b93163b937f8f9c08d30393 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import division, absolute_import, print_function
import numpy as np
import numpy.ma as ma
from numpy.testing import *
from numpy.compat import sixu
rlevel = 1
class TestRegression(TestCase):
    """Regression tests for historical numpy.ma bug tickets."""

    def test_masked_array_create(self, level=rlevel):
        """Ticket #17"""
        masked = np.ma.masked_array([0, 1, 2, 3, 0, 4, 5, 6],
                                    mask=[0, 0, 0, 1, 1, 1, 0, 0])
        assert_array_equal(np.ma.nonzero(masked), [[1, 2, 6, 7]])

    def test_masked_array(self, level=rlevel):
        """Ticket #61"""
        # Scalar data with a one-element mask must not crash.
        np.ma.array(1, mask=[1])

    def test_mem_masked_where(self, level=rlevel):
        """Ticket #62"""
        from numpy.ma import masked_where, MaskType
        base = np.zeros((1, 1))
        cond = np.zeros(base.shape, MaskType)
        masked = masked_where(cond, base)
        base - masked

    def test_masked_array_multiply(self, level=rlevel):
        """Ticket #254"""
        left = np.ma.zeros((4, 1))
        left[2, 0] = np.ma.masked
        right = np.zeros((4, 2))
        left * right
        right * left

    def test_masked_array_repeat(self, level=rlevel):
        """Ticket #271"""
        np.ma.array([1], mask=False).repeat(10)

    def test_masked_array_repr_unicode(self):
        """Ticket #1256"""
        repr(np.ma.array(sixu("Unicode")))

    def test_atleast_2d(self):
        """Ticket #1559"""
        one_d = np.ma.masked_array([0.0, 1.2, 3.5], mask=[False, True, False])
        two_d = np.atleast_2d(one_d)
        assert_(one_d.mask.ndim == 1)
        assert_(two_d.mask.ndim == 2)

    def test_set_fill_value_unicode_py3(self):
        """Ticket #2733"""
        arr = np.ma.masked_array(['a', 'b', 'c'], mask=[1, 0, 0])
        arr.fill_value = 'X'
        assert_(arr.fill_value == 'X')

    def test_var_sets_maskedarray_scalar(self):
        """Issue gh-2757"""
        arr = np.ma.array(np.arange(5), mask=True)
        out = np.ma.array(-1, dtype=float)
        arr.var(out=out)
        assert_(out._data == 0)
if __name__ == "__main__":
    # Run the tests in this module directly via numpy's legacy test runner.
    run_module_suite()
| 28.61194 | 74 | 0.581116 | from __future__ import division, absolute_import, print_function
import numpy as np
import numpy.ma as ma
from numpy.testing import *
from numpy.compat import sixu
rlevel = 1
class TestRegression(TestCase):
def test_masked_array_create(self,level=rlevel):
x = np.ma.masked_array([0,1,2,3,0,4,5,6],mask=[0,0,0,1,1,1,0,0])
assert_array_equal(np.ma.nonzero(x),[[1,2,6,7]])
def test_masked_array(self,level=rlevel):
x = np.ma.array(1,mask=[1])
def test_mem_masked_where(self,level=rlevel):
from numpy.ma import masked_where, MaskType
a = np.zeros((1,1))
b = np.zeros(a.shape, MaskType)
c = masked_where(b,a)
a-c
def test_masked_array_multiply(self,level=rlevel):
a = np.ma.zeros((4,1))
a[2,0] = np.ma.masked
b = np.zeros((4,2))
a*b
b*a
def test_masked_array_repeat(self, level=rlevel):
np.ma.array([1],mask=False).repeat(10)
def test_masked_array_repr_unicode(self):
repr(np.ma.array(sixu("Unicode")))
def test_atleast_2d(self):
a = np.ma.masked_array([0.0, 1.2, 3.5], mask=[False, True, False])
b = np.atleast_2d(a)
assert_(a.mask.ndim == 1)
assert_(b.mask.ndim == 2)
def test_set_fill_value_unicode_py3(self):
a = np.ma.masked_array(['a', 'b', 'c'], mask=[1, 0, 0])
a.fill_value = 'X'
assert_(a.fill_value == 'X')
def test_var_sets_maskedarray_scalar(self):
a = np.ma.array(np.arange(5), mask=True)
mout = np.ma.array(-1, dtype=float)
a.var(out=mout)
assert_(mout._data == 0)
if __name__ == "__main__":
run_module_suite()
| true | true |
f713a9dc7b039e0ec3ed6a843cbb5ec20cd3d558 | 23,351 | py | Python | source/spot_detection_tracking/trackmate_xml_2d.py | zhanyinx/SPT_analysis | 1cf806c1fd6051e7fc998d2860a16bea6aa9de1a | [
"MIT"
] | 1 | 2021-07-09T11:51:04.000Z | 2021-07-09T11:51:04.000Z | source/spot_detection_tracking/trackmate_xml_2d.py | zhanyinx/SPT_analysis | 1cf806c1fd6051e7fc998d2860a16bea6aa9de1a | [
"MIT"
] | null | null | null | source/spot_detection_tracking/trackmate_xml_2d.py | zhanyinx/SPT_analysis | 1cf806c1fd6051e7fc998d2860a16bea6aa9de1a | [
"MIT"
] | null | null | null | """\U0001F1EB\U0001F1EF \U00002B50 CSV track coordinate to TrackMate XML conversion.
Fiji allows for quick and easy viewing of images. TrackMate can be used to view tracks.
Unfortunately, it isn't that simple to convert "normal" coordinate output into
TrackMate-viewable format.
Requires a "tracks.csv" file that contains the following columns:
- x, y: Coordinate positions in x-/y-axis
- particle: Unique ID assigned to all coordinates along one track
- frame: Current point in time / frame
"""
import argparse
import os
import tempfile
import warnings
import xml.dom.minidom
import xml.etree.ElementTree as ET

import numpy as np
import pandas as pd
import skimage.io
def get_gaps(frames):
    """Summarize the gaps in a track's frame numbers.

    Args:
        frames: Iterable of integer frame indices belonging to one track.

    Returns:
        Tuple of strings ``(longest, total, duration)`` where ``longest`` is
        the length of the longest run of consecutive missing frames,
        ``total`` is the total number of missing frames, and ``duration`` is
        ``max(frames) - min(frames)``.
    """

    def __longest_consecutive(a):
        """Return length of longest consecutive range in list of integers."""
        a = set(a)
        longest = 0
        for i in a:
            if i - 1 not in a:  # i starts a new consecutive run
                streak = 0
                while i in a:
                    i += 1
                    streak += 1
                longest = max(longest, streak)
        return longest

    # Frames the track *should* contain; setdiff yields the missing ones.
    full_length = np.arange(min(frames), max(frames))
    missing = np.setdiff1d(full_length, frames)
    longest = __longest_consecutive(missing)
    total = len(missing)
    return str(longest), str(total), str(len(full_length))
def __create_model(root, spatialunits: str = "pixel", timeunits: str = "sec"):
    """Append TrackMate's <Model> element, including every spot/edge/track
    feature declaration TrackMate expects, and return the model element."""

    def _feat(code, name, short, dim, isint):
        # Attribute set for one <Feature> element.
        return {
            "feature": code,
            "name": name,
            "shortname": short,
            "dimension": dim,
            "isint": isint,
        }

    spot_features = [
        _feat("QUALITY", "Quality", "Quality", "QUALITY", "false"),
        _feat("POSITION_X", "X", "X", "POSITION", "false"),
        _feat("POSITION_Y", "Y", "Y", "POSITION", "false"),
        _feat("POSITION_Z", "Z", "Z", "POSITION", "false"),
        _feat("POSITION_T", "T", "T", "TIME", "false"),
        _feat("FRAME", "Frame", "Frame", "NONE", "true"),
        _feat("RADIUS", "Radius", "R", "LENGTH", "false"),
        _feat("VISIBILITY", "Visibility", "Visibility", "NONE", "true"),
        _feat("MANUAL_INTEGER_SPOT_FEATURE", "Custom Integer Spot Feature",
              "Integer Spot Feature", "NONE", "true"),
        _feat("MANUAL_DOUBLE_SPOT_FEATURE", "Custom Double Spot Feature",
              "Double Spot Feature", "NONE", "false"),
        _feat("HAS_MAX_QUALITY_IN_FRAME", "Has max quality", "Max Quality",
              "NONE", "true"),
        _feat("MANUAL_COLOR", "Manual spot color", "Spot color", "NONE", "true"),
        _feat("MEAN_INTENSITY", "Mean intensity", "Mean", "INTENSITY", "false"),
        _feat("MEDIAN_INTENSITY", "Median intensity", "Median", "INTENSITY", "false"),
        _feat("MIN_INTENSITY", "Minimal intensity", "Min", "INTENSITY", "false"),
        _feat("MAX_INTENSITY", "Maximal intensity", "Max", "INTENSITY", "false"),
        _feat("TOTAL_INTENSITY", "Total intensity", "Total int.", "INTENSITY", "false"),
        _feat("STANDARD_DEVIATION", "Standard deviation", "Stdev.", "INTENSITY", "false"),
        _feat("ESTIMATED_DIAMETER", "Estimated diameter", "Diam.", "LENGTH", "false"),
        # The "Constrast" misspelling is preserved byte-for-byte on purpose.
        _feat("CONTRAST", "Contrast", "Constrast", "NONE", "false"),
        _feat("SNR", "Signal/Noise, ratio", "SNR", "NONE", "false"),
    ]

    edge_features = [
        _feat("SPOT_SOURCE_ID", "Source spot ID", "Source ID", "NONE", "true"),
        _feat("SPOT_TARGET_ID", "Target spot ID", "Target ID", "NONE", "true"),
        _feat("LINK_COST", "Link cost", "Cost", "NONE", "false"),
        _feat("EDGE_TIME", "Time (mean)", "T", "TIME", "false"),
        _feat("EDGE_X_LOCATION", "X Location (mean)", "X", "POSITION", "false"),
        _feat("EDGE_Y_LOCATION", "Y Location (mean)", "Y", "POSITION", "false"),
        _feat("EDGE_Z_LOCATION", "Z Location (mean)", "Z", "POSITION", "false"),
        _feat("VELOCITY", "Velocity", "V", "VELOCITY", "false"),
        _feat("DISPLACEMENT", "Displacement", "D", "LENGTH", "false"),
        _feat("MANUAL_COLOR", "Manual edge color", "Edge color", "NONE", "true"),
    ]

    track_features = [
        _feat("MANUAL_INTEGER_TRACK_FEATURE", "Custom Integer Track Feature",
              "Integer Track Feature", "NONE", "true"),
        _feat("MANUAL_DOUBLE_TRACK_FEATURE", "Custom Double Track Feature",
              "Double Track Feature", "NONE", "false"),
        _feat("NUMBER_SPOTS", "Number of spots in track", "N spots", "NONE", "true"),
        _feat("NUMBER_GAPS", "Number of gaps", "Gaps", "NONE", "true"),
        _feat("LONGEST_GAP", "Longest gap", "Longest gap", "NONE", "true"),
        _feat("NUMBER_SPLITS", "Number of split events", "Splits", "NONE", "true"),
        _feat("NUMBER_MERGES", "Number of merge events", "Merges", "NONE", "true"),
        _feat("NUMBER_COMPLEX", "Complex points", "Complex", "NONE", "true"),
        _feat("TRACK_DURATION", "Duration of track", "Duration", "TIME", "false"),
        _feat("TRACK_START", "Track start", "T start", "TIME", "false"),
        _feat("TRACK_STOP", "Track stop", "T stop", "TIME", "false"),
        _feat("TRACK_DISPLACEMENT", "Track displacement", "Displacement", "LENGTH", "false"),
        _feat("TRACK_INDEX", "Track index", "Index", "NONE", "true"),
        _feat("TRACK_ID", "Track ID", "ID", "NONE", "true"),
        _feat("TRACK_X_LOCATION", "X Location (mean)", "X", "POSITION", "false"),
        _feat("TRACK_Y_LOCATION", "Y Location (mean)", "Y", "POSITION", "false"),
        _feat("TRACK_Z_LOCATION", "Z Location (mean)", "Z", "POSITION", "false"),
        _feat("TRACK_MEAN_SPEED", "Mean velocity", "Mean V", "VELOCITY", "false"),
        _feat("TRACK_MAX_SPEED", "Maximal velocity", "Max V", "VELOCITY", "false"),
        _feat("TRACK_MIN_SPEED", "Minimal velocity", "Min V", "VELOCITY", "false"),
        _feat("TRACK_MEDIAN_SPEED", "Median velocity", "Median V", "VELOCITY", "false"),
        _feat("TRACK_STD_SPEED", "Velocity standard deviation", "V std", "VELOCITY", "false"),
        _feat("TRACK_MEAN_QUALITY", "Mean quality", "Mean Q", "QUALITY", "false"),
        _feat("TRACK_MAX_QUALITY", "Maximal quality", "Max Q", "QUALITY", "false"),
        _feat("TRACK_MIN_QUALITY", "Minimal quality", "Min Q", "QUALITY", "false"),
        _feat("TRACK_MEDIAN_QUALITY", "Median quality", "Median Q", "QUALITY", "false"),
        _feat("TRACK_STD_QUALITY", "Quality standard deviation", "Q std", "QUALITY", "false"),
    ]

    model = ET.SubElement(root, "Model", spatialunits=spatialunits, timeunits=timeunits)
    featuredeclarations = ET.SubElement(model, "FeatureDeclarations")
    # Emit the three declaration groups in TrackMate's expected order.
    for tag, feats in (
        ("SpotFeatures", spot_features),
        ("EdgeFeatures", edge_features),
        ("TrackFeatures", track_features),
    ):
        parent = ET.SubElement(featuredeclarations, tag)
        for attrs in feats:
            ET.SubElement(parent, "Feature", **attrs)
    return model
def __create_allspots(model, df):
    """Append the <AllSpots> element holding every detected spot to *model*."""
    allspots = ET.SubElement(model, "AllSpots", nspots=str(len(df)))
    spot_id = 0
    for frame in df["slice"].unique():
        frame_id = str(float(frame))
        frame_rows = df[df["slice"] == frame]
        spotsinframe = ET.SubElement(allspots, "SpotsInFrame", frame=str(frame))
        for _, row in frame_rows.iterrows():
            try:
                size = str(row["size"] * 2)  # diameter = 2 * detected size
            except KeyError:
                size = "1.0"  # no size column -> default diameter
            attributes = {
                "ID": f"{spot_id:06}",
                "name": f"ID{spot_id:06}",
                "QUALITY": "1.0",
                "POSITION_T": frame_id,
                "MAX_INTENSITY": "1.0",
                "FRAME": frame_id,
                "MEDIAN_INTENSITY": "1.0",
                "VISIBILITY": "1",
                "MEAN_INTENSITY": "1.0",
                "TOTAL_INTENSITY": "1.0",
                "ESTIMATED_DIAMETER": size,
                "RADIUS": "1.0",
                "SNR": "1.0",
                "POSITION_X": str(row["x"]),
                "POSITION_Y": str(row["y"]),
                "STANDARD_DEVIATION": "1.0",
                "CONTRAST": "1.0",
                "MANUAL_COLOR": "-10921639",
                "MIN_INTENSITY": "0.0",
                "POSITION_Z": "1",
            }
            ET.SubElement(spotsinframe, "Spot", **attributes)
            spot_id += 1
def __create_alltracks(model, df):
    """Append the <AllTracks> element to *model*.

    Only the empty container element is written. A previous implementation
    that grouped ``df`` by its ``particle`` column and emitted per-track
    <Track>/<Edge> children (summarizing gaps via ``get_gaps``) was left
    commented out in the original source and remains disabled.
    """
    alltracks = ET.SubElement(model, "AllTracks")
def __create_filteredtracks(model, df):
    """Append the <FilteredTracks> element to *model*.

    Only the empty container is written; the per-particle <TrackID>
    children were left commented out in the original source and remain
    disabled.
    """
    filteredtracks = ET.SubElement(model, "FilteredTracks")
def __create_settings(
    root,
    file_image,
    pixelwidth: str = "1.0",
    pixelheight: str = "1.0",
    voxeldepth: str = "1.0",
    timeinterval: str = "1.0",
):
    """Append TrackMate's <Settings> element (image metadata, detector and
    tracker configuration) to *root*, reading *file_image* for dimensions."""
    # Image metadata
    path, fname = os.path.split(file_image)
    image = skimage.io.imread(file_image)
    if len(image.shape) == 2:
        # BUG FIX: the original built a Warning instance without raising or
        # emitting it; actually emit the warning.
        warnings.warn(
            "Found image with shape = 2; assuming it's 3d data with a single time point."
        )
        image = np.expand_dims(image, axis=0)
    frames, width, height = image.shape
    imagedata = {
        "filename": fname,
        "folder": path,
        "width": str(width),
        "height": str(height),
        "nslices": "1",
        "nframes": str(frames),
        "pixelwidth": pixelwidth,
        "pixelheight": pixelheight,
        "voxeldepth": voxeldepth,
        "timeinterval": timeinterval,
    }
    basicsettings = {
        "xstart": "0",
        "xend": str(width - 1),
        "ystart": "0",
        "yend": str(height - 1),
        "zstart": "0",
        "zend": "0",
        "tstart": "0",
        "tend": str(frames - 1),
    }
    detectorsettings = {
        "DETECTOR_NAME": "LOG_DETECTOR",
        "TARGET_CHANNEL": "1",
        "RADIUS": "5.0",
        "THRESHOLD": "1000.0",
        "DO_MEDIAN_FILTERING": "false",
        "DO_SUBPIXEL_LOCALIZATION": "true",
    }
    initialspotfilter = {"feature": "QUALITY", "value": "0.0", "isabove": "true"}
    dict_trackersettings = {
        "TRACKER_NAME": "SPARSE_LAP_TRACKER",
        "CUTOFF_PERCENTILE": "0.9",
        "ALTERNATIVE_LINKING_COST_FACTOR": "1.05",
        "BLOCKING_VALUE": "Infinity",
    }
    dict_subtrackersettings = {
        "Linking": {"LINKING_MAX_DISTANCE": "0.8"},
        "GapClosing": {
            "ALLOW_GAP_CLOSING": "false",
            "GAP_CLOSING_MAX_DISTANCE": "0.5",
            "MAX_FRAME_GAP": "3",
        },
        "TrackSplitting": {
            "ALLOW_TRACK_SPLITTING": "false",
            "SPLITTING_MAX_DISTANCE": "15.0",
        },
        "TrackMerging": {
            "ALLOW_TRACK_MERGING": "false",
            "MERGING_MAX_DISTANCE": "15.0",
        },
    }
    dict_analyzercollection = {
        "SpotAnalyzers": [
            "MANUAL_SPOT_COLOR_ANALYZER",
            "Spot descriptive statistics",
            "Spot radius estimator",
            "Spot contrast and SNR",
        ],
        "EdgeAnalyzers": [
            "Edge target",
            "Edge mean location",
            "Edge velocity",
            "MANUAL_EDGE_COLOR_ANALYZER",
        ],
        "TrackAnalyzers": [
            "Branching analyzer",
            "Track duration",
            "Track index",
            "Track location",
            "Velocity",
            "TRACK_SPOT_QUALITY",
        ],
    }

    # General Settings
    settings = ET.SubElement(root, "Settings")
    _ = ET.SubElement(settings, "ImageData", **imagedata)
    _ = ET.SubElement(settings, "BasicSettings", **basicsettings)
    _ = ET.SubElement(settings, "DetectorSettings", **detectorsettings)
    _ = ET.SubElement(settings, "InitialSpotFilter", **initialspotfilter)
    _ = ET.SubElement(settings, "SpotFilterCollection")

    # Tracker settings
    trackersettings = ET.SubElement(settings, "TrackerSettings", **dict_trackersettings)
    for k, v in dict_subtrackersettings.items():
        subelement = ET.SubElement(trackersettings, k, **v)
        _ = ET.SubElement(subelement, "FeaturePenalties")

    # Filter settings
    _ = ET.SubElement(settings, "TrackFilterCollection")
    analyzercollection = ET.SubElement(settings, "AnalyzerCollection")
    for k, v in dict_analyzercollection.items():
        subanalyzer = ET.SubElement(analyzercollection, k)
        for lst in v:
            _ = ET.SubElement(subanalyzer, "Analyzer", key=lst)
def __create_guistate(root):
    """Append TrackMate's <GUIState> element with four default hyperstack views."""
    guistate = ET.SubElement(root, "GUIState", state="InitialFiltering")
    for _ in range(4):
        ET.SubElement(guistate, "View", key="HYPERSTACKDISPLAYER")
def __pretty_output(root, file_output):
    """Write *root* to *file_output* as indented, human-readable XML."""
    # Serialize in memory instead of round-tripping through a temporary
    # file on disk; the prettified output is identical.
    dom = xml.dom.minidom.parseString(ET.tostring(root, encoding="unicode"))
    with open(file_output, "w") as f:
        f.write(dom.toprettyxml())
def create_trackmate_xml(
    spots_df,
    file_image,
    file_output,
    spatialunits: str = "pixel",
    timeunits: str = "sec",
    pixelwidth: int = 1,
    pixelheight: int = 1,
    voxeldepth: int = 1,
    timeinterval: int = 1,
):
    """Convert a spots DataFrame into a TrackMate-readable XML file.

    Args:
        spots_df: DataFrame with at least ``x``, ``y`` and ``slice`` columns
            (pixel coordinates and frame index). The input is not modified.
        file_image: Image file the coordinates refer to (read for metadata).
        file_output: Destination ``.xml`` path; a ``.csv`` with the scaled
            coordinates is written alongside it.
        spatialunits / timeunits: Unit names stored in the model.
        pixelwidth / pixelheight / voxeldepth / timeinterval: Calibration
            factors; x/y coordinates are scaled by the pixel sizes.

    Raises:
        ValueError: If a required column is missing.
    """
    # Validate before touching the data so a missing column raises the
    # intended ValueError instead of a KeyError (the original validated
    # only after already indexing the columns).
    req_cols = ["x", "y", "slice"]
    if not all(req in spots_df.columns for req in req_cols):
        raise ValueError(f"Not all required columns present! {req_cols} must exist.")

    # Work on a copy so the caller's DataFrame is not mutated in place.
    df = spots_df.copy()
    df["x"] = df["x"] * pixelwidth
    df["y"] = df["y"] * pixelheight
    df["z"] = 1.0
    # NOTE(review): str.replace swaps every "xml" occurrence in the path,
    # not just the extension -- confirm output paths never contain "xml"
    # elsewhere.
    df.to_csv(file_output.replace("xml", "csv"))

    # XML tree
    root = ET.Element("TrackMate", version="6.0.1")

    # Model
    model = __create_model(root, spatialunits=spatialunits, timeunits=timeunits)
    __create_allspots(model, df)
    __create_alltracks(model, df)
    __create_filteredtracks(model, df)

    # Settings
    __create_settings(
        root,
        file_image,
        pixelwidth=str(pixelwidth),
        pixelheight=str(pixelheight),
        voxeldepth=str(voxeldepth),
        timeinterval=str(timeinterval),
    )
    __create_guistate(root)

    # Save output
    __pretty_output(root, file_output)
| 30.524183 | 90 | 0.480493 |
import argparse
import os
import tempfile
import xml.dom.minidom
import xml.etree.ElementTree as ET
import numpy as np
import pandas as pd
import skimage.io
def get_gaps(frames):
def __longest_consecutive(a):
a = set(a)
longest = 0
for i in a:
if i - 1 not in a:
streak = 0
while i in a:
i += 1
streak += 1
longest = max(longest, streak)
return longest
full_length = np.arange(min(frames), max(frames))
diff = np.setdiff1d(full_length, frames)
longest = __longest_consecutive(diff)
total = len(diff)
return str(longest), str(total), str(len(full_length))
def __create_model(root, spatialunits: str = "pixel", timeunits: str = "sec"):
    """Attach a TrackMate <Model> element, including every spot/edge/track
    feature declaration, to *root* and return it."""

    # Every feature declaration has the same five attributes; keep them as
    # compact rows and expand into the keyword dicts ElementTree expects.
    def rows_to_dicts(rows):
        keys = ("feature", "name", "shortname", "dimension", "isint")
        return [dict(zip(keys, row)) for row in rows]

    spot_rows = [
        ("QUALITY", "Quality", "Quality", "QUALITY", "false"),
        ("POSITION_X", "X", "X", "POSITION", "false"),
        ("POSITION_Y", "Y", "Y", "POSITION", "false"),
        ("POSITION_Z", "Z", "Z", "POSITION", "false"),
        ("POSITION_T", "T", "T", "TIME", "false"),
        ("FRAME", "Frame", "Frame", "NONE", "true"),
        ("RADIUS", "Radius", "R", "LENGTH", "false"),
        ("VISIBILITY", "Visibility", "Visibility", "NONE", "true"),
        ("MANUAL_INTEGER_SPOT_FEATURE", "Custom Integer Spot Feature", "Integer Spot Feature", "NONE", "true"),
        ("MANUAL_DOUBLE_SPOT_FEATURE", "Custom Double Spot Feature", "Double Spot Feature", "NONE", "false"),
        ("HAS_MAX_QUALITY_IN_FRAME", "Has max quality", "Max Quality", "NONE", "true"),
        ("MANUAL_COLOR", "Manual spot color", "Spot color", "NONE", "true"),
        ("MEAN_INTENSITY", "Mean intensity", "Mean", "INTENSITY", "false"),
        ("MEDIAN_INTENSITY", "Median intensity", "Median", "INTENSITY", "false"),
        ("MIN_INTENSITY", "Minimal intensity", "Min", "INTENSITY", "false"),
        ("MAX_INTENSITY", "Maximal intensity", "Max", "INTENSITY", "false"),
        ("TOTAL_INTENSITY", "Total intensity", "Total int.", "INTENSITY", "false"),
        ("STANDARD_DEVIATION", "Standard deviation", "Stdev.", "INTENSITY", "false"),
        ("ESTIMATED_DIAMETER", "Estimated diameter", "Diam.", "LENGTH", "false"),
        # "Constrast" typo kept on purpose: it is the attribute value TrackMate
        # files historically carry for this feature.
        ("CONTRAST", "Contrast", "Constrast", "NONE", "false"),
        ("SNR", "Signal/Noise, ratio", "SNR", "NONE", "false"),
    ]
    edge_rows = [
        ("SPOT_SOURCE_ID", "Source spot ID", "Source ID", "NONE", "true"),
        ("SPOT_TARGET_ID", "Target spot ID", "Target ID", "NONE", "true"),
        ("LINK_COST", "Link cost", "Cost", "NONE", "false"),
        ("EDGE_TIME", "Time (mean)", "T", "TIME", "false"),
        ("EDGE_X_LOCATION", "X Location (mean)", "X", "POSITION", "false"),
        ("EDGE_Y_LOCATION", "Y Location (mean)", "Y", "POSITION", "false"),
        ("EDGE_Z_LOCATION", "Z Location (mean)", "Z", "POSITION", "false"),
        ("VELOCITY", "Velocity", "V", "VELOCITY", "false"),
        ("DISPLACEMENT", "Displacement", "D", "LENGTH", "false"),
        ("MANUAL_COLOR", "Manual edge color", "Edge color", "NONE", "true"),
    ]
    track_rows = [
        ("MANUAL_INTEGER_TRACK_FEATURE", "Custom Integer Track Feature", "Integer Track Feature", "NONE", "true"),
        ("MANUAL_DOUBLE_TRACK_FEATURE", "Custom Double Track Feature", "Double Track Feature", "NONE", "false"),
        ("NUMBER_SPOTS", "Number of spots in track", "N spots", "NONE", "true"),
        ("NUMBER_GAPS", "Number of gaps", "Gaps", "NONE", "true"),
        ("LONGEST_GAP", "Longest gap", "Longest gap", "NONE", "true"),
        ("NUMBER_SPLITS", "Number of split events", "Splits", "NONE", "true"),
        ("NUMBER_MERGES", "Number of merge events", "Merges", "NONE", "true"),
        ("NUMBER_COMPLEX", "Complex points", "Complex", "NONE", "true"),
        ("TRACK_DURATION", "Duration of track", "Duration", "TIME", "false"),
        ("TRACK_START", "Track start", "T start", "TIME", "false"),
        ("TRACK_STOP", "Track stop", "T stop", "TIME", "false"),
        ("TRACK_DISPLACEMENT", "Track displacement", "Displacement", "LENGTH", "false"),
        ("TRACK_INDEX", "Track index", "Index", "NONE", "true"),
        ("TRACK_ID", "Track ID", "ID", "NONE", "true"),
        ("TRACK_X_LOCATION", "X Location (mean)", "X", "POSITION", "false"),
        ("TRACK_Y_LOCATION", "Y Location (mean)", "Y", "POSITION", "false"),
        ("TRACK_Z_LOCATION", "Z Location (mean)", "Z", "POSITION", "false"),
        ("TRACK_MEAN_SPEED", "Mean velocity", "Mean V", "VELOCITY", "false"),
        ("TRACK_MAX_SPEED", "Maximal velocity", "Max V", "VELOCITY", "false"),
        ("TRACK_MIN_SPEED", "Minimal velocity", "Min V", "VELOCITY", "false"),
        ("TRACK_MEDIAN_SPEED", "Median velocity", "Median V", "VELOCITY", "false"),
        ("TRACK_STD_SPEED", "Velocity standard deviation", "V std", "VELOCITY", "false"),
        ("TRACK_MEAN_QUALITY", "Mean quality", "Mean Q", "QUALITY", "false"),
        ("TRACK_MAX_QUALITY", "Maximal quality", "Max Q", "QUALITY", "false"),
        ("TRACK_MIN_QUALITY", "Minimal quality", "Min Q", "QUALITY", "false"),
        ("TRACK_MEDIAN_QUALITY", "Median quality", "Median Q", "QUALITY", "false"),
        ("TRACK_STD_QUALITY", "Quality standard deviation", "Q std", "QUALITY", "false"),
    ]

    model = ET.SubElement(root, "Model", spatialunits=spatialunits, timeunits=timeunits)
    featuredeclarations = ET.SubElement(model, "FeatureDeclarations")
    # Emit the three declaration containers in the order TrackMate writes them.
    for tag, rows in (
        ("SpotFeatures", spot_rows),
        ("EdgeFeatures", edge_rows),
        ("TrackFeatures", track_rows),
    ):
        container = ET.SubElement(featuredeclarations, tag)
        for attrs in rows_to_dicts(rows):
            ET.SubElement(container, "Feature", **attrs)
    return model
def __create_allspots(model, df):
    """Attach an <AllSpots> subtree to *model*, one <Spot> per dataframe row.

    Rows are grouped per frame (the ``slice`` column); spot IDs are assigned
    sequentially in iteration order. Intensity/quality attributes that the
    dataframe does not carry are filled with placeholder constants.
    """
    allspots = ET.SubElement(model, "AllSpots", nspots=str(len(df)))
    spotid = 0
    for frame in df["slice"].unique():
        frame_id = str(float(frame))
        df_frame = df[df["slice"] == frame]
        spotsinframe = ET.SubElement(allspots, "SpotsInFrame", frame=str(frame))
        for row in df_frame.iterrows():
            # Diameter from the optional "size" column (radius -> diameter),
            # defaulting to 1.0 when the column is absent.
            try:
                size = str(row[1]["size"] * 2)
            except KeyError:
                size = "1.0"
            dict_spot = {
                "ID": f"{spotid:06}",
                "name": f"ID{spotid:06}",
                "QUALITY": "1.0",
                "POSITION_T": frame_id,
                "MAX_INTENSITY": "1.0",
                "FRAME": frame_id,
                "MEDIAN_INTENSITY": "1.0",
                "VISIBILITY": "1",
                "MEAN_INTENSITY": "1.0",
                "TOTAL_INTENSITY": "1.0",
                "ESTIMATED_DIAMETER": size,
                "RADIUS": "1.0",
                "SNR": "1.0",
                "POSITION_X": str(row[1]["x"]),
                "POSITION_Y": str(row[1]["y"]),
                "STANDARD_DEVIATION": "1.0",
                "CONTRAST": "1.0",
                "MANUAL_COLOR": "-10921639",
                "MIN_INTENSITY": "0.0",
                "POSITION_Z": "1",
            }
            _ = ET.SubElement(spotsinframe, "Spot", **dict_spot)
            spotid = spotid + 1
def __create_alltracks(model, df):
    """Attach an (empty) <AllTracks> container to *model*."""
    alltracks = ET.SubElement(model, "AllTracks")


# NOTE(review): the following definition line was garbled in this copy
# ("s(model, df):"); reconstructed from the call site
# `__create_filteredtracks(model, df)` used by create_trackmate_xml.
def __create_filteredtracks(model, df):
    """Attach an (empty) <FilteredTracks> container to *model*."""
    filteredtracks = ET.SubElement(model, "FilteredTracks")
    # for particle in df["particle"].unique():
    #     _ = ET.SubElement(filteredtracks, "TrackID", TRACK_ID=str(particle))
def __create_settings(
    root,
    file_image,
    pixelwidth: str = "1.0",
    pixelheight: str = "1.0",
    voxeldepth: str = "1.0",
    timeinterval: str = "1.0",
):
    """Attach the TrackMate <Settings> subtree (image metadata, detector,
    tracker and analyzer configuration) to *root*.

    Args:
        root: the <TrackMate> root element.
        file_image: path to the source image; its shape fills the metadata.
        pixelwidth, pixelheight, voxeldepth, timeinterval: calibration values,
            already stringified by the caller.
    """
    import warnings

    # Image metadata
    path, fname = os.path.split(file_image)
    image = skimage.io.imread(file_image)
    if len(image.shape) == 2:
        # Bug fix: `Warning(...)` alone only constructs an exception object and
        # throws it away — actually emit the warning.
        warnings.warn(
            "Found image with shape = 2; assuming it's 3d data with a single time point."
        )
        image = np.expand_dims(image, axis=0)
    # NOTE(review): skimage arrays are (rows, cols), so "width"/"height" here
    # may be swapped for non-square images — confirm against the consumer.
    frames, width, height = image.shape
    imagedata = {
        "filename": fname,
        "folder": path,
        "width": str(width),
        "height": str(height),
        "nslices": "1",
        "nframes": str(frames),
        "pixelwidth": pixelwidth,
        "pixelheight": pixelheight,
        "voxeldepth": voxeldepth,
        "timeinterval": timeinterval,
    }
    basicsettings = {
        "xstart": "0",
        "xend": str(width - 1),
        "ystart": "0",
        "yend": str(height - 1),
        "zstart": "0",
        "zend": "0",
        "tstart": "0",
        "tend": str(frames - 1),
    }
    detectorsettings = {
        "DETECTOR_NAME": "LOG_DETECTOR",
        "TARGET_CHANNEL": "1",
        "RADIUS": "5.0",
        "THRESHOLD": "1000.0",
        "DO_MEDIAN_FILTERING": "false",
        "DO_SUBPIXEL_LOCALIZATION": "true",
    }
    initialspotfilter = {"feature": "QUALITY", "value": "0.0", "isabove": "true"}
    dict_trackersettings = {
        "TRACKER_NAME": "SPARSE_LAP_TRACKER",
        "CUTOFF_PERCENTILE": "0.9",
        "ALTERNATIVE_LINKING_COST_FACTOR": "1.05",
        "BLOCKING_VALUE": "Infinity",
    }
    dict_subtrackersettings = {
        "Linking": {"LINKING_MAX_DISTANCE": "0.8"},
        "GapClosing": {
            "ALLOW_GAP_CLOSING": "false",
            "GAP_CLOSING_MAX_DISTANCE": "0.5",
            "MAX_FRAME_GAP": "3",
        },
        "TrackSplitting": {
            "ALLOW_TRACK_SPLITTING": "false",
            "SPLITTING_MAX_DISTANCE": "15.0",
        },
        "TrackMerging": {
            "ALLOW_TRACK_MERGING": "false",
            "MERGING_MAX_DISTANCE": "15.0",
        },
    }
    dict_analyzercollection = {
        "SpotAnalyzers": [
            "MANUAL_SPOT_COLOR_ANALYZER",
            "Spot descriptive statistics",
            "Spot radius estimator",
            "Spot contrast and SNR",
        ],
        "EdgeAnalyzers": [
            "Edge target",
            "Edge mean location",
            "Edge velocity",
            "MANUAL_EDGE_COLOR_ANALYZER",
        ],
        "TrackAnalyzers": [
            "Branching analyzer",
            "Track duration",
            "Track index",
            "Track location",
            "Velocity",
            "TRACK_SPOT_QUALITY",
        ],
    }
    # Assemble the <Settings> subtree in TrackMate's canonical element order.
    settings = ET.SubElement(root, "Settings")
    _ = ET.SubElement(settings, "ImageData", **imagedata)
    _ = ET.SubElement(settings, "BasicSettings", **basicsettings)
    _ = ET.SubElement(settings, "DetectorSettings", **detectorsettings)
    _ = ET.SubElement(settings, "InitialSpotFilter", **initialspotfilter)
    _ = ET.SubElement(settings, "SpotFilterCollection")
    trackersettings = ET.SubElement(settings, "TrackerSettings", **dict_trackersettings)
    for k, v in dict_subtrackersettings.items():
        subelement = ET.SubElement(trackersettings, k, **v)
        _ = ET.SubElement(subelement, "FeaturePenalties")
    _ = ET.SubElement(settings, "TrackFilterCollection")
    analyzercollection = ET.SubElement(settings, "AnalyzerCollection")
    for k, v in dict_analyzercollection.items():
        subanalyzer = ET.SubElement(analyzercollection, k)
        for lst in v:
            _ = ET.SubElement(subanalyzer, "Analyzer", key=lst)
def __create_guistate(root):
    """Attach the <GUIState> subtree expected by the TrackMate GUI."""
    gui_state = ET.SubElement(root, "GUIState", state="InitialFiltering")
    # Four display views, all backed by the hyperstack viewer.
    for _view_index in range(4):
        ET.SubElement(gui_state, "View", key="HYPERSTACKDISPLAYER")
def __pretty_output(root, file_output):
    """Write *root* to ``file_output`` as pretty-printed (indented) XML."""
    # Save file after fancy formatting to prettify
    with tempfile.TemporaryDirectory() as tempdirname:
        fname = os.path.join(tempdirname, "file.xml")
        tree = ET.ElementTree(root)
        # ElementTree emits compact XML; re-parse with minidom to indent it.
        tree.write(fname, encoding="UTF-8", xml_declaration=True)
        dom = xml.dom.minidom.parse(fname)
        pretty_xml = dom.toprettyxml()
    with open(file_output, "w") as f:
        f.write(pretty_xml)
def create_trackmate_xml(
    spots_df,
    file_image,
    file_output,
    spatialunits: str = "pixel",
    timeunits: str = "sec",
    pixelwidth: int = 1,
    pixelheight: int = 1,
    voxeldepth: int = 1,
    timeinterval: int = 1,
):
    """Build a TrackMate-compatible XML file from a spots dataframe.

    ``spots_df`` must provide ``x``, ``y`` and ``slice`` columns; it is mutated
    in place (coordinates scaled by the pixel calibration, ``z`` added). A CSV
    dump of the scaled spots is written next to ``file_output``.
    """
    # Check required track df columns
    df = spots_df
    df["x"] = df["x"] * pixelwidth
    df["y"] = df["y"] * pixelheight
    df["z"] = 1.0
    # NOTE(review): replaces the first "xml" anywhere in the path, not just
    # the suffix — confirm output paths never contain "xml" elsewhere.
    df.to_csv(file_output.replace("xml", "csv"))
    # NOTE(review): this check runs after df["x"]/df["y"] were already
    # indexed, so a missing column raises KeyError before this ValueError.
    req_cols = ["x", "y", "slice"]
    if not all(req in df.columns for req in req_cols):
        raise ValueError(f"Not all required columns present! {req_cols} must exist.")
    # XML tree
    root = ET.Element("TrackMate", version="6.0.1")
    # Model
    model = __create_model(root, spatialunits=spatialunits, timeunits=timeunits)
    __create_allspots(model, df)
    __create_alltracks(model, df)
    __create_filteredtracks(model, df)
    # Settings
    __create_settings(
        root,
        file_image,
        pixelwidth=str(pixelwidth),
        pixelheight=str(pixelheight),
        voxeldepth=str(voxeldepth),
        timeinterval=str(timeinterval),
    )
    __create_guistate(root)
    # Save output
    __pretty_output(root, file_output)
| true | true |
f713ad1b0c007eaefdbe5b1736bad81fd1c6c192 | 5,530 | py | Python | homekit/tools.py | LucaKaufmann/homekit_python | 12f95aba5db676fa9254ccfb4382aa1f6cc8862d | [
"Apache-2.0"
] | null | null | null | homekit/tools.py | LucaKaufmann/homekit_python | 12f95aba5db676fa9254ccfb4382aa1f6cc8862d | [
"Apache-2.0"
] | null | null | null | homekit/tools.py | LucaKaufmann/homekit_python | 12f95aba5db676fa9254ccfb4382aa1f6cc8862d | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2018 Joachim Lusiardi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import sys
import base64
import binascii
from homekit.http_impl.http_client import HomeKitHTTPConnection
from homekit.zeroconf_ import find_device_ip_and_port
from homekit.protocol import get_session_keys
from homekit.model.characteristics import CharacteristicFormats
from distutils.util import strtobool
from homekit.exception import FormatException
from homekit.tlv import TlvParseException
from homekit import TLV
def load_pairing(file: str) -> dict:
    """
    loads data for an existing pairing from the file.

    :param file: the file name
    :return: a dict containing the pairing data or None if file was not found
    """
    try:
        input_fp = open(file, 'r')
    except FileNotFoundError:
        return None
    with input_fp:
        return json.load(input_fp)
def save_pairing(file: str, pairing_data: dict):
    """
    save the data for an existing pairing.

    :param file: the file name
    :param pairing_data: a dict containing the pairing data
    :return: None
    """
    serialized = json.dumps(pairing_data, indent=4)
    with open(file, 'w') as output_fp:
        output_fp.write(serialized)
def create_session(file):
    """
    try to obtain IP and port from the given file and establish a session to a HomeKit accessory. This function covers
    IP/ports that might have changed since last run and updates the file accordingly.

    :param file: the path to the file where the data is stored
    :return:
        conn: an instance of HomeKitHTTPConnection
        c2a_key: the key used for communication from controller to accessory
        a2c_key: the key used for communication from accessory to controller
    """
    conn = None
    c2a_key = None
    a2c_key = None

    # load file with pairing data
    pairing_data = load_pairing(file)
    if pairing_data is None:
        print('File {file} not found!'.format(file=file))
        sys.exit(-1)

    # we need ip and port of the device
    connected = False
    if 'AccessoryIP' in pairing_data and 'AccessoryPort' in pairing_data:
        # if it is known, try it
        accessory_ip = pairing_data['AccessoryIP']
        accessory_port = pairing_data['AccessoryPort']
        conn = HomeKitHTTPConnection(accessory_ip, port=accessory_port)
        try:
            conn.connect()
            c2a_key, a2c_key = get_session_keys(conn, pairing_data)
            connected = True
        except Exception:
            # any failure (refused connection, stale session keys, ...) falls
            # through to the zeroconf rediscovery below
            connected = False

    if not connected:
        # no connection yet, so ip / port might have changed and we need to fall back to slow zeroconf lookup
        device_id = pairing_data['AccessoryPairingID']
        connection_data = find_device_ip_and_port(device_id)

        if connection_data is None:
            print('Device {id} not found'.format(id=device_id))
            sys.exit(-1)

        conn = HomeKitHTTPConnection(connection_data['ip'], port=connection_data['port'])

        # persist the freshly discovered IP/port for the next run
        pairing_data['AccessoryIP'] = connection_data['ip']
        pairing_data['AccessoryPort'] = connection_data['port']
        save_pairing(file, pairing_data)

        c2a_key, a2c_key = get_session_keys(conn, pairing_data)
    return conn, c2a_key, a2c_key
def check_convert_value(val, target_format):
    """
    Checks if the given value is of the given format or is convertible into the format. If the value is not convertible,
    a FormatException is thrown.

    :param val: the original value
    :param target_format: the target type of the conversion
    :raises FormatException: if the value is not of the given format or cannot be converted.
    :return: the converted value
    """
    def _fail():
        raise FormatException('"{v}" is no valid "{t}"!'.format(v=val, t=target_format))

    integer_formats = (
        CharacteristicFormats.uint64,
        CharacteristicFormats.uint32,
        CharacteristicFormats.uint16,
        CharacteristicFormats.uint8,
        CharacteristicFormats.int,
    )
    # The target formats are mutually exclusive, so a single elif chain is
    # equivalent to the sequential checks.
    if target_format == CharacteristicFormats.bool:
        try:
            val = strtobool(val)
        except ValueError:
            _fail()
    elif target_format in integer_formats:
        try:
            val = int(val)
        except ValueError:
            _fail()
    elif target_format == CharacteristicFormats.float:
        try:
            val = float(val)
        except ValueError:
            _fail()
    elif target_format == CharacteristicFormats.data:
        try:
            base64.decodebytes(val.encode())
        except binascii.Error:
            _fail()
    elif target_format == CharacteristicFormats.tlv8:
        try:
            TLV.decode_bytes(base64.decodebytes(val.encode()))
        except (binascii.Error, TlvParseException):
            _fail()
    return val
| 37.619048 | 120 | 0.682098 |
import json
import sys
import base64
import binascii
from homekit.http_impl.http_client import HomeKitHTTPConnection
from homekit.zeroconf_ import find_device_ip_and_port
from homekit.protocol import get_session_keys
from homekit.model.characteristics import CharacteristicFormats
from distutils.util import strtobool
from homekit.exception import FormatException
from homekit.tlv import TlvParseException
from homekit import TLV
def load_pairing(file: str) -> dict:
    """Load pairing data from *file*; return None if the file does not exist."""
    try:
        with open(file, 'r') as input_fp:
            return json.load(input_fp)
    except FileNotFoundError:
        return None
def save_pairing(file: str, pairing_data: dict):
    """Persist *pairing_data* to *file* as indented JSON."""
    with open(file, 'w') as output_fp:
        json.dump(pairing_data, output_fp, indent=4)
def create_session(file):
    """Establish a session to a HomeKit accessory using stored pairing data.

    Tries the last-known IP/port first; on any failure falls back to a
    zeroconf lookup and updates *file* with the discovered address.

    :param file: path of the pairing-data JSON file
    :return: (HomeKitHTTPConnection, controller-to-accessory key,
        accessory-to-controller key)
    """
    conn = None
    c2a_key = None
    a2c_key = None
    pairing_data = load_pairing(file)
    if pairing_data is None:
        print('File {file} not found!'.format(file=file))
        sys.exit(-1)
    connected = False
    if 'AccessoryIP' in pairing_data and 'AccessoryPort' in pairing_data:
        # try the cached address first
        accessory_ip = pairing_data['AccessoryIP']
        accessory_port = pairing_data['AccessoryPort']
        conn = HomeKitHTTPConnection(accessory_ip, port=accessory_port)
        try:
            conn.connect()
            c2a_key, a2c_key = get_session_keys(conn, pairing_data)
            connected = True
        except Exception:
            # any failure falls through to rediscovery below
            connected = False
    if not connected:
        device_id = pairing_data['AccessoryPairingID']
        connection_data = find_device_ip_and_port(device_id)
        if connection_data is None:
            print('Device {id} not found'.format(id=device_id))
            sys.exit(-1)
        conn = HomeKitHTTPConnection(connection_data['ip'], port=connection_data['port'])
        # persist the freshly discovered IP/port for the next run
        pairing_data['AccessoryIP'] = connection_data['ip']
        pairing_data['AccessoryPort'] = connection_data['port']
        save_pairing(file, pairing_data)
        c2a_key, a2c_key = get_session_keys(conn, pairing_data)
    return conn, c2a_key, a2c_key
def check_convert_value(val, target_format):
    """Convert *val* to *target_format*, raising FormatException on failure.

    :param val: the original (usually string) value
    :param target_format: one of the CharacteristicFormats constants
    :raises FormatException: if the value cannot be converted
    :return: the converted value (unchanged for data/tlv8, which are only validated)
    """
    if target_format == CharacteristicFormats.bool:
        try:
            val = strtobool(val)
        except ValueError:
            raise FormatException('"{v}" is no valid "{t}"!'.format(v=val, t=target_format))
    if target_format in [CharacteristicFormats.uint64, CharacteristicFormats.uint32,
                         CharacteristicFormats.uint16, CharacteristicFormats.uint8,
                         CharacteristicFormats.int]:
        try:
            val = int(val)
        except ValueError:
            raise FormatException('"{v}" is no valid "{t}"!'.format(v=val, t=target_format))
    if target_format == CharacteristicFormats.float:
        try:
            val = float(val)
        except ValueError:
            raise FormatException('"{v}" is no valid "{t}"!'.format(v=val, t=target_format))
    if target_format == CharacteristicFormats.data:
        # base64 payloads are validated but returned as-is
        try:
            base64.decodebytes(val.encode())
        except binascii.Error:
            raise FormatException('"{v}" is no valid "{t}"!'.format(v=val, t=target_format))
    if target_format == CharacteristicFormats.tlv8:
        try:
            tmp_bytes = base64.decodebytes(val.encode())
            TLV.decode_bytes(tmp_bytes)
        except (binascii.Error, TlvParseException):
            raise FormatException('"{v}" is no valid "{t}"!'.format(v=val, t=target_format))
    return val
return val
| true | true |
f713ad9cc4719c8e9bec1ba17ee8b320554f1bbc | 3,331 | py | Python | tests/test_models/test_engine/test_file_storage.py | yasheymateen/AirBnB_clone | 865cbef47738371f66b9264635a04b983d7accfd | [
"Unlicense"
] | null | null | null | tests/test_models/test_engine/test_file_storage.py | yasheymateen/AirBnB_clone | 865cbef47738371f66b9264635a04b983d7accfd | [
"Unlicense"
] | null | null | null | tests/test_models/test_engine/test_file_storage.py | yasheymateen/AirBnB_clone | 865cbef47738371f66b9264635a04b983d7accfd | [
"Unlicense"
] | null | null | null | #!/usr/bin/python3
"""
This module contains the tests for FileStorage class
"""
import unittest
import io
import sys
import models
from models.engine.file_storage import FileStorage
from models.base_model import BaseModel
from models.user import User
from models.state import State
from models.city import City
from models.amenity import Amenity
from models.place import Place
from models.review import Review
class TestFileStorage(unittest.TestCase):
    """
    Test for class FileStorage and its methods
    """

    def setUp(self):
        """Create a fresh FileStorage instance for each test."""
        self.storage = FileStorage()

    def tearDown(self):
        """Remove the JSON file written by save() so tests stay isolated."""
        import os
        try:
            os.remove(self.storage._FileStorage__file_path)
        except FileNotFoundError:
            pass

    def test_private_class_attributes(self):
        """Name-mangled private attributes must not be reachable unmangled."""
        with self.assertRaises(AttributeError):
            print(self.storage.__objects)
        with self.assertRaises(AttributeError):
            print(self.storage.__file_path)

    def test_file_path(self):
        """Storage persists to 'file.json'."""
        self.assertEqual(self.storage._FileStorage__file_path, "file.json")

    def test_objects(self):
        """The internal object store is a dict."""
        self.assertIs(type(self.storage._FileStorage__objects), dict)

    def test_all(self):
        """all() returns the dict of stored objects."""
        obj_dict = self.storage.all()
        self.assertTrue(type(obj_dict) is dict)

    @staticmethod
    def _keys_for(*objs):
        """Return the storage keys ("<Class>.<id>") for the given objects."""
        return ["{}.{}".format(o.__class__.__name__, o.id) for o in objs]

    def test_new(self):
        """Newly created models register themselves in storage."""
        base1, city1, state1 = BaseModel(), City(), State()
        obj_dict = self.storage.all()
        for key, obj in zip(self._keys_for(base1, city1, state1),
                            (base1, city1, state1)):
            self.assertTrue(key in obj_dict)
            self.assertTrue(obj_dict[key] is obj)

    def _check_save_reload(self):
        """Shared body of test_save/test_reload: save, reload, re-check keys."""
        base1, city1, state1 = BaseModel(), City(), State()
        keys = self._keys_for(base1, city1, state1)
        obj_dict_presave = self.storage.all()
        base1.save()
        self.storage.reload()
        obj_dict_postsave = self.storage.all()
        for key in keys:
            self.assertTrue(key in obj_dict_postsave)
        self.assertTrue(obj_dict_postsave == obj_dict_presave)

    def test_save(self):
        """save() persists objects and keeps them available after reload()."""
        self._check_save_reload()

    def test_reload(self):
        """reload() restores the stored objects from disk."""
        self._check_save_reload()
# Allow running the suite directly: ``python test_file_storage.py``.
if __name__ == '__main__':
    unittest.main()
| 33.31 | 75 | 0.65686 |
import unittest
import io
import sys
import models
from models.engine.file_storage import FileStorage
from models.base_model import BaseModel
from models.user import User
from models.state import State
from models.city import City
from models.amenity import Amenity
from models.place import Place
from models.review import Review
class TestFileStorage(unittest.TestCase):
    """Tests for the FileStorage engine and its public methods."""
    def setUp(self):
        """Create a fresh FileStorage instance for each test."""
        self.storage = FileStorage()
    def tearDown(self):
        """No per-test cleanup."""
        pass
    def test_private_class_attributes(self):
        """Name-mangled private attributes must not be reachable unmangled."""
        with self.assertRaises(AttributeError):
            print(self.storage.__objects)
        with self.assertRaises(AttributeError):
            print(self.storage.__file_path)
    def test_file_path(self):
        """Storage persists to 'file.json'."""
        self.assertEqual(self.storage._FileStorage__file_path, "file.json")
    def test_objects(self):
        """The internal object store is a dict."""
        self.assertIs(type(self.storage._FileStorage__objects), dict)
    def test_all(self):
        """all() returns the dict of stored objects."""
        obj_dict = self.storage.all()
        self.assertTrue(type(obj_dict) is dict)
    def test_new(self):
        """Newly created models register themselves in storage."""
        base1 = BaseModel()
        city1 = City()
        state1 = State()
        base1_id = "{}.{}".format(base1.__class__.__name__, base1.id)
        city1_id = "{}.{}".format(city1.__class__.__name__, city1.id)
        state1_id = "{}.{}".format(state1.__class__.__name__, state1.id)
        obj_dict = self.storage.all()
        self.assertTrue(base1_id in obj_dict)
        self.assertTrue(obj_dict[base1_id] is base1)
        self.assertTrue(city1_id in obj_dict)
        self.assertTrue(state1_id in obj_dict)
        self.assertTrue(obj_dict[city1_id] is city1)
        self.assertTrue(obj_dict[state1_id] is state1)
    def test_save(self):
        """save() persists objects and keeps them available after reload()."""
        base1 = BaseModel()
        city1 = City()
        state1 = State()
        base1_id = "{}.{}".format(base1.__class__.__name__, base1.id)
        city1_id = "{}.{}".format(city1.__class__.__name__, city1.id)
        state1_id = "{}.{}".format(state1.__class__.__name__, state1.id)
        obj_dict_presave = self.storage.all()
        base1.save()
        self.storage.reload()
        obj_dict_postsave = self.storage.all()
        self.assertTrue(base1_id in obj_dict_postsave)
        self.assertTrue(city1_id in obj_dict_postsave)
        self.assertTrue(state1_id in obj_dict_postsave)
        self.assertTrue(obj_dict_postsave == obj_dict_presave)
    def test_reload(self):
        """reload() restores the stored objects from disk."""
        base1 = BaseModel()
        city1 = City()
        state1 = State()
        base1_id = "{}.{}".format(base1.__class__.__name__, base1.id)
        city1_id = "{}.{}".format(city1.__class__.__name__, city1.id)
        state1_id = "{}.{}".format(state1.__class__.__name__, state1.id)
        obj_dict_presave = self.storage.all()
        base1.save()
        self.storage.reload()
        obj_dict_postsave = self.storage.all()
        self.assertTrue(base1_id in obj_dict_postsave)
        self.assertTrue(city1_id in obj_dict_postsave)
        self.assertTrue(state1_id in obj_dict_postsave)
        self.assertTrue(obj_dict_postsave == obj_dict_presave)
# Allow running the suite directly: ``python test_file_storage.py``.
if __name__ == '__main__':
    unittest.main()
| true | true |
f713af60491a19d75d5d98d285087f166e61f127 | 12,467 | py | Python | scripts/asr_language_modeling/neural_rescorer/eval_neural_rescorer.py | PatrykNeubauer/NeMo | 3ada744b884dba5f233f22c6991fc6092c6ca8d0 | [
"Apache-2.0"
] | 2 | 2021-09-21T07:36:20.000Z | 2022-02-05T15:29:04.000Z | scripts/asr_language_modeling/neural_rescorer/eval_neural_rescorer.py | PatrykNeubauer/NeMo | 3ada744b884dba5f233f22c6991fc6092c6ca8d0 | [
"Apache-2.0"
] | null | null | null | scripts/asr_language_modeling/neural_rescorer/eval_neural_rescorer.py | PatrykNeubauer/NeMo | 3ada744b884dba5f233f22c6991fc6092c6ca8d0 | [
"Apache-2.0"
] | 12 | 2021-06-20T08:56:10.000Z | 2022-03-16T19:07:10.000Z | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script would evaluate a neural language model (Transformer) trained with
`examples/nlp/language_modeling/transformer_lm.py' as a rescorer for ASR systems.
Given a trained TransformerLMModel `.nemo` file, this script can be used to re-score the beams obtained from a beam
search decoder of an ASR model.
USAGE:
1. Obtain `.tsv` file with beams and their corresponding scores. Scores can be from a regular beam search decoder or
in fusion with an N-gram LM scores. For a given beam size `beam_size` and a number of examples
for evaluation `num_eval_examples`, it should contain (`beam_size` x `num_eval_examples`) lines of
form `beam_candidate_text \t score`. This file can be generated by `scripts/asr_language_modeling/ngram_lm/eval_beamsearch_ngram.py`.
2. Rescore the candidates:
python eval_neural_rescorer.py
--lm_model=[path to .nemo file of the LM]
--beams_file=[path to beams .tsv file]
--beam_size=[size of the beams]
--eval_manifest=[path to eval manifest .json file]
--batch_size=[batch size used for inference on the LM model]
--alpha=[the value for the parameter rescorer_alpha]
--beta=[the value for the parameter rescorer_beta]
You may find more info on how to use this script at:
https://docs.nvidia.com/deeplearning/nemo/user-guide/docs/en/main/asr/asr_language_modeling.html
"""
import contextlib
import json
from argparse import ArgumentParser
import editdistance
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import torch
import tqdm
from nemo.collections.nlp.models.language_modeling import TransformerLMModel
from nemo.utils import logging
class BeamScoresDataset(torch.utils.data.Dataset):
    """
    Dataset to read the score file containing the beams and their score

    Args:
        data_path: path to the beams file
        tokenizer: tokenizer of the LM model
        manifest_path: manifest `.json` file which contains the ground truths transcripts
        beam_size: the number of beams per sample
        max_seq_length: the maximum length of sequences
    """

    def __init__(self, data_path, tokenizer, manifest_path, beam_size=128, max_seq_length=256):
        """Read the tab-separated beams file and the JSON-lines manifest."""
        self.data = pd.read_csv(data_path, delimiter="\t", header=None)
        self.tokenizer = tokenizer
        self.ground_truths = []
        with open(manifest_path, 'r') as f_orig:
            for line in f_orig:
                item = json.loads(line)
                self.ground_truths.append(item['text'])
        self.beam_size = beam_size
        self.max_seq_length = max_seq_length

    def __len__(self):
        """Number of beam candidates (num utterances * beam_size)."""
        return len(self.data)

    def __getitem__(self, idx):
        """Return tokenized candidate *idx* plus its score/WER bookkeeping.

        Assumes the beams file lists exactly ``beam_size`` consecutive
        candidates per utterance, so candidate ``idx`` belongs to ground
        truth ``idx // beam_size``.
        """
        text = str(self.data[0][idx])
        # BOS + tokens + EOS, right-padded to max_seq_length with pad_id.
        tokens = [self.tokenizer.bos_id] + self.tokenizer.text_to_ids(text) + [self.tokenizer.eos_id]
        input_ids = [self.tokenizer.pad_id] * self.max_seq_length
        input_ids[: len(tokens)] = tokens
        input_ids = np.array(input_ids)
        input_mask = (input_ids != self.tokenizer.pad_id).astype(np.float32)
        acoustic_score = self.data[1][idx]
        # Word-level edit distance against the matching ground truth (WER numerator).
        dist = editdistance.eval(text.split(), self.ground_truths[idx // self.beam_size].split())
        ref_len = len(self.ground_truths[idx // self.beam_size].split())
        len_in_chars = len(str(self.data[0][idx]))
        return input_ids, input_mask, acoustic_score, dist, ref_len, len_in_chars, idx
def linear_search_wer(
    dists, scores1, scores2, total_len, coef_range=(0, 10), coef_steps=10000, param_name='parameter'
):
    """
    Performs a linear (grid) search to find the best coefficient when two sets
    of scores are fused linearly as ``scores1 + coef * scores2``.

    Args:
        dists: Tensor of the distances between the ground truth and the candidates with shape of [number of samples, beam size]
        scores1: Tensor of the first set of scores with shape of [number of samples, beam size]
        scores2: Tensor of the second set of scores with shape of [number of samples, beam size]
        total_len: The total length of all samples
        coef_range: (low, high) search range for the coefficient; expressed in
            units of the ratio of the two scores' mean magnitudes.
            Note: a tuple default avoids the mutable-default-argument pitfall.
        coef_steps: the number of grid points the search range is divided into
        param_name: the name of the parameter to be used in the figure
    Output:
        (best coefficient found, best WER achieved)
    """
    # Scale the nominal range by the relative magnitude of the two score sets
    # so the same range works for scores with very different dynamic ranges.
    scale = scores1.mean().abs().item() / scores2.mean().abs().item()
    left = coef_range[0] * scale
    right = coef_range[1] * scale
    coefs = np.linspace(left, right, coef_steps)
    best_wer = float('inf')  # proper sentinel instead of a magic 10000
    best_coef = left
    wers = []
    for coef in coefs:
        wer = compute_wer(dists, scores1 + coef * scores2, total_len)
        wers.append(wer)
        if wer < best_wer:
            best_wer = wer
            best_coef = coef
    # Visualize the WER curve so the user can sanity-check the search range.
    plt.plot(coefs, wers)
    plt.title(f'WER% after rescoring with different values of {param_name}')
    plt.ylabel('WER%')
    plt.xlabel(param_name)
    plt.show()
    return best_coef, best_wer
def compute_wer(dists, scores, total_len):
    """
    Re-ranks the candidates by their scores and computes the WER obtained when
    the highest-scoring candidate of each sample is selected.

    Args:
        dists: Tensor of the distances between the ground truth and the candidates with shape of [number of samples, beam size]
        scores: Tensor of the scores for candidates with shape of [number of samples, beam size]
        total_len: The total length of all samples

    Output:
        WER (float) computed with the new top candidates
    """
    # Index of the best-scoring candidate per sample, kept 2-D for gather().
    best_indices = scores.max(dim=1, keepdim=True)[1]
    total_dist = dists.gather(dim=1, index=best_indices).sum()
    return (total_dist / total_len).item()
def main():
    """Entry point: rescore beam-search candidates with a Transformer LM.

    Loads the LM from a ``.nemo`` checkpoint, computes an LM score for every
    candidate in the beams file, fuses acoustic/LM/length scores (grid-searching
    for alpha/beta when they are not supplied), reports WERs, and optionally
    writes the rescored candidates back out.
    """
    parser = ArgumentParser()
    parser.add_argument("--lm_model_file", type=str, required=True, help="path to LM model .nemo file")
    parser.add_argument("--beams_file", type=str, required=True, help="path to beams .tsv file")
    parser.add_argument(
        "--eval_manifest", type=str, required=True, help="path to the evaluation `.json` manifest file"
    )
    parser.add_argument("--beam_size", type=int, required=True, help="number of beams per candidate")
    parser.add_argument("--batch_size", type=int, default=256, help="inference batch size")
    parser.add_argument("--alpha", type=float, default=None, help="parameter alpha of the fusion")
    parser.add_argument("--beta", type=float, default=None, help="parameter beta of the fusion")
    parser.add_argument(
        "--scores_output_file", default=None, type=str, help="The optional path to store the rescored beams"
    )
    parser.add_argument(
        "--device", default="cuda", type=str, help="The device to load the model onto to calculate the scores"
    )
    parser.add_argument(
        "--use_amp", action="store_true", help="Whether to use AMP if available to calculate the scores"
    )
    args = parser.parse_args()

    device = args.device
    if device.startswith("cuda") and not torch.cuda.is_available():
        logging.info("cuda is not available! switched to cpu.")
        device = "cpu"

    if args.lm_model_file.endswith(".nemo"):
        logging.info("Attempting to initialize from .nemo file")
        model = TransformerLMModel.restore_from(
            restore_path=args.lm_model_file, map_location=torch.device(device)
        ).eval()
    else:
        # Bug fix: the message used to reference the non-existent `args.model`
        # attribute, which raised AttributeError instead of the intended error.
        raise NotImplementedError(f"Only supports .nemo files, but got: {args.lm_model_file}")

    # The LM's positional-embedding table bounds the usable sequence length.
    max_seq_length = model.encoder._embedding.position_embedding.pos_enc.shape[0]
    dataset = BeamScoresDataset(args.beams_file, model.tokenizer, args.eval_manifest, args.beam_size, max_seq_length)
    data_loader = torch.utils.data.DataLoader(dataset=dataset, batch_size=args.batch_size)

    # Bug fix: `autocast` used to be defined only inside `if args.use_amp:`, so
    # running without --use_amp raised NameError at `with autocast():`.
    # Default to a no-op context manager and upgrade when AMP is available.
    @contextlib.contextmanager
    def autocast():
        yield

    if args.use_amp:
        if torch.cuda.is_available() and hasattr(torch.cuda, 'amp') and hasattr(torch.cuda.amp, 'autocast'):
            logging.info("AMP is enabled!\n")
            autocast = torch.cuda.amp.autocast

    logging.info(f"Rescoring with beam_size: {args.beam_size}")
    logging.info("Calculating the scores...")
    with autocast():
        with torch.no_grad():
            am_scores, lm_scores, dists, ref_lens, lens_in_chars = [], [], [], [], []
            for batch in tqdm.tqdm(data_loader):
                input_ids, input_mask, acoustic_score, dist, ref_len, len_in_chars, idx = batch
                # Trim the batch at the first column that is padding for every
                # sample (argmin of the per-position count of non-pad tokens).
                max_len_in_batch = input_mask.sum(dim=0).argmin().item()
                input_ids, input_mask = input_ids[:, :max_len_in_batch], input_mask[:, :max_len_in_batch]
                if torch.cuda.is_available():
                    input_ids, input_mask = input_ids.to(device), input_mask.to(device)
                    dist, acoustic_score, len_in_chars = (
                        dist.to(device),
                        acoustic_score.to(device),
                        len_in_chars.to(device),
                    )
                # Teacher-forced LM log-likelihood of each candidate, summed
                # over the real (non-pad) target positions.
                log_probs = model.forward(input_ids[:, :-1], input_mask[:, :-1])
                target_log_probs = log_probs.gather(2, input_ids[:, 1:].unsqueeze(2)).squeeze(2)
                neural_lm_score = torch.sum(target_log_probs * input_mask[:, 1:], dim=-1)
                am_scores.append(acoustic_score)
                lm_scores.append(neural_lm_score)
                dists.append(dist)
                ref_lens.append(ref_len)
                lens_in_chars.append(len_in_chars)

    # Reshape the flat per-candidate lists to [num samples, beam size].
    am_scores = torch.cat(am_scores).view(-1, args.beam_size)
    lm_scores = torch.cat(lm_scores).view(-1, args.beam_size)
    dists = torch.cat(dists).view(-1, args.beam_size)
    ref_lens = torch.cat(ref_lens).view(-1, args.beam_size)
    lens_in_chars = torch.cat(lens_in_chars).view(-1, args.beam_size).to(am_scores.dtype)

    total_len = ref_lens[:, 0].sum()
    # WER of the current top-1 candidates and the oracle (best-in-beam) WER.
    model_wer = dists[:, 0].sum() / total_len
    ideal_wer = dists.min(dim=1)[0].sum() / total_len

    if args.alpha is None:
        logging.info("Linear search for alpha...")
        coef1, _ = linear_search_wer(
            dists=dists, scores1=am_scores, scores2=lm_scores, total_len=total_len, param_name='alpha'
        )
        coef1 = np.round(coef1, 3)
        logging.info(f"alpha={coef1} achieved the best WER.")
        logging.info("------------------------------------------------")
    else:
        coef1 = args.alpha

    scores = am_scores + coef1 * lm_scores

    if args.beta is None:
        logging.info("Linear search for beta...")
        coef2, _ = linear_search_wer(
            dists=dists, scores1=scores, scores2=lens_in_chars, total_len=total_len, param_name='beta'
        )
        coef2 = np.round(coef2, 3)
        logging.info(f"beta={coef2} achieved the best WER.")
        logging.info("------------------------------------------------")
    else:
        coef2 = args.beta

    new_scores = am_scores + coef1 * lm_scores + coef2 * lens_in_chars
    rescored_wer = compute_wer(dists, new_scores, total_len)

    logging.info(f"Input beams WER: {np.round(model_wer.item() * 100, 2)}%")
    logging.info("------------------------------------------------")
    logging.info(f" +LM rescoring WER: {np.round(rescored_wer * 100, 2)}%")
    logging.info(f" with alpha={coef1}, beta={coef2}")
    logging.info("------------------------------------------------")
    logging.info(f"Best possible WER: {np.round(ideal_wer.item() * 100, 2)}%")
    logging.info("------------------------------------------------")

    if args.scores_output_file is not None:
        logging.info(f'Saving the candidates with their new scores at `{args.scores_output_file}`...')
        new_scores_flatten = new_scores.flatten()
        with open(args.scores_output_file, "w") as fout:
            for sample_id in range(len(dataset)):
                fout.write(f"{dataset.data[0][sample_id]}\t{new_scores_flatten[sample_id]}\n")
if __name__ == '__main__':
main()
| 43.138408 | 136 | 0.657416 |
import contextlib
import json
from argparse import ArgumentParser
import editdistance
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import torch
import tqdm
from nemo.collections.nlp.models.language_modeling import TransformerLMModel
from nemo.utils import logging
class BeamScoresDataset(torch.utils.data.Dataset):
def __init__(self, data_path, tokenizer, manifest_path, beam_size=128, max_seq_length=256):
self.data = pd.read_csv(data_path, delimiter="\t", header=None)
self.tokenizer = tokenizer
self.ground_truths = []
with open(manifest_path, 'r') as f_orig:
for line in f_orig:
item = json.loads(line)
self.ground_truths.append(item['text'])
self.beam_size = beam_size
self.max_seq_length = max_seq_length
def __len__(self):
return len(self.data)
def __getitem__(self, idx):
text = str(self.data[0][idx])
tokens = [self.tokenizer.bos_id] + self.tokenizer.text_to_ids(text) + [self.tokenizer.eos_id]
input_ids = [self.tokenizer.pad_id] * self.max_seq_length
input_ids[: len(tokens)] = tokens
input_ids = np.array(input_ids)
input_mask = (input_ids != self.tokenizer.pad_id).astype(np.float32)
acoustic_score = self.data[1][idx]
dist = editdistance.eval(text.split(), self.ground_truths[idx // self.beam_size].split())
ref_len = len(self.ground_truths[idx // self.beam_size].split())
len_in_chars = len(str(self.data[0][idx]))
return input_ids, input_mask, acoustic_score, dist, ref_len, len_in_chars, idx
def linear_search_wer(
dists, scores1, scores2, total_len, coef_range=[0, 10], coef_steps=10000, param_name='parameter'
):
scale = scores1.mean().abs().item() / scores2.mean().abs().item()
left = coef_range[0] * scale
right = coef_range[1] * scale
coefs = np.linspace(left, right, coef_steps)
best_wer = 10000
best_coef = left
wers = []
for coef in coefs:
scores = scores1 + coef * scores2
wer = compute_wer(dists, scores, total_len)
wers.append(wer)
if wer < best_wer:
best_wer = wer
best_coef = coef
plt.plot(coefs, wers)
plt.title(f'WER% after rescoring with different values of {param_name}')
plt.ylabel('WER%')
plt.xlabel(param_name)
plt.show()
return best_coef, best_wer
def compute_wer(dists, scores, total_len):
indices = scores.max(dim=1, keepdim=True)[1]
wer = dists.gather(dim=1, index=indices).sum() / total_len
wer = wer.item()
return wer
def main():
parser = ArgumentParser()
parser.add_argument("--lm_model_file", type=str, required=True, help="path to LM model .nemo file")
parser.add_argument("--beams_file", type=str, required=True, help="path to beams .tsv file")
parser.add_argument(
"--eval_manifest", type=str, required=True, help="path to the evaluation `.json` manifest file"
)
parser.add_argument("--beam_size", type=int, required=True, help="number of beams per candidate")
parser.add_argument("--batch_size", type=int, default=256, help="inference batch size")
parser.add_argument("--alpha", type=float, default=None, help="parameter alpha of the fusion")
parser.add_argument("--beta", type=float, default=None, help="parameter beta of the fusion")
parser.add_argument(
"--scores_output_file", default=None, type=str, help="The optional path to store the rescored beams"
)
parser.add_argument(
"--device", default="cuda", type=str, help="The device to load the model onto to calculate the scores"
)
parser.add_argument(
"--use_amp", action="store_true", help="Whether to use AMP if available to calculate the scores"
)
args = parser.parse_args()
device = args.device
if device.startswith("cuda") and not torch.cuda.is_available():
logging.info(f"cuda is not available! switched to cpu.")
device = "cpu"
if args.lm_model_file.endswith(".nemo"):
logging.info("Attempting to initialize from .nemo file")
model = TransformerLMModel.restore_from(
restore_path=args.lm_model_file, map_location=torch.device(device)
).eval()
else:
raise NotImplementedError(f"Only supports .nemo files, but got: {args.model}")
max_seq_length = model.encoder._embedding.position_embedding.pos_enc.shape[0]
dataset = BeamScoresDataset(args.beams_file, model.tokenizer, args.eval_manifest, args.beam_size, max_seq_length)
data_loader = torch.utils.data.DataLoader(dataset=dataset, batch_size=args.batch_size)
if args.use_amp:
if torch.cuda.is_available() and hasattr(torch.cuda, 'amp') and hasattr(torch.cuda.amp, 'autocast'):
logging.info("AMP is enabled!\n")
autocast = torch.cuda.amp.autocast
else:
@contextlib.contextmanager
def autocast():
yield
logging.info(f"Rescoring with beam_size: {args.beam_size}")
logging.info("Calculating the scores...")
with autocast():
with torch.no_grad():
am_scores, lm_scores, dists, ref_lens, lens_in_chars = [], [], [], [], []
for batch in tqdm.tqdm(data_loader):
input_ids, input_mask, acoustic_score, dist, ref_len, len_in_chars, idx = batch
max_len_in_batch = input_mask.sum(dim=0).argmin().item()
input_ids, input_mask = input_ids[:, :max_len_in_batch], input_mask[:, :max_len_in_batch]
if torch.cuda.is_available():
input_ids, input_mask = input_ids.to(device), input_mask.to(device)
dist, acoustic_score, len_in_chars = (
dist.to(device),
acoustic_score.to(device),
len_in_chars.to(device),
)
log_probs = model.forward(input_ids[:, :-1], input_mask[:, :-1])
target_log_probs = log_probs.gather(2, input_ids[:, 1:].unsqueeze(2)).squeeze(2)
neural_lm_score = torch.sum(target_log_probs * input_mask[:, 1:], dim=-1)
am_scores.append(acoustic_score)
lm_scores.append(neural_lm_score)
dists.append(dist)
ref_lens.append(ref_len)
lens_in_chars.append(len_in_chars)
am_scores = torch.cat(am_scores).view(-1, args.beam_size)
lm_scores = torch.cat(lm_scores).view(-1, args.beam_size)
dists = torch.cat(dists).view(-1, args.beam_size)
ref_lens = torch.cat(ref_lens).view(-1, args.beam_size)
lens_in_chars = torch.cat(lens_in_chars).view(-1, args.beam_size).to(am_scores.dtype)
total_len = ref_lens[:, 0].sum()
model_wer = dists[:, 0].sum() / total_len
ideal_wer = dists.min(dim=1)[0].sum() / total_len
if args.alpha is None:
logging.info("Linear search for alpha...")
coef1, _ = linear_search_wer(
dists=dists, scores1=am_scores, scores2=lm_scores, total_len=total_len, param_name='alpha'
)
coef1 = np.round(coef1, 3)
logging.info(f"alpha={coef1} achieved the best WER.")
logging.info(f"------------------------------------------------")
else:
coef1 = args.alpha
scores = am_scores + coef1 * lm_scores
if args.beta is None:
logging.info("Linear search for beta...")
coef2, _ = linear_search_wer(
dists=dists, scores1=scores, scores2=lens_in_chars, total_len=total_len, param_name='beta'
)
coef2 = np.round(coef2, 3)
logging.info(f"beta={coef2} achieved the best WER.")
logging.info(f"------------------------------------------------")
else:
coef2 = args.beta
new_scores = am_scores + coef1 * lm_scores + coef2 * lens_in_chars
rescored_wer = compute_wer(dists, new_scores, total_len)
logging.info(f"Input beams WER: {np.round(model_wer.item() * 100, 2)}%")
logging.info(f"------------------------------------------------")
logging.info(f" +LM rescoring WER: {np.round(rescored_wer * 100, 2)}%")
logging.info(f" with alpha={coef1}, beta={coef2}")
logging.info(f"------------------------------------------------")
logging.info(f"Best possible WER: {np.round(ideal_wer.item() * 100, 2)}%")
logging.info(f"------------------------------------------------")
new_scores_flatten = new_scores.flatten()
if args.scores_output_file is not None:
logging.info(f'Saving the candidates with their new scores at `{args.scores_output_file}`...')
with open(args.scores_output_file, "w") as fout:
for sample_id in range(len(dataset)):
fout.write(f"{dataset.data[0][sample_id]}\t{new_scores_flatten[sample_id]}\n")
if __name__ == '__main__':
main()
| true | true |
f713afaf83175476277e6164a0ad61b115c896f7 | 1,900 | py | Python | magnetic_diffusion/diffusion1D.py | mattzett/numerical_electromagnetics | 07634817ba854a5515c8c31545b735f651878c5e | [
"MIT"
] | null | null | null | magnetic_diffusion/diffusion1D.py | mattzett/numerical_electromagnetics | 07634817ba854a5515c8c31545b735f651878c5e | [
"MIT"
] | null | null | null | magnetic_diffusion/diffusion1D.py | mattzett/numerical_electromagnetics | 07634817ba854a5515c8c31545b735f651878c5e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 21 19:43:50 2022
Illustrating a basic transient magnetic diffusion problem, See Jackson Section 5.18
@author: zettergm
"""
import numpy as np
import scipy.sparse.linalg
import scipy.sparse
from scipy.special import erf
import matplotlib.pyplot as plt
from numpy import pi,sqrt,abs
from difftools import matrix_kernel

# Material parameters (good conductor with free-space permeability)
mu=4*pi*1e-7
sigma=1e6
D=1/mu/sigma # equivalent diffusion coefficient D = 1/(mu*sigma)
a=1
H0=1
nu=1/mu/sigma/a**2  # normalized diffusion rate, used by the analytic solution below

# Size of grid
lz=250
Nmax=200
z=np.linspace(-5*a,5*a,lz)
dz=z[1]-z[0]
dt = 5*dz**2/D/2 # the explicit stability limit would force very slow time stepping; the implicit (BTCS) solve below lets us use a step 5x larger

# Initial condition: field H0 confined to |z| < a (step-function slab)
Hx=np.zeros(lz)
indmin=np.argmin(abs(z+a))
indmax=np.argmin(abs(z-a))
Hx[indmin:indmax]=1

# Matrix defining finite-difference equation for laplacian operator, one-time setup for this problem
Msparse=matrix_kernel(lz,dt,dz,D)
rhs=np.zeros( (lz,1) )

# time iterations: one implicit sparse solve per step
for n in range(0,Nmax):
    # set up time-dependent part of the problem and solve; only interior points
    # are loaded (boundary rows stay zero -- boundary handling presumably lives
    # in matrix_kernel; confirm against difftools)
    for i in range(1,lz-1):
        rhs[i]=Hx[i]
    rhssparse=scipy.sparse.csr_matrix(np.reshape(rhs,[lz,1]))
    Hx=scipy.sparse.linalg.spsolve(Msparse,rhssparse,use_umfpack=True) # umfpack is overkill for this but will presumably work
    # Analytic solution from Jackson eqn. 5.176, evaluated at the same time, for comparison
    HxJ=H0/2*( erf((1+abs(z)/a)/2/sqrt((n+1)*dt*nu)) + erf((1-abs(z)/a)/2/sqrt((n+1)*dt*nu)) )
    # plot results of each time step and pause briefly
    plt.figure(1,dpi=150)
    plt.clf()
    plt.plot(z,HxJ,'o')
    plt.plot(z,Hx)
    plt.xlabel("$x$")
    plt.ylabel("$H_x(z)$")
    plt.title( "$t$ = %6.4f s" % ( (n+1)*dt) )
    plt.ylim((0,H0))
    plt.xlim((-2*a,2*a))
    plt.legend( ("Jackson 5.176","Numerical BTCS") )
    plt.show()
    plt.pause(0.01)
| 27.142857 | 128 | 0.678947 |
import numpy as np
import scipy.sparse.linalg
import scipy.sparse
from scipy.special import erf
import matplotlib.pyplot as plt
from numpy import pi,sqrt,abs
from difftools import matrix_kernel
mu=4*pi*1e-7
sigma=1e6
D=1/mu/sigma
a=1
H0=1
nu=1/mu/sigma/a**2
lz=250
Nmax=200
z=np.linspace(-5*a,5*a,lz)
dz=z[1]-z[0]
dt = 5*dz**2/D/2
Hx=np.zeros(lz)
indmin=np.argmin(abs(z+a))
indmax=np.argmin(abs(z-a))
Hx[indmin:indmax]=1
Msparse=matrix_kernel(lz,dt,dz,D)
rhs=np.zeros( (lz,1) )
for n in range(0,Nmax):
for i in range(1,lz-1):
rhs[i]=Hx[i]
rhssparse=scipy.sparse.csr_matrix(np.reshape(rhs,[lz,1]))
Hx=scipy.sparse.linalg.spsolve(Msparse,rhssparse,use_umfpack=True)
HxJ=H0/2*( erf((1+abs(z)/a)/2/sqrt((n+1)*dt*nu)) + erf((1-abs(z)/a)/2/sqrt((n+1)*dt*nu)) )
plt.figure(1,dpi=150)
plt.clf()
plt.plot(z,HxJ,'o')
plt.plot(z,Hx)
plt.xlabel("$x$")
plt.ylabel("$H_x(z)$")
plt.title( "$t$ = %6.4f s" % ( (n+1)*dt) )
plt.ylim((0,H0))
plt.xlim((-2*a,2*a))
plt.legend( ("Jackson 5.176","Numerical BTCS") )
plt.show()
plt.pause(0.01)
| true | true |
f713b03a2ce3c22466f083dc5ee76b456b3c089f | 2,341 | py | Python | src/code.py | Bikin-Bot/cnnindonesia-news-api | 4b137f5dcfa537fe4ece4fcc484cd6ab9166f5ac | [
"MIT"
] | 28 | 2020-06-29T23:18:48.000Z | 2021-12-19T07:15:32.000Z | src/code.py | Bikin-Bot/cnnindonesia-news-api | 4b137f5dcfa537fe4ece4fcc484cd6ab9166f5ac | [
"MIT"
] | 2 | 2020-11-04T06:53:08.000Z | 2022-03-18T09:06:53.000Z | src/code.py | Bikin-Bot/cnnindonesia-news-api | 4b137f5dcfa537fe4ece4fcc484cd6ab9166f5ac | [
"MIT"
] | 17 | 2020-12-11T01:39:45.000Z | 2022-03-29T01:25:24.000Z | from bs4 import BeautifulSoup
from requests import get
import json
class Script:
    """Scraper for cnnindonesia.com: category listings, article details, search."""

    def query(self, url):
        """Fetch `url` and extract the article teasers listed on the page.

        Returns a list of dicts with judul/link/poster/tipe/waktu keys.
        Teasers missing any of those elements are skipped (best effort).
        """
        datas = get(url)
        soup = BeautifulSoup(datas.text, 'html.parser')
        tag = soup.find_all('article')
        data = []
        for i in tag:
            try:
                title = i.find('h2').text
                link = i.find('a').get('href')
                gambar = i.find('img').get('src')
                tipe = i.find('span', class_="kanal").text
                waktu = i.find('span', class_="date").text
                data.append({
                    "judul": title,
                    "link": link,
                    "poster": gambar,
                    "tipe": tipe,
                    "waktu": waktu
                })
            except Exception:
                # Bug fix: a bare `except:` also swallowed KeyboardInterrupt /
                # SystemExit; catch Exception to keep the best-effort skip
                # without hiding process-control signals.
                pass
        return data

    def index(self):
        return self.query('https://www.cnnindonesia.com/')

    def nasional(self):
        return self.query('https://www.cnnindonesia.com/nasional')

    def internasional(self):
        return self.query('https://www.cnnindonesia.com/internasional')

    def ekonomi(self):
        return self.query('https://www.cnnindonesia.com/ekonomi')

    def olahraga(self):
        return self.query('https://www.cnnindonesia.com/olahraga')

    def teknologi(self):
        return self.query('https://www.cnnindonesia.com/teknologi')

    def hiburan(self):
        return self.query('https://www.cnnindonesia.com/hiburan')

    def social(self):
        return self.query('https://www.cnnindonesia.com/gaya-hidup')

    def detail(self, url):
        """Fetch a single article page and return [{judul, poster, body}];
        on any failure a [{"message": "network error"}] placeholder is returned."""
        data = []
        try:
            req = get(url)
            soup = BeautifulSoup(req.text, 'html.parser')
            tag = soup.find('div', class_="detail_text")
            gambar = soup.find('div', class_='media_artikel').find('img').get('src')
            judul = soup.find('h1', class_='title').text
            body = tag.text
            data.append({
                "judul": judul,
                "poster": gambar,
                "body": body,
            })
        except Exception:
            # Bug fix: narrowed from a bare `except:` (see query()).
            data.append({
                "message": "network error",
            })
        return data

    def search(self, q):
        """Search articles for `q`.

        Bug fix: the query is URL-encoded so spaces and special characters
        no longer produce a malformed request URL.
        """
        from urllib.parse import quote_plus
        return self.query('https://www.cnnindonesia.com/search/?query=' + quote_plus(q))
if __name__ != '__main__':
Code = Script() | 30.012821 | 84 | 0.513883 | from bs4 import BeautifulSoup
from requests import get
import json
class Script:
def query(self, url):
datas = get(url)
soup = BeautifulSoup(datas.text, 'html.parser')
tag = soup.find_all('article')
data = []
for i in tag:
try:
title = i.find('h2').text
link = i.find('a').get('href')
gambar = i.find('img').get('src')
tipe = i.find('span', class_="kanal").text
waktu = i.find('span', class_="date").text
data.append({
"judul": title,
"link": link,
"poster": gambar,
"tipe": tipe,
"waktu": waktu
})
except:
pass
return data
def index(self):
return self.query('https://www.cnnindonesia.com/')
def nasional(self):
return self.query('https://www.cnnindonesia.com/nasional')
def internasional(self):
return self.query('https://www.cnnindonesia.com/internasional')
def ekonomi(self):
return self.query('https://www.cnnindonesia.com/ekonomi')
def olahraga(self):
return self.query('https://www.cnnindonesia.com/olahraga')
def teknologi(self):
return self.query('https://www.cnnindonesia.com/teknologi')
def hiburan(self):
return self.query('https://www.cnnindonesia.com/hiburan')
def social(self):
return self.query('https://www.cnnindonesia.com/gaya-hidup')
def detail(self, url):
data = []
try:
req = get(url)
soup = BeautifulSoup(req.text, 'html.parser')
tag = soup.find('div', class_="detail_text")
gambar = soup.find('div', class_='media_artikel').find('img').get('src')
judul = soup.find('h1', class_='title').text
body = tag.text
data.append({
"judul": judul,
"poster": gambar,
"body": body,
})
except:
data.append({
"message": "network error",
})
return data
def search(self,q):
return self.query('https://www.cnnindonesia.com/search/?query=' + q)
if __name__ != '__main__':
Code = Script() | true | true |
f713b083499d28d14fa6a7c7ae53ff0fb3fad718 | 4,785 | py | Python | extensions/utils.py | Yang-YiFan/DiracDeltaNet | 36487542422d7573fec6e852b9eece18c6cbce21 | [
"Apache-2.0"
] | 30 | 2018-12-27T06:56:26.000Z | 2022-01-10T10:43:53.000Z | extensions/utils.py | heheda365/DiracDeltaNet | 36487542422d7573fec6e852b9eece18c6cbce21 | [
"Apache-2.0"
] | 4 | 2019-11-05T10:44:38.000Z | 2021-06-22T18:41:08.000Z | extensions/utils.py | heheda365/DiracDeltaNet | 36487542422d7573fec6e852b9eece18c6cbce21 | [
"Apache-2.0"
] | 12 | 2018-12-21T08:36:15.000Z | 2021-12-16T08:38:27.000Z | '''Some helper functions for PyTorch, including:
- get_mean_and_std: calculate the mean and std value of dataset.
- msr_init: net parameter initialization.
- progress_bar: progress bar mimic xlua.progress.
'''
import os
import sys
import time
import math
import torch
import torch.nn as nn
import torch.nn.init as init
import numpy as np
def get_mean_and_std(dataset):
    """Compute the per-channel mean and std of a 3-channel image dataset.

    Iterates the dataset one sample at a time and averages the per-sample
    channel statistics over the whole dataset.
    """
    loader = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=True, num_workers=2)
    print('==> Computing mean and std..')
    mean = torch.zeros(3)
    std = torch.zeros(3)
    for inputs, _ in loader:
        for channel in range(3):
            plane = inputs[:, channel, :, :]
            mean[channel] += plane.mean()
            std[channel] += plane.std()
    num_samples = len(dataset)
    mean.div_(num_samples)
    std.div_(num_samples)
    return mean, std
def init_params(net):
    '''Init layer parameters.

    Conv layers get Kaiming-normal weights (fan_out), batch-norm layers get
    unit weight / zero bias, linear layers get small-std normal weights.
    '''
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            # Use the in-place `_` variants; the bare names are deprecated aliases.
            init.kaiming_normal_(m.weight, mode='fan_out')
            # Bug fix: `if m.bias:` raises RuntimeError for multi-element bias
            # tensors ("Boolean value of Tensor ... is ambiguous"); the correct
            # check for an optional parameter is against None.
            if m.bias is not None:
                init.constant_(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            init.constant_(m.weight, 1)
            init.constant_(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal_(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant_(m.bias, 0)
# Query the terminal width for the progress bar.  Bug fix: `stty size` prints
# nothing when stdout is not attached to a tty (pipes, CI, IDEs), in which
# case the original two-value unpacking crashed with ValueError at import
# time; fall back to a conventional 80-column width instead.
try:
    _, term_width = os.popen('stty size', 'r').read().split()
    term_width = int(term_width)
except ValueError:
    term_width = 80

TOTAL_BAR_LENGTH = 65.
last_time = time.time()
begin_time = last_time
def progress_bar(current, total, msg=None):
    """Render a one-line terminal progress bar (mimics xlua.progress).

    Args:
        current: zero-based index of the step that just finished.
        total: total number of steps.
        msg: optional extra status text appended after the timing info.

    Uses the module-level globals `term_width`, `TOTAL_BAR_LENGTH`,
    `last_time` and `begin_time`; `begin_time` is reset when `current == 0`
    so each new bar starts a fresh total timer.
    """
    global last_time, begin_time
    if current == 0:
        begin_time = time.time()  # Reset for new bar.

    # Bar geometry: `cur_len` '=' chars done, one '>' head, rest '.'.
    cur_len = int(TOTAL_BAR_LENGTH*current/total)
    rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1

    sys.stdout.write(' [')
    for i in range(cur_len):
        sys.stdout.write('=')
    sys.stdout.write('>')
    for i in range(rest_len):
        sys.stdout.write('.')
    sys.stdout.write(']')

    # Per-step and cumulative timing since the bar was (re)started.
    cur_time = time.time()
    step_time = cur_time - last_time
    last_time = cur_time
    tot_time = cur_time - begin_time

    L = []
    L.append(' Step: %s' % format_time(step_time))
    L.append(' | Tot: %s' % format_time(tot_time))
    if msg:
        L.append(' | ' + msg)

    msg = ''.join(L)
    sys.stdout.write(msg)
    # Pad with spaces to overwrite any leftovers from a longer previous line.
    for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):
        sys.stdout.write(' ')

    # Go back to the center of the bar.
    for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2):
        sys.stdout.write('\b')
    sys.stdout.write(' %d/%d ' % (current+1, total))

    # Carriage-return to redraw in place until the final step, then newline.
    if current < total-1:
        sys.stdout.write('\r')
    else:
        sys.stdout.write('\n')
    sys.stdout.flush()
def format_time(seconds):
    """Format a duration in seconds as a compact string, e.g. '1h1m'.

    At most the two most significant non-zero units are shown, chosen from
    days (D), hours (h), minutes (m), seconds (s) and milliseconds (ms).
    A zero duration is rendered as '0ms'.
    """
    days = int(seconds / 3600 / 24)
    seconds -= days * 3600 * 24
    hours = int(seconds / 3600)
    seconds -= hours * 3600
    minutes = int(seconds / 60)
    seconds -= minutes * 60
    whole_seconds = int(seconds)
    millis = int((seconds - whole_seconds) * 1000)

    parts = [(days, 'D'), (hours, 'h'), (minutes, 'm'), (whole_seconds, 's'), (millis, 'ms')]
    out = ''
    shown = 0
    for value, suffix in parts:
        if shown >= 2:
            break
        if value > 0:
            out += str(value) + suffix
            shown += 1
    return out if out else '0ms'
class Cutout(object):
    """Randomly mask out one or more patches from an image.

    Args:
        n_holes (int): Number of patches to cut out of each image.
        length (int): The length (in pixels) of each square patch.
    """
    def __init__(self, n_holes, length):
        self.n_holes = n_holes
        self.length = length

    def __call__(self, img):
        """
        Args:
            img (Tensor): Tensor image of size (C, H, W).
        Returns:
            Tensor: Image with n_holes of dimension length x length cut out of it.
        """
        h = img.size(1)
        w = img.size(2)

        # Single-channel mask; later broadcast across all channels.
        mask = np.ones((h, w), np.float32)

        for n in range(self.n_holes):
            # Patch centers are sampled uniformly; patches are clipped at the
            # borders, so holes near an edge cover fewer than length x length pixels.
            y = np.random.randint(h)
            x = np.random.randint(w)

            y1 = np.clip(y - self.length // 2, 0, h)
            y2 = np.clip(y + self.length // 2, 0, h)
            x1 = np.clip(x - self.length // 2, 0, w)
            x2 = np.clip(x + self.length // 2, 0, w)

            mask[y1: y2, x1: x2] = 0.

        mask = torch.from_numpy(mask)
        mask = mask.expand_as(img)
        img = img * mask
return img | 28.482143 | 97 | 0.543783 | import os
import sys
import time
import math
import torch
import torch.nn as nn
import torch.nn.init as init
import numpy as np
def get_mean_and_std(dataset):
dataloader = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=True, num_workers=2)
mean = torch.zeros(3)
std = torch.zeros(3)
print('==> Computing mean and std..')
for inputs, targets in dataloader:
for i in range(3):
mean[i] += inputs[:,i,:,:].mean()
std[i] += inputs[:,i,:,:].std()
mean.div_(len(dataset))
std.div_(len(dataset))
return mean, std
def init_params(net):
for m in net.modules():
if isinstance(m, nn.Conv2d):
init.kaiming_normal(m.weight, mode='fan_out')
if m.bias:
init.constant(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
init.constant(m.weight, 1)
init.constant(m.bias, 0)
elif isinstance(m, nn.Linear):
init.normal(m.weight, std=1e-3)
if m.bias:
init.constant(m.bias, 0)
_, term_width = os.popen('stty size', 'r').read().split()
term_width = int(term_width)
TOTAL_BAR_LENGTH = 65.
last_time = time.time()
begin_time = last_time
def progress_bar(current, total, msg=None):
global last_time, begin_time
if current == 0:
begin_time = time.time()
cur_len = int(TOTAL_BAR_LENGTH*current/total)
rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1
sys.stdout.write(' [')
for i in range(cur_len):
sys.stdout.write('=')
sys.stdout.write('>')
for i in range(rest_len):
sys.stdout.write('.')
sys.stdout.write(']')
cur_time = time.time()
step_time = cur_time - last_time
last_time = cur_time
tot_time = cur_time - begin_time
L = []
L.append(' Step: %s' % format_time(step_time))
L.append(' | Tot: %s' % format_time(tot_time))
if msg:
L.append(' | ' + msg)
msg = ''.join(L)
sys.stdout.write(msg)
for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):
sys.stdout.write(' ')
for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2):
sys.stdout.write('\b')
sys.stdout.write(' %d/%d ' % (current+1, total))
if current < total-1:
sys.stdout.write('\r')
else:
sys.stdout.write('\n')
sys.stdout.flush()
def format_time(seconds):
days = int(seconds / 3600/24)
seconds = seconds - days*3600*24
hours = int(seconds / 3600)
seconds = seconds - hours*3600
minutes = int(seconds / 60)
seconds = seconds - minutes*60
secondsf = int(seconds)
seconds = seconds - secondsf
millis = int(seconds*1000)
f = ''
i = 1
if days > 0:
f += str(days) + 'D'
i += 1
if hours > 0 and i <= 2:
f += str(hours) + 'h'
i += 1
if minutes > 0 and i <= 2:
f += str(minutes) + 'm'
i += 1
if secondsf > 0 and i <= 2:
f += str(secondsf) + 's'
i += 1
if millis > 0 and i <= 2:
f += str(millis) + 'ms'
i += 1
if f == '':
f = '0ms'
return f
class Cutout(object):
def __init__(self, n_holes, length):
self.n_holes = n_holes
self.length = length
def __call__(self, img):
h = img.size(1)
w = img.size(2)
mask = np.ones((h, w), np.float32)
for n in range(self.n_holes):
y = np.random.randint(h)
x = np.random.randint(w)
y1 = np.clip(y - self.length // 2, 0, h)
y2 = np.clip(y + self.length // 2, 0, h)
x1 = np.clip(x - self.length // 2, 0, w)
x2 = np.clip(x + self.length // 2, 0, w)
mask[y1: y2, x1: x2] = 0.
mask = torch.from_numpy(mask)
mask = mask.expand_as(img)
img = img * mask
return img | true | true |
f713b17335259f44d8fc254662e6850b184a61d9 | 9,180 | py | Python | venv/lib/python3.6/site-packages/pip/_vendor/lockfile/__init__.py | aitoehigie/britecore_flask | eef1873dbe6b2cc21f770bc6dec783007ae4493b | [
"MIT"
] | null | null | null | venv/lib/python3.6/site-packages/pip/_vendor/lockfile/__init__.py | aitoehigie/britecore_flask | eef1873dbe6b2cc21f770bc6dec783007ae4493b | [
"MIT"
] | 1 | 2021-06-01T23:32:38.000Z | 2021-06-01T23:32:38.000Z | venv/lib/python3.6/site-packages/pip/_vendor/lockfile/__init__.py | aitoehigie/britecore_flask | eef1873dbe6b2cc21f770bc6dec783007ae4493b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
lockfile.py - Platform-independent advisory file locks.
Requires Python 2.5 unless you apply 2.4.diff
Locking is done on a per-thread basis instead of a per-process basis.
Usage:
>>> lock = LockFile('somefile')
>>> try:
... lock.acquire()
... except AlreadyLocked:
... print 'somefile', 'is locked already.'
... except LockFailed:
... print 'somefile', 'can\\'t be locked.'
... else:
... print 'got lock'
got lock
>>> print lock.is_locked()
True
>>> lock.release()
>>> lock = LockFile('somefile')
>>> print lock.is_locked()
False
>>> with lock:
... print lock.is_locked()
True
>>> print lock.is_locked()
False
>>> lock = LockFile('somefile')
>>> # It is okay to lock twice from the same thread...
>>> with lock:
... lock.acquire()
...
>>> # Though no counter is kept, so you can't unlock multiple times...
>>> print lock.is_locked()
False
Exceptions:
Error - base class for other exceptions
LockError - base class for all locking exceptions
AlreadyLocked - Another thread or process already holds the lock
LockFailed - Lock failed for some other reason
UnlockError - base class for all unlocking exceptions
AlreadyUnlocked - File was not locked.
NotMyLock - File was locked but not by the current thread/process
"""
from __future__ import absolute_import
import functools
import os
import socket
import threading
import warnings
# Work with PEP8 and non-PEP8 versions of threading module.
if not hasattr(threading, "current_thread"):
threading.current_thread = threading.currentThread
if not hasattr(threading.Thread, "get_name"):
threading.Thread.get_name = threading.Thread.getName
__all__ = [
"Error",
"LockError",
"LockTimeout",
"AlreadyLocked",
"LockFailed",
"UnlockError",
"NotLocked",
"NotMyLock",
"LinkFileLock",
"MkdirFileLock",
"SQLiteFileLock",
"LockBase",
"locked",
]
class Error(Exception):
    """Root of the lockfile exception hierarchy.

    Every exception raised by this package derives from this class, so
    callers can catch ``Error`` to handle any lockfile failure.
    """
class LockError(Error):
    """Base class for errors raised while trying to acquire the lock."""
class LockTimeout(LockError):
    """The lock could not be acquired within the caller-supplied timeout."""
class AlreadyLocked(LockError):
    """Another thread or process already holds the lock on the file."""
class LockFailed(LockError):
    """Creating the lock file failed for a reason other than contention."""
class UnlockError(Error):
    """Base class for errors raised while trying to release the lock."""
class NotLocked(UnlockError):
    """An attempt was made to release a lock that is not currently held."""
class NotMyLock(UnlockError):
    """The file is locked, but by a different thread or process."""
class _SharedBase(object):
def __init__(self, path):
self.path = path
def acquire(self, timeout=None):
"""
Acquire the lock.
* If timeout is omitted (or None), wait forever trying to lock the
file.
* If timeout > 0, try to acquire the lock for that many seconds. If
the lock period expires and the file is still locked, raise
LockTimeout.
* If timeout <= 0, raise AlreadyLocked immediately if the file is
already locked.
"""
raise NotImplemented("implement in subclass")
def release(self):
"""
Release the lock.
If the file is not locked, raise NotLocked.
"""
raise NotImplemented("implement in subclass")
def __enter__(self):
"""
Context manager support.
"""
self.acquire()
return self
def __exit__(self, *_exc):
"""
Context manager support.
"""
self.release()
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.path)
class LockBase(_SharedBase):
    """Base class for platform-specific lock classes."""
    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = LockBase('somefile')
        >>> lock = LockBase('somefile', threaded=False)
        """
        super(LockBase, self).__init__(path)
        self.lock_file = os.path.abspath(path) + ".lock"
        self.hostname = socket.gethostname()
        self.pid = os.getpid()
        if threaded:
            t = threading.current_thread()
            # Thread objects in Python 2.4 and earlier do not have ident
            # attrs.  Worm around that.
            ident = getattr(t, "ident", hash(t))
            self.tname = "-%x" % (ident & 0xffffffff)
        else:
            self.tname = ""
        dirname = os.path.dirname(self.lock_file)
        # unique name is mostly about the current process, but must
        # also contain the path -- otherwise, two adjacent locked
        # files conflict (one file gets locked, creating lock-file and
        # unique file, the other one gets locked, creating lock-file
        # and overwriting the already existing lock-file, then one
        # gets unlocked, deleting both lock-file and unique file,
        # finally the last lock errors out upon releasing.
        self.unique_name = os.path.join(
            dirname,
            "%s%s.%s%s" % (self.hostname, self.tname, self.pid, hash(self.path)),
        )
        self.timeout = timeout
    def is_locked(self):
        """
        Tell whether or not the file is locked.
        """
        # Bug fix: NotImplemented is not callable -- raise the intended
        # NotImplementedError instead (same fix in the two methods below).
        raise NotImplementedError("implement in subclass")
    def i_am_locking(self):
        """
        Return True if this object is locking the file.
        """
        raise NotImplementedError("implement in subclass")
    def break_lock(self):
        """
        Remove a lock.  Useful if a locking thread failed to unlock.
        """
        raise NotImplementedError("implement in subclass")
    def __repr__(self):
        return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, self.path)
def _fl_helper(cls, mod, *args, **kwds):
warnings.warn(
"Import from %s module instead of lockfile package" % mod,
DeprecationWarning,
stacklevel=2,
)
# This is a bit funky, but it's only for awhile. The way the unit tests
# are constructed this function winds up as an unbound method, so it
# actually takes three args, not two. We want to toss out self.
if not isinstance(args[0], str):
# We are testing, avoid the first arg
args = args[1:]
if len(args) == 1 and not kwds:
kwds["threaded"] = True
return cls(*args, **kwds)
def LinkFileLock(*args, **kwds):
    """Deprecated factory kept for backwards compatibility.

    New code should import LinkLockFile from lockfile.linklockfile directly.
    """
    from . import linklockfile
    return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", *args, **kwds)
def MkdirFileLock(*args, **kwds):
    """Deprecated factory kept for backwards compatibility.

    New code should import MkdirLockFile from lockfile.mkdirlockfile directly.
    """
    from . import mkdirlockfile
    return _fl_helper(
        mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", *args, **kwds
    )
def SQLiteFileLock(*args, **kwds):
    """Deprecated factory kept for backwards compatibility.

    New code should import SQLiteLockFile from lockfile.sqlitelockfile directly.
    """
    from . import sqlitelockfile
    return _fl_helper(
        sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", *args, **kwds
    )
def locked(path, timeout=None):
    """Decorator that serializes calls to the wrapped function via a FileLock.

    Arguments:
     - path: path for the lockfile.
     - timeout (optional): timeout for acquiring the lock.

    Usage:
        @locked('/var/run/myname', timeout=0)
        def myname(...):
            ...
    """
    def decor(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # The with-statement acquires on entry and releases on exit,
            # exactly like the explicit acquire/try/finally/release pattern.
            with FileLock(path, timeout=timeout):
                return func(*args, **kwargs)
        return wrapper
    return decor
# Choose the platform-appropriate default lock implementation: hard-link
# based locking where os.link exists (POSIX), mkdir-based otherwise.
if hasattr(os, "link"):
    from . import linklockfile as _llf
    LockFile = _llf.LinkLockFile
else:
    from . import mkdirlockfile as _mlf
    LockFile = _mlf.MkdirLockFile
# Backwards-compatible alias for the historical export name.
FileLock = LockFile
| 24.285714 | 88 | 0.611329 |
from __future__ import absolute_import
import functools
import os
import socket
import threading
import warnings
if not hasattr(threading, "current_thread"):
threading.current_thread = threading.currentThread
if not hasattr(threading.Thread, "get_name"):
threading.Thread.get_name = threading.Thread.getName
__all__ = [
"Error",
"LockError",
"LockTimeout",
"AlreadyLocked",
"LockFailed",
"UnlockError",
"NotLocked",
"NotMyLock",
"LinkFileLock",
"MkdirFileLock",
"SQLiteFileLock",
"LockBase",
"locked",
]
class Error(Exception):
pass
class LockError(Error):
pass
class LockTimeout(LockError):
pass
class AlreadyLocked(LockError):
pass
class LockFailed(LockError):
pass
class UnlockError(Error):
pass
class NotLocked(UnlockError):
pass
class NotMyLock(UnlockError):
pass
class _SharedBase(object):
def __init__(self, path):
self.path = path
def acquire(self, timeout=None):
raise NotImplemented("implement in subclass")
def release(self):
raise NotImplemented("implement in subclass")
def __enter__(self):
self.acquire()
return self
def __exit__(self, *_exc):
self.release()
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.path)
class LockBase(_SharedBase):
    """Base class for platform-specific lock classes."""
    def __init__(self, path, threaded=True, timeout=None):
        super(LockBase, self).__init__(path)
        self.lock_file = os.path.abspath(path) + ".lock"
        self.hostname = socket.gethostname()
        self.pid = os.getpid()
        if threaded:
            t = threading.current_thread()
            # Older Pythons lack Thread.ident; fall back to hash(t).
            ident = getattr(t, "ident", hash(t))
            self.tname = "-%x" % (ident & 0xffffffff)
        else:
            self.tname = ""
        dirname = os.path.dirname(self.lock_file)
        # The unique name must include the path hash so that two adjacent
        # locked files do not overwrite each other's lock artifacts.
        self.unique_name = os.path.join(
            dirname,
            "%s%s.%s%s" % (self.hostname, self.tname, self.pid, hash(self.path)),
        )
        self.timeout = timeout
    def is_locked(self):
        """Tell whether or not the file is locked."""
        # Bug fix: raise NotImplementedError, not the uncallable
        # NotImplemented singleton (same fix in the two methods below).
        raise NotImplementedError("implement in subclass")
    def i_am_locking(self):
        """Return True if this object is locking the file."""
        raise NotImplementedError("implement in subclass")
    def break_lock(self):
        """Remove a lock.  Useful if a locking thread failed to unlock."""
        raise NotImplementedError("implement in subclass")
    def __repr__(self):
        return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name, self.path)
def _fl_helper(cls, mod, *args, **kwds):
warnings.warn(
"Import from %s module instead of lockfile package" % mod,
DeprecationWarning,
stacklevel=2,
)
# are constructed this function winds up as an unbound method, so it
# actually takes three args, not two. We want to toss out self.
if not isinstance(args[0], str):
# We are testing, avoid the first arg
args = args[1:]
if len(args) == 1 and not kwds:
kwds["threaded"] = True
return cls(*args, **kwds)
def LinkFileLock(*args, **kwds):
from . import linklockfile
return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile", *args, **kwds)
def MkdirFileLock(*args, **kwds):
from . import mkdirlockfile
return _fl_helper(
mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile", *args, **kwds
)
def SQLiteFileLock(*args, **kwds):
from . import sqlitelockfile
return _fl_helper(
sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile", *args, **kwds
)
def locked(path, timeout=None):
def decor(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
lock = FileLock(path, timeout=timeout)
lock.acquire()
try:
return func(*args, **kwargs)
finally:
lock.release()
return wrapper
return decor
if hasattr(os, "link"):
from . import linklockfile as _llf
LockFile = _llf.LinkLockFile
else:
from . import mkdirlockfile as _mlf
LockFile = _mlf.MkdirLockFile
FileLock = LockFile
| true | true |
f713b1ea0c11d8b85504679217766f2c5726271f | 12,913 | py | Python | tests/ut/python/ops/test_tensor_slice.py | XinYao1994/mindspore | 2c1a2bf752a1fde311caddba22633d2f4f63cb4e | [
"Apache-2.0"
] | 2 | 2020-04-28T03:49:10.000Z | 2020-04-28T03:49:13.000Z | tests/ut/python/ops/test_tensor_slice.py | XinYao1994/mindspore | 2c1a2bf752a1fde311caddba22633d2f4f63cb4e | [
"Apache-2.0"
] | null | null | null | tests/ut/python/ops/test_tensor_slice.py | XinYao1994/mindspore | 2c1a2bf752a1fde311caddba22633d2f4f63cb4e | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test_tensor_slice """
import numpy as np
import pytest
from mindspore import Tensor
from mindspore import context
from mindspore import dtype as mstype
from mindspore.nn import Cell
from ....mindspore_test_framework.mindspore_test import mindspore_test
from ....mindspore_test_framework.pipeline.forward.compile_forward \
import pipeline_for_compile_forward_ge_graph_for_case_by_case_config
class NetWorkSlicePositive(Cell):
    """Exercises slicing with positive and negative bounds/steps on a 3-D tensor."""
    def __init__(self):
        super(NetWorkSlicePositive, self).__init__()
        # Constants whose shapes match the expected shape of each slice result;
        # the additions in construct() only succeed if the slices produce them.
        self.tensor_ret0 = Tensor(np.ones([1, 2, 2], np.int32))
        self.tensor_ret1 = Tensor(np.ones([4, 7, 4], np.int32))
        self.tensor_ret2 = Tensor(np.ones([6, 8, 10], np.int32))
        self.tensor_ret3 = Tensor(np.ones([3, 8, 10], np.int32))
    def construct(self, tensor):
        # Input is assumed to be shape (6, 8, 10) -- see the test fixtures.
        ret0 = tensor[3:4:3, 1:5:2, 3:6:2] + self.tensor_ret0
        ret1 = tensor[-6:4:1, 7:-8:-1, ::3] + self.tensor_ret1
        ret2 = tensor[::, ::, ::] + self.tensor_ret2
        ret3 = tensor[::2] + self.tensor_ret3
        return ret0, ret1, ret2, ret3
class NetWorkSliceEllipsis(Cell):
    """Exercises ellipsis, None (new axis) and bool indexing on a 4-D tensor."""
    def __init__(self):
        super(NetWorkSliceEllipsis, self).__init__()
        # Expected result shapes for each indexing form below.
        self.tensor_ret0 = Tensor(np.ones([2, 7, 8], np.int32))
        self.tensor_ret1 = Tensor(np.ones([6, 7, 8, 9], np.int32))
        self.tensor_ret2 = Tensor(np.ones([1, 6, 7, 8, 9], np.int32))
    def construct(self, tensor):
        ret0 = tensor[0:4:2, ..., 1] + self.tensor_ret0
        ret1 = tensor[...] + self.tensor_ret1
        # Both None and True prepend a new leading axis of length 1.
        ret2 = tensor[None] + self.tensor_ret2
        ret3 = tensor[True] + self.tensor_ret2
        return ret0, ret1, ret2, ret3
class NetWorkReduceDimension(Cell):
    """Exercises integer indexing that reduces tensor rank (up to a scalar)."""
    def __init__(self):
        super(NetWorkReduceDimension, self).__init__()
        # Expected result shapes; integer indices remove the indexed axis.
        self.tensor_ret0 = Tensor(np.ones([2, 4, 1], np.int32))
        self.tensor_ret1 = Tensor(np.ones([3, 4], np.int32))
        self.tensor_ret2 = Tensor(np.ones([6, 8], np.int32))
        self.tensor_ret3 = Tensor(np.array(8, np.int32))
        self.tensor_ret4 = Tensor(np.ones([8, 10], np.int32))
    def construct(self, tensor):
        ret0 = tensor[0:6:3, 1:5:1, 3:5:2] + self.tensor_ret0
        ret1 = tensor[::2, 1, ::3] + self.tensor_ret1
        ret2 = tensor[::, ::, 0] + self.tensor_ret2
        # Three integer indices reduce a 3-D tensor to a scalar.
        ret3 = tensor[3, 2, 5] + self.tensor_ret3
        ret4 = tensor[1] + self.tensor_ret4
        return ret0, ret1, ret2, ret3, ret4
class NetWorkStepNegative(Cell):
    """Exercises slicing with a negative step (reversed axis traversal)."""
    def __init__(self):
        super(NetWorkStepNegative, self).__init__()
        # Expected result shape for the slice in construct().
        self.tensor_ret = Tensor(np.ones([6, 5, 10], np.int32))
    def construct(self, tensor):
        ret = tensor[::1, -5::, ::-1] + self.tensor_ret
        return ret
class NetWorkReduceToScalar(Cell):
    """Exercises full integer indexing that reduces a 3-D tensor to a scalar."""
    def __init__(self):
        super(NetWorkReduceToScalar, self).__init__()
        self.tensor_ret = Tensor(np.array(9, np.int32))
    def construct(self, tensor):
        ret = tensor[2, 3, 4] + self.tensor_ret
        return ret
class TensorAssignWithSliceError1(Cell):
    """Invalid tuple-of-slices assignment (start < stop with negative step);
    used by the tests to assert that a ValueError is raised."""
    def __init__(self):
        super(TensorAssignWithSliceError1, self).__init__()
    def construct(self, a, b):
        a[1:3:-1,::] = b
        return a
class TensorAssignWithSliceError2(Cell):
    """Invalid single-slice assignment (start < stop with negative step);
    used by the tests to assert that a ValueError is raised."""
    def __init__(self):
        super(TensorAssignWithSliceError2, self).__init__()
    def construct(self, a, b):
        a[1:3:-1] = b
        return a
class TensorAssignWithSlice2(Cell):
    """Exercises slice assignment forms on a 1-D tensor (tensor and scalar RHS)."""
    def __init__(self):
        super(TensorAssignWithSlice2, self).__init__()
    def construct(self, a, b):
        a[1:5] = b
        a[3:4] = 5
        a[-1:1:-1] = b
        a[-1:3:-1] = 5
        a[::] = b
        a[::] = 9
        return a
class TensorAssignWithSlice(Cell):
    """Exercises slice assignment forms on a 3-D tensor (tensor and scalar RHS)."""
    def __init__(self):
        super(TensorAssignWithSlice, self).__init__()
        # Scalar value used as an assignment source alongside tensor `b`.
        self.c = 2
    def construct(self, a, b):
        a[1:3,::] = b
        a[2:3:,3:] = b
        a[::] = b
        a[::] = self.c
        a[::,::] = b
        a[::,::] = self.c
        a[2:3:,0:, 4:1:-1] = b
        a[2:3:,0:, 4:1:-1] = self.c
        z = a
        return z
def test_tensor_assign():
    """Slice/integer assignment: valid forms succeed, invalid ones raise.

    Note: TensorAssignWithInteger / TensorAssignWithTupleInteger are defined
    later in the module; that is fine because this function runs after import.
    """
    context.set_context(mode=context.GRAPH_MODE, save_graphs=True)
    net = TensorAssignWithSlice()
    net2= TensorAssignWithSlice2()
    net_e1 = TensorAssignWithSliceError1()
    net_e2 = TensorAssignWithSliceError2()
    a = np.arange(60).reshape(3,4,5)
    b = Tensor([1])
    Ta = Tensor(a)
    Ta4d = Tensor(a.reshape(1,3,4,5))
    Tb= Tensor([1,3])
    Tc= Tensor([])
    t = Tensor([1, 2, 3, 4, 5, 6, 7, 8])
    net(Ta, b)
    net2(t, b)
    # Error for A[Slice] = Number
    # 1. A[Slice] = Number, Slice error
    with pytest.raises(ValueError):
        net_e2(t, 2)
    # Error for A[Slice] = U, U is a Tensor
    # 1. A[Slice] = U, u.size is error
    with pytest.raises(ValueError):
        net2(t, Tb)
    # 2. A[Slice] = U, U is empty
    with pytest.raises(ValueError):
        net2(t, Tc)
    # 3. A[Slice] = U, U.size error
    with pytest.raises(ValueError):
        net2(t, Tb)
    # Error for A[Tuple(Slice...)] = Tensor
    # 1. A[Tuple(Slice...)] = U, U is empty
    with pytest.raises(ValueError):
        net(Ta, Tc)
    # 2. A[Tuple(Slice...)] = U, U.size error
    with pytest.raises(ValueError):
        net(Ta, Tb)
    # 3. A[Tuple(Slice...)] = U, Slice error
    with pytest.raises(ValueError):
        net_e1(Ta, b)
    # Error for A[Tuple(Slice...)] = Number
    # 1. A[Tuple(Slice...)] = Number, Slice error
    with pytest.raises(ValueError):
        net_e1(Ta, 2)
    net = TensorAssignWithInteger()
    # Error for A[Number] = scalar/Tensor
    # 1. A[Number] = U, U is a Tensor, u.size not match
    with pytest.raises(ValueError):
        net(Ta, Tb)
    with pytest.raises(ValueError):
        net(Ta, Tc)
    # 2. A[Number] = U, the number index error
    with pytest.raises(IndexError):
        net(Ta4d, b)
    # Error for A[(n,m)] = scalar/Tensor
    # 1. A[(n,m)] = U, U is a tensor. u.size not match
    net = TensorAssignWithTupleInteger()
    with pytest.raises(ValueError):
        net(Ta, Tc)
    with pytest.raises(ValueError):
        net(Ta, Tb)
    # 2. A[(n,m)] = U, the number index error
    with pytest.raises(IndexError):
        net(Ta4d, b)
class TensorAssignWithInteger(Cell):
    """Exercises assignment through a single integer index (scalar and tensor RHS)."""
    def __init__(self):
        super(TensorAssignWithInteger, self).__init__()
    def construct(self, a, b):
        a[1] = 1
        a[0] = b
        return a
class TensorAssignWithTupleInteger(Cell):
    """Exercises assignment through parenthesized / tuple integer indices."""
    def __init__(self):
        super(TensorAssignWithTupleInteger, self).__init__()
    def construct(self, a, b):
        # (1) is just the integer 1; (1,1) is a genuine two-element tuple index.
        a[(1)] = 1
        a[(1)] = b
        a[(1,1)] = b
        a[(1,1)] = 1
        return a
class TensorAssignWithBoolTensorIndex(Cell):
    """Exercises assignment through boolean tensor masks (a[mask] = value)."""
    def __init__(self):
        super(TensorAssignWithBoolTensorIndex, self).__init__()
        self.t = Tensor(np.arange(60).reshape([3, 4, 5]), dtype=mstype.float64)
    def construct(self, a, b, c, u_tensor, u_scalar):
        # Bug fix: the parameter was named `_scalar` while the body read the
        # module-level global `u_scalar`, so the value passed by every caller
        # was silently ignored.  Naming the parameter `u_scalar` makes the
        # body use the argument; all call sites already pass it positionally.
        a[c] = u_scalar
        a[b] = u_tensor
        z = a + self.t
        return z
class TensorAssignWithBoolTensorIndexError(Cell):
    """Chained boolean-mask assignment (a[b][c] = ...); used by the tests to
    assert that an AttributeError is raised."""
    def __init__(self):
        super(TensorAssignWithBoolTensorIndexError, self).__init__()
    def construct(self, a, b, c, u_tensor):
        a[b][c] = u_tensor
        return a
class TensorAssignWithBoolTensorIndex2(Cell):
    """Exercises assignment through comparison-generated boolean masks."""
    def __init__(self):
        super(TensorAssignWithBoolTensorIndex2, self).__init__()
        # Bug fix: the original assigned self.t twice; the first (2, 3)
        # assignment was dead code immediately overwritten.  Keep only the
        # effective (3, 4, 5) value.
        self.t = Tensor(np.arange(60).reshape([3, 4, 5]), dtype=mstype.float64)
    def construct(self, a, u_tensor, u_scalar):
        # Bug fix: the parameter was named `_scalar` while the body read the
        # module-level global `u_scalar`; rename so the argument is used.
        a[a > 8] = u_tensor
        a[a >= 6] = u_scalar
        a[a < 3] = u_scalar
        a[a <= 5] = u_tensor
        a[a == 5] = u_scalar
        z = a + self.t
        return z
class TensorAssignWithBoolTensorIndex2Error(Cell):
    """Chained comparison-mask assignment (a[m1][m2] = ...); used by the tests
    to assert that an AttributeError is raised."""
    def __init__(self):
        super(TensorAssignWithBoolTensorIndex2Error, self).__init__()
    def construct(self, a, u_tensor):
        a[a > 8][a > 5] = u_tensor
        return a
# Module-level fixtures shared by the boolean-index tests and `test_cases`.
a = np.random.uniform(1,10,[3,4,5])
# Boolean masks derived from `a`.
b = a > 5
c = a < 3
Ta = Tensor(a)
Tb = Tensor(b)
Tc = Tensor(c)
# Mask with a deliberately mismatched shape, used to trigger ValueError.
Td = Tensor([True, True])
u_tensor = Tensor([1])
# Source tensor with the wrong number of elements, used to trigger ValueError.
u_tensor_error = Tensor([1, 2])
t_1d = Tensor([1, 2, 3, 4, 5, 6, 7, 8])
u_scalar = 5
def test_tensor_assign_bool_index():
    """Boolean-mask assignment: valid masks succeed, bad shapes/chaining raise."""
    net1 = TensorAssignWithBoolTensorIndex()
    net2 = TensorAssignWithBoolTensorIndex2()
    net1(Ta, Tb, Tc, u_tensor, u_scalar)
    net1(Ta, Tb, Tc, u_tensor, u_scalar)
    # Mask shape mismatches and non-bool masks must raise ValueError.
    with pytest.raises(ValueError):
        net1(Ta, Td, Tc, u_tensor, u_scalar)
    with pytest.raises(ValueError):
        net1(Ta, u_tensor, Tc, u_tensor, u_scalar)
    with pytest.raises(ValueError):
        net1(Ta, Tb, Td, u_tensor, u_scalar)
    with pytest.raises(ValueError):
        net1(Ta, Tb, Ta, u_tensor, u_scalar)
    with pytest.raises(ValueError):
        net1(Ta, Tb, Tc, u_tensor_error, u_scalar)
    # net1(Ta, u_tensor, Tc, u_tensor_error, u_scalar)
    with pytest.raises(ValueError):
        net2(Ta, u_tensor_error, u_scalar)
    # Chained mask assignment (a[b][c] = ...) is unsupported -> AttributeError.
    net3 = TensorAssignWithBoolTensorIndexError()
    with pytest.raises(AttributeError):
        net3(Ta, Tb, Tc, u_tensor)
    with pytest.raises(AttributeError):
        net3(Ta, Tb, Tc, u_scalar)
    net4 = TensorAssignWithBoolTensorIndex2Error()
    with pytest.raises(AttributeError):
        net4(Ta, u_tensor)
    with pytest.raises(AttributeError):
        net4(Ta, u_scalar)
# (name, config) pairs consumed by the mindspore_test pipeline via
# test_compile() below; each block is compiled with the given inputs.
test_cases = [
    ('TensorAssignWithTupleInteger', {
        'block': TensorAssignWithTupleInteger(),
        'desc_inputs': [Ta, u_tensor],
    }),
    ('TensorAssignWithInteger', {
        'block': TensorAssignWithInteger(),
        'desc_inputs': [Ta, u_tensor],
    }),
    ('TensorAssignWithSlice', {
        'block': TensorAssignWithSlice(),
        'desc_inputs': [Ta, u_tensor],
    }),
    ('TensorAssignWithSlice2', {
        'block': TensorAssignWithSlice2(),
        'desc_inputs': [t_1d, u_tensor],
    }),
    ('TensorAssignWithBoolTensorIndex', {
        'block': TensorAssignWithBoolTensorIndex(),
        'desc_inputs': [Ta, Tb, Tc, u_tensor, u_scalar],
    }),
    ('TensorAssignWithBoolTensorIndex2', {
        'block': TensorAssignWithBoolTensorIndex2(),
        'desc_inputs': [Ta, u_tensor, u_scalar],
    }),
    ('SlicePositive', {
        'block': NetWorkSlicePositive(),
        'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
    }),
    ('SliceReduceDimension', {
        'block': NetWorkReduceDimension(),
        'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
    }),
    ('SliceNegative', {
        'block': NetWorkStepNegative(),
        'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
    }),
    ('SliceReduceToScalar', {
        'block': NetWorkReduceToScalar(),
        'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
    }),
    ('TensorSliceEllipsis', {
        'block': NetWorkSliceEllipsis(),
        'desc_inputs': [Tensor(np.ones([6, 7, 8, 9], np.int32))],
    }),
]
@mindspore_test(pipeline_for_compile_forward_ge_graph_for_case_by_case_config)
def test_compile():
    """Feed `test_cases` to the forward-compile pipeline in graph mode."""
    context.set_context(mode=context.GRAPH_MODE)
    return test_cases
def test_tensor_slice_reduce_out_of_bounds_neg():
    """A negative index below -dim_size must raise ValueError with a clear message."""
    class NetWork(Cell):
        def __init__(self):
            super(NetWork, self).__init__()
            self.tensor_ret = Tensor(np.array(9, np.int32))
        def construct(self, tensor):
            # -7 is out of bounds for axis 0 of a (6, 8, 10) tensor.
            ret = tensor[-7, 3, 4]
            return ret
    input_tensor = Tensor(np.ones([6, 8, 10], np.int32))
    net = NetWork()
    with pytest.raises(ValueError) as ex:
        net(input_tensor)
    assert "For 'StridedSlice' the `begin[0]` should be an int and must greater or equal to -6, but got `-7`" in str(ex.value)
def test_tensor_slice_reduce_out_of_bounds_positive():
    """An index equal to dim_size must raise ValueError with a clear message."""
    class NetWork(Cell):
        def __init__(self):
            super(NetWork, self).__init__()
            self.tensor_ret = Tensor(np.array(9, np.int32))
        def construct(self, tensor):
            # 6 is out of bounds for axis 0 of a (6, 8, 10) tensor.
            ret = tensor[6, 3, 4]
            return ret
    input_tensor = Tensor(np.ones([6, 8, 10], np.int32))
    net = NetWork()
    with pytest.raises(ValueError) as ex:
        net(input_tensor)
    assert "For 'StridedSlice' the `begin[0]` should be an int and must less than 6, but got `6`" in str(ex.value)
| 31.962871 | 126 | 0.61016 |
import numpy as np
import pytest
from mindspore import Tensor
from mindspore import context
from mindspore import dtype as mstype
from mindspore.nn import Cell
from ....mindspore_test_framework.mindspore_test import mindspore_test
from ....mindspore_test_framework.pipeline.forward.compile_forward \
import pipeline_for_compile_forward_ge_graph_for_case_by_case_config
class NetWorkSlicePositive(Cell):
def __init__(self):
super(NetWorkSlicePositive, self).__init__()
self.tensor_ret0 = Tensor(np.ones([1, 2, 2], np.int32))
self.tensor_ret1 = Tensor(np.ones([4, 7, 4], np.int32))
self.tensor_ret2 = Tensor(np.ones([6, 8, 10], np.int32))
self.tensor_ret3 = Tensor(np.ones([3, 8, 10], np.int32))
def construct(self, tensor):
ret0 = tensor[3:4:3, 1:5:2, 3:6:2] + self.tensor_ret0
ret1 = tensor[-6:4:1, 7:-8:-1, ::3] + self.tensor_ret1
ret2 = tensor[::, ::, ::] + self.tensor_ret2
ret3 = tensor[::2] + self.tensor_ret3
return ret0, ret1, ret2, ret3
class NetWorkSliceEllipsis(Cell):
def __init__(self):
super(NetWorkSliceEllipsis, self).__init__()
self.tensor_ret0 = Tensor(np.ones([2, 7, 8], np.int32))
self.tensor_ret1 = Tensor(np.ones([6, 7, 8, 9], np.int32))
self.tensor_ret2 = Tensor(np.ones([1, 6, 7, 8, 9], np.int32))
def construct(self, tensor):
ret0 = tensor[0:4:2, ..., 1] + self.tensor_ret0
ret1 = tensor[...] + self.tensor_ret1
ret2 = tensor[None] + self.tensor_ret2
ret3 = tensor[True] + self.tensor_ret2
return ret0, ret1, ret2, ret3
class NetWorkReduceDimension(Cell):
def __init__(self):
super(NetWorkReduceDimension, self).__init__()
self.tensor_ret0 = Tensor(np.ones([2, 4, 1], np.int32))
self.tensor_ret1 = Tensor(np.ones([3, 4], np.int32))
self.tensor_ret2 = Tensor(np.ones([6, 8], np.int32))
self.tensor_ret3 = Tensor(np.array(8, np.int32))
self.tensor_ret4 = Tensor(np.ones([8, 10], np.int32))
def construct(self, tensor):
ret0 = tensor[0:6:3, 1:5:1, 3:5:2] + self.tensor_ret0
ret1 = tensor[::2, 1, ::3] + self.tensor_ret1
ret2 = tensor[::, ::, 0] + self.tensor_ret2
ret3 = tensor[3, 2, 5] + self.tensor_ret3
ret4 = tensor[1] + self.tensor_ret4
return ret0, ret1, ret2, ret3, ret4
class NetWorkStepNegative(Cell):
def __init__(self):
super(NetWorkStepNegative, self).__init__()
self.tensor_ret = Tensor(np.ones([6, 5, 10], np.int32))
def construct(self, tensor):
ret = tensor[::1, -5::, ::-1] + self.tensor_ret
return ret
class NetWorkReduceToScalar(Cell):
def __init__(self):
super(NetWorkReduceToScalar, self).__init__()
self.tensor_ret = Tensor(np.array(9, np.int32))
def construct(self, tensor):
ret = tensor[2, 3, 4] + self.tensor_ret
return ret
class TensorAssignWithSliceError1(Cell):
def __init__(self):
super(TensorAssignWithSliceError1, self).__init__()
def construct(self, a, b):
a[1:3:-1,::] = b
return a
class TensorAssignWithSliceError2(Cell):
def __init__(self):
super(TensorAssignWithSliceError2, self).__init__()
def construct(self, a, b):
a[1:3:-1] = b
return a
class TensorAssignWithSlice2(Cell):
def __init__(self):
super(TensorAssignWithSlice2, self).__init__()
def construct(self, a, b):
a[1:5] = b
a[3:4] = 5
a[-1:1:-1] = b
a[-1:3:-1] = 5
a[::] = b
a[::] = 9
return a
class TensorAssignWithSlice(Cell):
def __init__(self):
super(TensorAssignWithSlice, self).__init__()
self.c = 2
def construct(self, a, b):
a[1:3,::] = b
a[2:3:,3:] = b
a[::] = b
a[::] = self.c
a[::,::] = b
a[::,::] = self.c
a[2:3:,0:, 4:1:-1] = b
a[2:3:,0:, 4:1:-1] = self.c
z = a
return z
def test_tensor_assign():
context.set_context(mode=context.GRAPH_MODE, save_graphs=True)
net = TensorAssignWithSlice()
net2= TensorAssignWithSlice2()
net_e1 = TensorAssignWithSliceError1()
net_e2 = TensorAssignWithSliceError2()
a = np.arange(60).reshape(3,4,5)
b = Tensor([1])
Ta = Tensor(a)
Ta4d = Tensor(a.reshape(1,3,4,5))
Tb= Tensor([1,3])
Tc= Tensor([])
t = Tensor([1, 2, 3, 4, 5, 6, 7, 8])
net(Ta, b)
net2(t, b)
with pytest.raises(ValueError):
net_e2(t, 2)
with pytest.raises(ValueError):
net2(t, Tb)
with pytest.raises(ValueError):
net2(t, Tc)
with pytest.raises(ValueError):
net2(t, Tb)
with pytest.raises(ValueError):
net(Ta, Tc)
with pytest.raises(ValueError):
net(Ta, Tb)
with pytest.raises(ValueError):
net_e1(Ta, b)
with pytest.raises(ValueError):
net_e1(Ta, 2)
net = TensorAssignWithInteger()
with pytest.raises(ValueError):
net(Ta, Tb)
with pytest.raises(ValueError):
net(Ta, Tc)
with pytest.raises(IndexError):
net(Ta4d, b)
net = TensorAssignWithTupleInteger()
with pytest.raises(ValueError):
net(Ta, Tc)
with pytest.raises(ValueError):
net(Ta, Tb)
with pytest.raises(IndexError):
net(Ta4d, b)
class TensorAssignWithInteger(Cell):
def __init__(self):
super(TensorAssignWithInteger, self).__init__()
def construct(self, a, b):
a[1] = 1
a[0] = b
return a
class TensorAssignWithTupleInteger(Cell):
def __init__(self):
super(TensorAssignWithTupleInteger, self).__init__()
def construct(self, a, b):
a[(1)] = 1
a[(1)] = b
a[(1,1)] = b
a[(1,1)] = 1
return a
class TensorAssignWithBoolTensorIndex(Cell):
def __init__(self):
super(TensorAssignWithBoolTensorIndex, self).__init__()
self.t = Tensor(np.arange(60).reshape([3,4,5]), dtype = mstype.float64)
def construct(self, a, b, c, u_tensor, _scalar):
a[c] = u_scalar
a[b] = u_tensor
z = a + self.t
return z
class TensorAssignWithBoolTensorIndexError(Cell):
def __init__(self):
super(TensorAssignWithBoolTensorIndexError, self).__init__()
def construct(self, a, b, c, u_tensor):
a[b][c] = u_tensor
return a
class TensorAssignWithBoolTensorIndex2(Cell):
def __init__(self):
super(TensorAssignWithBoolTensorIndex2, self).__init__()
self.t = Tensor(np.arange(6).reshape([2, 3]), dtype=mstype.float64)
self.t = Tensor(np.arange(60).reshape([3,4,5]), dtype = mstype.float64)
def construct(self, a, u_tensor, _scalar):
a[a > 8] = u_tensor
a[a >= 6] = u_scalar
a[a < 3] = u_scalar
a[a <= 5] = u_tensor
a[a == 5] = u_scalar
z = a + self.t
return z
class TensorAssignWithBoolTensorIndex2Error(Cell):
def __init__(self):
super(TensorAssignWithBoolTensorIndex2Error, self).__init__()
def construct(self, a, u_tensor):
a[a > 8][a > 5] = u_tensor
return a
a = np.random.uniform(1,10,[3,4,5])
b = a > 5
c = a < 3
Ta = Tensor(a)
Tb = Tensor(b)
Tc = Tensor(c)
Td = Tensor([True, True])
u_tensor = Tensor([1])
u_tensor_error = Tensor([1, 2])
t_1d = Tensor([1, 2, 3, 4, 5, 6, 7, 8])
u_scalar = 5
def test_tensor_assign_bool_index():
net1 = TensorAssignWithBoolTensorIndex()
net2 = TensorAssignWithBoolTensorIndex2()
net1(Ta, Tb, Tc, u_tensor, u_scalar)
net1(Ta, Tb, Tc, u_tensor, u_scalar)
with pytest.raises(ValueError):
net1(Ta, Td, Tc, u_tensor, u_scalar)
with pytest.raises(ValueError):
net1(Ta, u_tensor, Tc, u_tensor, u_scalar)
with pytest.raises(ValueError):
net1(Ta, Tb, Td, u_tensor, u_scalar)
with pytest.raises(ValueError):
net1(Ta, Tb, Ta, u_tensor, u_scalar)
with pytest.raises(ValueError):
net1(Ta, Tb, Tc, u_tensor_error, u_scalar)
with pytest.raises(ValueError):
net2(Ta, u_tensor_error, u_scalar)
net3 = TensorAssignWithBoolTensorIndexError()
with pytest.raises(AttributeError):
net3(Ta, Tb, Tc, u_tensor)
with pytest.raises(AttributeError):
net3(Ta, Tb, Tc, u_scalar)
net4 = TensorAssignWithBoolTensorIndex2Error()
with pytest.raises(AttributeError):
net4(Ta, u_tensor)
with pytest.raises(AttributeError):
net4(Ta, u_scalar)
test_cases = [
('TensorAssignWithTupleInteger', {
'block': TensorAssignWithTupleInteger(),
'desc_inputs': [Ta, u_tensor],
}),
('TensorAssignWithInteger', {
'block': TensorAssignWithInteger(),
'desc_inputs': [Ta, u_tensor],
}),
('TensorAssignWithSlice', {
'block': TensorAssignWithSlice(),
'desc_inputs': [Ta, u_tensor],
}),
('TensorAssignWithSlice2', {
'block': TensorAssignWithSlice2(),
'desc_inputs': [t_1d, u_tensor],
}),
('TensorAssignWithBoolTensorIndex', {
'block': TensorAssignWithBoolTensorIndex(),
'desc_inputs': [Ta, Tb, Tc, u_tensor, u_scalar],
}),
('TensorAssignWithBoolTensorIndex2', {
'block': TensorAssignWithBoolTensorIndex2(),
'desc_inputs': [Ta, u_tensor, u_scalar],
}),
('SlicePositive', {
'block': NetWorkSlicePositive(),
'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
}),
('SliceReduceDimension', {
'block': NetWorkReduceDimension(),
'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
}),
('SliceNegative', {
'block': NetWorkStepNegative(),
'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
}),
('SliceReduceToScalar', {
'block': NetWorkReduceToScalar(),
'desc_inputs': [Tensor(np.ones([6, 8, 10], np.int32))],
}),
('TensorSliceEllipsis', {
'block': NetWorkSliceEllipsis(),
'desc_inputs': [Tensor(np.ones([6, 7, 8, 9], np.int32))],
}),
]
@mindspore_test(pipeline_for_compile_forward_ge_graph_for_case_by_case_config)
def test_compile():
context.set_context(mode=context.GRAPH_MODE)
return test_cases
def test_tensor_slice_reduce_out_of_bounds_neg():
class NetWork(Cell):
def __init__(self):
super(NetWork, self).__init__()
self.tensor_ret = Tensor(np.array(9, np.int32))
def construct(self, tensor):
ret = tensor[-7, 3, 4]
return ret
input_tensor = Tensor(np.ones([6, 8, 10], np.int32))
net = NetWork()
with pytest.raises(ValueError) as ex:
net(input_tensor)
assert "For 'StridedSlice' the `begin[0]` should be an int and must greater or equal to -6, but got `-7`" in str(ex.value)
def test_tensor_slice_reduce_out_of_bounds_positive():
class NetWork(Cell):
def __init__(self):
super(NetWork, self).__init__()
self.tensor_ret = Tensor(np.array(9, np.int32))
def construct(self, tensor):
ret = tensor[6, 3, 4]
return ret
input_tensor = Tensor(np.ones([6, 8, 10], np.int32))
net = NetWork()
with pytest.raises(ValueError) as ex:
net(input_tensor)
assert "For 'StridedSlice' the `begin[0]` should be an int and must less than 6, but got `6`" in str(ex.value)
| true | true |
f713b345bd277bc050a9b295cca338adedc276d5 | 1,193 | py | Python | Utils/lockfile.py | RobertCsordas/dnc | 1a01d64d30430e19380084847e84b52c0d58c81e | [
"Apache-2.0"
] | 3 | 2020-10-17T20:34:07.000Z | 2021-04-15T04:01:36.000Z | Utils/lockfile.py | RobertCsordas/dnc | 1a01d64d30430e19380084847e84b52c0d58c81e | [
"Apache-2.0"
] | null | null | null | Utils/lockfile.py | RobertCsordas/dnc | 1a01d64d30430e19380084847e84b52c0d58c81e | [
"Apache-2.0"
] | 1 | 2022-02-23T02:31:41.000Z | 2022-02-23T02:31:41.000Z | # Copyright 2017 Robert Csordas. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================
import os
import fcntl
class LockFile:
    """Inter-process exclusive lock backed by an ``fcntl.lockf`` file lock.

    ``acquire`` blocks until the exclusive lock on ``fname`` is held; the
    lock file is made world-writable (0o777) so processes of other users can
    share it. Usable directly or as a context manager.
    """

    def __init__(self, fname):
        self._fname = fname
        self._fd = None  # open file object while the lock is held, else None

    def acquire(self):
        """Open the lock file and block until the exclusive lock is taken."""
        self._fd = open(self._fname, "w")
        try:
            # World-writable so unrelated users can take the same lock later.
            os.chmod(self._fname, 0o777)
            fcntl.lockf(self._fd, fcntl.LOCK_EX)
        except Exception:
            # Bug fix: don't leak the descriptor (or leave _fd dangling) if
            # chmod/lockf fails.
            self._fd.close()
            self._fd = None
            raise

    def release(self):
        """Drop the lock and close the underlying file."""
        fcntl.lockf(self._fd, fcntl.LOCK_UN)
        self._fd.close()
        self._fd = None

    def __enter__(self):
        self.acquire()
        # Bug fix: return the instance so ``with LockFile(p) as lock:`` works.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()
| 28.404762 | 80 | 0.640402 |
import os
import fcntl
class LockFile:
    """Exclusive inter-process lock implemented with ``fcntl.lockf``."""

    def __init__(self, fname):
        # Path of the lock file; the descriptor exists only while locked.
        self._fname = fname
        self._fd = None

    def acquire(self):
        """Block until the exclusive lock on the file is obtained."""
        handle = open(self._fname, "w")
        self._fd = handle
        # Make the lock file usable by any user on the machine.
        os.chmod(self._fname, 0o777)
        fcntl.lockf(handle, fcntl.LOCK_EX)

    def release(self):
        """Unlock, close the file, and forget the descriptor."""
        handle = self._fd
        fcntl.lockf(handle, fcntl.LOCK_UN)
        handle.close()
        self._fd = None

    def __enter__(self):
        self.acquire()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()
| true | true |
f713b4a98deb1a3197e73fe21905fc50ff875827 | 139 | py | Python | testes/008 - Copia.py | AlexCaprian/Python | 4d343330bb4e82f639ca103b119f0a9eeee916e0 | [
"MIT"
] | null | null | null | testes/008 - Copia.py | AlexCaprian/Python | 4d343330bb4e82f639ca103b119f0a9eeee916e0 | [
"MIT"
] | null | null | null | testes/008 - Copia.py | AlexCaprian/Python | 4d343330bb4e82f639ca103b119f0a9eeee916e0 | [
"MIT"
] | null | null | null | m=float(input('Quantos metros?'))
c=m*100
mm=m*1000
print('A conversão de {} para centímetros é {} e para milímetros é {}.'.format(m,c,mm)) | 34.75 | 87 | 0.683453 | m=float(input('Quantos metros?'))
c=m*100
mm=m*1000
print('A conversão de {} para centímetros é {} e para milímetros é {}.'.format(m,c,mm)) | true | true |
f713b561e8db2c29bdc9f9f639374ec689869888 | 390 | py | Python | dict01/pinky_brain.py | mikerauer/pyb-class | b7f6202c58df654eb81263d12c2634fa37a27e07 | [
"MIT"
] | null | null | null | dict01/pinky_brain.py | mikerauer/pyb-class | b7f6202c58df654eb81263d12c2634fa37a27e07 | [
"MIT"
] | null | null | null | dict01/pinky_brain.py | mikerauer/pyb-class | b7f6202c58df654eb81263d12c2634fa37a27e07 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
# Nested data: a count, a list of mice (each with "name" and "tag"), and a
# status flag.
mice = {
    "number": 2,
    "names": [
        {"name": "Pinky", "tag": "the real genius"},
        {"name": "The Brain", "tag": "insane one"},
    ],
    "world_domination_status": "pending",
}
# Expected output:
#   Pinky is the real genius, and The Brain is the insane one.
pinky, brain = mice["names"]
print("{} is {}, and {} is the {}.".format(pinky["name"], pinky["tag"], brain["name"], brain["tag"]))
| 55.714286 | 157 | 0.607692 |
mice = {"number": 2, "names": [{"name": "Pinky", "tag": "the real genius"},{"name": "The Brain", "tag": "insane one"}], "world_domination_status": "pending"}
e["names"][1]["name"]} is the {mice["names"][1]["tag"]}.')
| true | true |
f713b5e3c74cd9f95a301019d4e5f568395f8611 | 3,712 | py | Python | intersight/models/syslog_remote_logging_client_ref.py | ategaw-cisco/intersight-python | 9d6476620507281b1dc358e29ac452d56081bbb0 | [
"Apache-2.0"
] | null | null | null | intersight/models/syslog_remote_logging_client_ref.py | ategaw-cisco/intersight-python | 9d6476620507281b1dc358e29ac452d56081bbb0 | [
"Apache-2.0"
] | null | null | null | intersight/models/syslog_remote_logging_client_ref.py | ategaw-cisco/intersight-python | 9d6476620507281b1dc358e29ac452d56081bbb0 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Intersight REST API
This is Intersight REST API
OpenAPI spec version: 1.0.9-262
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class SyslogRemoteLoggingClientRef(object):
    """Reference (Moid + ObjectType) to a SyslogRemoteLoggingClient object.

    NOTE: This class is auto generated by the swagger code generator program.
    The only manual change is replacing the Python-2-only ``six.iteritems``
    with the built-in ``dict.items``.
    """

    # swagger_types: attribute name -> attribute type
    # attribute_map: attribute name -> JSON key used in the API payload
    swagger_types = {
        'moid': 'str',
        'object_type': 'str'
    }

    attribute_map = {
        'moid': 'Moid',
        'object_type': 'ObjectType'
    }

    def __init__(self, moid=None, object_type=None):
        """SyslogRemoteLoggingClientRef - a model defined in Swagger."""
        self._moid = None
        self._object_type = None

        # Assign through the setters only when a value was provided, so the
        # backing attributes stay None otherwise.
        if moid is not None:
            self.moid = moid
        if object_type is not None:
            self.object_type = object_type

    @property
    def moid(self):
        """Gets the moid of this SyslogRemoteLoggingClientRef.

        :return: The moid of this SyslogRemoteLoggingClientRef.
        :rtype: str
        """
        return self._moid

    @moid.setter
    def moid(self, moid):
        """Sets the moid of this SyslogRemoteLoggingClientRef.

        :param moid: The moid of this SyslogRemoteLoggingClientRef.
        :type: str
        """
        self._moid = moid

    @property
    def object_type(self):
        """Gets the object_type of this SyslogRemoteLoggingClientRef.

        :return: The object_type of this SyslogRemoteLoggingClientRef.
        :rtype: str
        """
        return self._object_type

    @object_type.setter
    def object_type(self, object_type):
        """Sets the object_type of this SyslogRemoteLoggingClientRef.

        :param object_type: The object_type of this SyslogRemoteLoggingClientRef.
        :type: str
        """
        self._object_type = object_type

    def to_dict(self):
        """Returns the model properties as a dict, recursing into nested models."""
        result = {}

        # Fix: dict.items() replaces six.iteritems (Python 2 legacy; behavior
        # on Python 3 is identical and drops the six dependency).
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, SyslogRemoteLoggingClientRef):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| 24.746667 | 81 | 0.554149 |
from pprint import pformat
from six import iteritems
import re
class SyslogRemoteLoggingClientRef(object):
    """Swagger-generated reference (Moid + ObjectType) to a
    SyslogRemoteLoggingClient object; do not edit manually."""
    # attribute name -> declared type
    swagger_types = {
        'moid': 'str',
        'object_type': 'str'
    }
    # attribute name -> JSON key in the API payload
    attribute_map = {
        'moid': 'Moid',
        'object_type': 'ObjectType'
    }
    def __init__(self, moid=None, object_type=None):
        """Initialize the model; unset fields stay None."""
        self._moid = None
        self._object_type = None
        if moid is not None:
            self.moid = moid
        if object_type is not None:
            self.object_type = object_type
    @property
    def moid(self):
        """The Moid (managed object id) of the referenced object (str)."""
        return self._moid
    @moid.setter
    def moid(self, moid):
        """Set the Moid of the referenced object."""
        self._moid = moid
    @property
    def object_type(self):
        """The ObjectType of the referenced object (str)."""
        return self._object_type
    @object_type.setter
    def object_type(self, object_type):
        """Set the ObjectType of the referenced object."""
        self._object_type = object_type
    def to_dict(self):
        """Return the model properties as a dict, recursing into nested models."""
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()
    def __eq__(self, other):
        """True if both objects are of this type with equal attributes."""
        if not isinstance(other, SyslogRemoteLoggingClientRef):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| true | true |
f713b6a7b772fb52e300f15bc3b1304b170ad695 | 182 | py | Python | Controllers/EditSessionForm.py | esot0/jmsa-tutoring-backend | f35000c73fbbb31f9b4dcca36e40854dc2e06d23 | [
"MIT"
] | null | null | null | Controllers/EditSessionForm.py | esot0/jmsa-tutoring-backend | f35000c73fbbb31f9b4dcca36e40854dc2e06d23 | [
"MIT"
] | null | null | null | Controllers/EditSessionForm.py | esot0/jmsa-tutoring-backend | f35000c73fbbb31f9b4dcca36e40854dc2e06d23 | [
"MIT"
] | null | null | null | from wtforms import StringField, Form
class EditSessionForm(Form):
    """WTForms form holding the editable attributes of a tutoring session.

    All fields are free text with no validators attached, so any validation
    or parsing happens in the caller.
    """
    subject = StringField('Subject')
    # NOTE(review): stored as plain text, not a date type — presumably parsed
    # elsewhere; confirm the expected format.
    date = StringField('Date')
    other_user = StringField('Other User')
| 20.222222 | 42 | 0.714286 | from wtforms import StringField, Form
class EditSessionForm(Form):
    """Form with the editable session attributes (all plain-text fields)."""
    subject = StringField('Subject')
    date = StringField('Date')  # free-text date; format not enforced here
    other_user = StringField('Other User')
| true | true |
f713b6c9f2f54c3584469cc5be229a9e0af8e5ca | 3,761 | py | Python | components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py | Strasser-Pablo/pipelines | a1d513eb412f3ffd44edf82af2fa7edb05c3b952 | [
"Apache-2.0"
] | 2,860 | 2018-05-24T04:55:01.000Z | 2022-03-31T13:49:56.000Z | components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py | Strasser-Pablo/pipelines | a1d513eb412f3ffd44edf82af2fa7edb05c3b952 | [
"Apache-2.0"
] | 7,331 | 2018-05-16T09:03:26.000Z | 2022-03-31T23:22:04.000Z | components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py | Strasser-Pablo/pipelines | a1d513eb412f3ffd44edf82af2fa7edb05c3b952 | [
"Apache-2.0"
] | 1,359 | 2018-05-15T11:05:41.000Z | 2022-03-31T09:42:09.000Z | from typing import NamedTuple
def CsvExampleGen(
    output_examples_uri: 'ExamplesUri',
    input_base: str,
    input_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Input'}},
    output_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Output'}},
    range_config: {'JsonObject': {'data_type': 'proto:tfx.configs.RangeConfig'}} = None,
    beam_pipeline_args: list = None,
) -> NamedTuple('Outputs', [
    ('examples_uri', 'ExamplesUri'),
]):
    """Pipeline-component wrapper that runs the TFX CsvExampleGen executor.

    Converts the plain string/URI arguments into TFX channels and artifacts,
    instantiates CsvExampleGen, and invokes its Beam executor directly
    (outside a TFX orchestrator). Returns the output examples URI unchanged.
    CAUTION: `locals().copy()` below captures the parameter names, so the
    signature and the argument lookups are tightly coupled.
    """
    from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen as component_class
    #Generated code
    import os
    import tempfile
    from tensorflow.io import gfile
    from google.protobuf import json_format, message
    from tfx.types import channel_utils, artifact_utils
    from tfx.components.base import base_executor
    # Snapshot of this function's arguments (plus component_class), keyed by name.
    arguments = locals().copy()
    component_class_args = {}
    # Execution parameters: parse proto-typed JSON strings, pass others through.
    for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items():
        argument_value = arguments.get(name, None)
        if argument_value is None:
            continue
        parameter_type = execution_parameter.type
        if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message):
            argument_value_obj = parameter_type()
            json_format.Parse(argument_value, argument_value_obj)
        else:
            argument_value_obj = argument_value
        component_class_args[name] = argument_value_obj
    # Input channels: wrap each provided URI/path in an artifact of the declared type.
    for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items():
        artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path')
        if artifact_path:
            artifact = channel_parameter.type()
            artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash
            if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES:
                # Recovering splits
                subdirs = gfile.listdir(artifact_path)
                # Workaround for https://github.com/tensorflow/tensorflow/issues/39167
                subdirs = [subdir.rstrip('/') for subdir in subdirs]
                split_names = [subdir.replace('Split-', '') for subdir in subdirs]
                artifact.split_names = artifact_utils.encode_split_names(sorted(split_names))
            component_class_args[name] = channel_utils.as_channel([artifact])
    component_class_instance = component_class(**component_class_args)
    input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all())
    output_dict = {}
    exec_properties = component_class_instance.exec_properties
    # Generating paths for output artifacts
    for name, channel in component_class_instance.outputs.items():
        artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path')
        if artifact_path:
            artifact = channel.type()
            artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash
            artifact_list = [artifact]
            channel._artifacts = artifact_list
            output_dict[name] = artifact_list
    print('component instance: ' + str(component_class_instance))
    # Run the component's executor in-process with the default temp dir.
    executor_context = base_executor.BaseExecutor.Context(
        beam_pipeline_args=arguments.get('beam_pipeline_args'),
        tmp_dir=tempfile.gettempdir(),
        unique_id='tfx_component',
    )
    executor = component_class_instance.executor_spec.executor_class(executor_context)
    executor.Do(
        input_dict=input_dict,
        output_dict=output_dict,
        exec_properties=exec_properties,
    )
    return (output_examples_uri, )
| 44.77381 | 129 | 0.697421 | from typing import NamedTuple
def CsvExampleGen(
    output_examples_uri: 'ExamplesUri',
    input_base: str,
    input_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Input'}},
    output_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Output'}},
    range_config: {'JsonObject': {'data_type': 'proto:tfx.configs.RangeConfig'}} = None,
    beam_pipeline_args: list = None,
) -> NamedTuple('Outputs', [
    ('examples_uri', 'ExamplesUri'),
]):
    """Run the TFX CsvExampleGen executor directly from URI/JSON arguments.

    Builds artifacts/channels from the given URIs, instantiates the
    component, executes its Beam executor in-process, and echoes back the
    output examples URI. NOTE: `locals().copy()` ties argument lookups to
    the exact parameter names of this signature.
    """
    from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen as component_class
    import os
    import tempfile
    from tensorflow.io import gfile
    from google.protobuf import json_format, message
    from tfx.types import channel_utils, artifact_utils
    from tfx.components.base import base_executor
    # Snapshot of the arguments (and component_class), keyed by name.
    arguments = locals().copy()
    component_class_args = {}
    # Execution parameters: proto-typed values arrive as JSON strings.
    for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items():
        argument_value = arguments.get(name, None)
        if argument_value is None:
            continue
        parameter_type = execution_parameter.type
        if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message):
            argument_value_obj = parameter_type()
            json_format.Parse(argument_value, argument_value_obj)
        else:
            argument_value_obj = argument_value
        component_class_args[name] = argument_value_obj
    # Input channels: wrap each provided URI in an artifact; TFX expects URIs
    # to end with a slash, and split names are recovered from subdirectories.
    for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items():
        artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path')
        if artifact_path:
            artifact = channel_parameter.type()
            artifact.uri = artifact_path.rstrip('/') + '/'
            if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES:
                subdirs = gfile.listdir(artifact_path)
                # Strip trailing slashes (tensorflow#39167) and the "Split-" prefix.
                subdirs = [subdir.rstrip('/') for subdir in subdirs]
                split_names = [subdir.replace('Split-', '') for subdir in subdirs]
                artifact.split_names = artifact_utils.encode_split_names(sorted(split_names))
            component_class_args[name] = channel_utils.as_channel([artifact])
    component_class_instance = component_class(**component_class_args)
    input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all())
    output_dict = {}
    exec_properties = component_class_instance.exec_properties
    # Output channels: point each declared output at the caller-supplied URI.
    for name, channel in component_class_instance.outputs.items():
        artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path')
        if artifact_path:
            artifact = channel.type()
            artifact.uri = artifact_path.rstrip('/') + '/'
            artifact_list = [artifact]
            channel._artifacts = artifact_list
            output_dict[name] = artifact_list
    print('component instance: ' + str(component_class_instance))
    # In-process executor invocation (no orchestrator involved).
    executor_context = base_executor.BaseExecutor.Context(
        beam_pipeline_args=arguments.get('beam_pipeline_args'),
        tmp_dir=tempfile.gettempdir(),
        unique_id='tfx_component',
    )
    executor = component_class_instance.executor_spec.executor_class(executor_context)
    executor.Do(
        input_dict=input_dict,
        output_dict=output_dict,
        exec_properties=exec_properties,
    )
    return (output_examples_uri, )
| true | true |
f713b7232b0919cbb5b6eac3bc695c1343a44fe2 | 1,808 | py | Python | python/src/main/python/setup.py | radiant-maxar/geowave | 2d9f39d32e4621c8f5965a4dffff0623c1c03231 | [
"Apache-2.0"
] | 280 | 2017-06-14T01:26:19.000Z | 2022-03-28T15:45:23.000Z | python/src/main/python/setup.py | radiant-maxar/geowave | 2d9f39d32e4621c8f5965a4dffff0623c1c03231 | [
"Apache-2.0"
] | 458 | 2017-06-12T20:00:59.000Z | 2022-03-31T04:41:59.000Z | python/src/main/python/setup.py | radiant-maxar/geowave | 2d9f39d32e4621c8f5965a4dffff0623c1c03231 | [
"Apache-2.0"
] | 135 | 2017-06-12T20:39:34.000Z | 2022-03-15T13:42:30.000Z | # Packages up pygw so it's pip-installable
from setuptools import setup, find_packages
with open('README.md', 'r') as fh:
long_description = fh.read()
def get_version():
    """Return the package version string.

    Inside the source tree the version comes from the Maven build via the
    ``maven_version`` helper module; in an installed distribution that module
    is absent, so fall back to the metadata of the installed ``pygw`` package
    (``pkg_resources.DistributionNotFound`` propagates if it isn't installed).
    """
    try:
        from maven_version import get_maven_version
        version = get_maven_version()
    except ModuleNotFoundError:
        # If maven version isn't found, it must be from the distribution.
        # (Removed the unused DistributionNotFound import.)
        from pkg_resources import get_distribution
        version = get_distribution('pygw').version
    return version
# Packaging metadata for the pygw distribution (GeoWave Python bindings).
setup(
    name='pygw',
    author='GeoWave Contributors',
    author_email='geowave.python@gmail.com',
    description='GeoWave bindings for Python3',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://locationtech.github.io/geowave/',
    project_urls={
        'Documentation': 'https://locationtech.github.io/geowave/pydocs/',
        'Source': 'https://github.com/locationtech/geowave/tree/master/python/src/main/python',
    },
    # Version is derived from the Maven build or the installed distribution.
    version=get_version(),
    packages=find_packages(),
    classifiers=[
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.0',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
    ],
    install_requires=['py4j==0.10.8.1', 'shapely==1.7'],
    python_requires='>=3,<3.8' # py4j does not support python 3.8 yet
)
| 36.16 | 95 | 0.65542 |
from setuptools import setup, find_packages
with open('README.md', 'r') as fh:
long_description = fh.read()
def get_version():
    """Version from the Maven build when available, else from the installed
    pygw distribution's metadata."""
    try:
        from maven_version import get_maven_version
        return get_maven_version()
    except ModuleNotFoundError:
        # Not building from source: read the installed package's version.
        from pkg_resources import get_distribution
        from pkg_resources import DistributionNotFound
        return get_distribution('pygw').version
# Packaging metadata for pygw; supports Python 3.0-3.7 only (py4j limitation).
setup(
    name='pygw',
    author='GeoWave Contributors',
    author_email='geowave.python@gmail.com',
    description='GeoWave bindings for Python3',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://locationtech.github.io/geowave/',
    project_urls={
        'Documentation': 'https://locationtech.github.io/geowave/pydocs/',
        'Source': 'https://github.com/locationtech/geowave/tree/master/python/src/main/python',
    },
    version=get_version(),
    packages=find_packages(),
    classifiers=[
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.0',
        'Programming Language :: Python :: 3.1',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
    ],
    install_requires=['py4j==0.10.8.1', 'shapely==1.7'],
    python_requires='>=3,<3.8'  # py4j does not yet support Python 3.8
)
| true | true |
f713b7cba5b2c11f1ceac74906715a9f7d2dd421 | 657 | py | Python | venv/lib/python3.8/site-packages/_pyinstaller_hooks_contrib/hooks/stdhooks/hook-numba.py | avrumnoor/NewsSummarizer | a963497ef9bc62d2148aa28e624ea32955992f57 | [
"MIT"
] | 12 | 2020-12-15T15:12:06.000Z | 2022-03-18T16:17:42.000Z | venv/lib/python3.8/site-packages/_pyinstaller_hooks_contrib/hooks/stdhooks/hook-numba.py | avrumnoor/NewsSummarizer | a963497ef9bc62d2148aa28e624ea32955992f57 | [
"MIT"
] | 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | venv/lib/python3.8/site-packages/_pyinstaller_hooks_contrib/hooks/stdhooks/hook-numba.py | avrumnoor/NewsSummarizer | a963497ef9bc62d2148aa28e624ea32955992f57 | [
"MIT"
] | 10 | 2020-12-15T15:12:14.000Z | 2022-02-09T21:02:17.000Z | # ------------------------------------------------------------------
# Copyright (c) 2020 PyInstaller Development Team.
#
# This file is distributed under the terms of the GNU General Public
# License (version 2.0 or later).
#
# The full license is available in LICENSE.GPL.txt, distributed with
# this software.
#
# SPDX-License-Identifier: GPL-2.0-or-later
# ------------------------------------------------------------------
#
# NumPy aware dynamic Python compiler using LLVM
# https://github.com/numba/numba
#
# Tested with:
# numba 0.26 (Anaconda 4.1.1, Windows), numba 0.28 (Linux)
excludedimports = ["IPython", "scipy"]
hiddenimports = ["llvmlite"]
| 31.285714 | 68 | 0.576865 |
excludedimports = ["IPython", "scipy"]
hiddenimports = ["llvmlite"]
| true | true |
f713b8d08238284849c9342f3be734f7fb3a2bdb | 1,699 | py | Python | Other/GaussianRandomStockPrice.py | AcudoDev/FinanceToolbox | 90676e798f2e8eac164ccfcd6708cc717e1911f2 | [
"MIT"
] | null | null | null | Other/GaussianRandomStockPrice.py | AcudoDev/FinanceToolbox | 90676e798f2e8eac164ccfcd6708cc717e1911f2 | [
"MIT"
] | null | null | null | Other/GaussianRandomStockPrice.py | AcudoDev/FinanceToolbox | 90676e798f2e8eac164ccfcd6708cc717e1911f2 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import yfinance as yf
from sklearn.linear_model import LinearRegression
import statsmodels
import statsmodels.api as sm
import statsmodels.tsa.stattools as ts
import datetime
import scipy.stats
import math
import openpyxl as pyxl
from scipy import signal
from scipy import stats as ss
import statistics
from finta import TA
from filterpy.kalman import KalmanFilter
from filterpy.common import Q_discrete_white_noise
import pandas_ta as ta
from pingouin import gzscore
def GaussianRandomStockPrice(mu, sigma, n, end, freq, S0=100):
    """Simulate a stock price path from i.i.d. Gaussian simple returns.

    Arguments:
    ----------
    - mu: float
        Mean of the per-period simple return.
    - sigma: float
        Standard deviation of the per-period simple return.
    - n: int
        Number of periods to simulate.
    - end: datetime-like
        Last date of the series.
    - freq: pandas frequency string
        Frequency of the series ("D" days, "min" minutes, "s" seconds, ...).
    - S0: float
        Initial price level (default 100).

    Return:
    ----------
    - Pandas DataFrame with a datetime index and a single "Price" column.
    """
    draws = np.random.normal(mu, sigma, n).astype("float")
    frame = pd.DataFrame({"Return": draws})
    # Compound the simple returns into a price path starting at S0.
    frame["Price"] = (1 + frame["Return"]).cumprod() * S0
    frame.index = pd.date_range(end=end, freq=freq, periods=n)
    return pd.DataFrame(frame["Price"])
| 25.742424 | 88 | 0.649205 | import pandas as pd
import numpy as np
import yfinance as yf
from sklearn.linear_model import LinearRegression
import statsmodels
import statsmodels.api as sm
import statsmodels.tsa.stattools as ts
import datetime
import scipy.stats
import math
import openpyxl as pyxl
from scipy import signal
from scipy import stats as ss
import statistics
from finta import TA
from filterpy.kalman import KalmanFilter
from filterpy.common import Q_discrete_white_noise
import pandas_ta as ta
from pingouin import gzscore
def GaussianRandomStockPrice(mu, sigma, n, end, freq, S0=100):
    """Random stock-price series from n i.i.d. Gaussian simple returns
    (mean mu, std sigma), compounded from S0, indexed by a date_range
    ending at `end` with frequency `freq`. Returns a one-column ("Price")
    DataFrame."""
    RStock = np.random.normal(mu, sigma, n).astype("float")
    RStock = pd.DataFrame(RStock)
    RStock.rename(inplace=True, columns={RStock.columns[0]: "Return"})
    # Compound simple returns into the price path.
    RStock["Price"] = ((1 + RStock["Return"]).cumprod()) * S0
    times = pd.date_range(end=end, freq=freq, periods=n)
    RStock.index = times
    # Keep only the price column.
    RStock = pd.DataFrame(RStock["Price"])
    return RStock
| true | true |
f713ba9833d44add8863407d37e88c84d47ea6f3 | 522 | py | Python | pyschieber/player/random_player.py | Murthy10/pyschieber | f9db28c9553b8f321f6ed71cff04eff7879af5f6 | [
"MIT"
] | 5 | 2018-01-17T08:11:14.000Z | 2018-11-27T11:37:15.000Z | pyschieber/player/random_player.py | Murthy10/pyschieber | f9db28c9553b8f321f6ed71cff04eff7879af5f6 | [
"MIT"
] | 4 | 2018-05-09T08:41:05.000Z | 2018-11-16T08:07:39.000Z | pyschieber/player/random_player.py | Murthy10/pyschieber | f9db28c9553b8f321f6ed71cff04eff7879af5f6 | [
"MIT"
] | 3 | 2018-04-20T07:39:30.000Z | 2018-11-10T12:44:08.000Z | import random
from pyschieber.player.base_player import BasePlayer
from pyschieber.trumpf import Trumpf
class RandomPlayer(BasePlayer):
    """Schieber player that picks trumpf and cards uniformly at random.

    Both choosers return the ``move`` generator, which keeps yielding random
    picks until the game loop send()s acceptance.
    """
    def choose_trumpf(self, geschoben):
        # Every trumpf option is always offered, regardless of `geschoben`.
        return move(choices=list(Trumpf))
    def choose_card(self, state=None):
        # Restrict the random pick to the cards the rules currently allow.
        cards = self.allowed_cards(state=state)
        return move(choices=cards)
def move(choices):
    """Yield random picks from `choices` until the consumer accepts one.

    The game loop send()s a truthy value to accept the proposed move; the
    generator then yields None once and stops.
    """
    while True:
        pick = random.choice(choices)
        accepted = yield pick
        if accepted:
            yield None
            return
| 22.695652 | 52 | 0.683908 | import random
from pyschieber.player.base_player import BasePlayer
from pyschieber.trumpf import Trumpf
class RandomPlayer(BasePlayer):
    """Player choosing trumpf and cards at random via the ``move`` generator."""
    def choose_trumpf(self, geschoben):
        # `geschoben` does not restrict the trumpf options here.
        return move(choices=list(Trumpf))
    def choose_card(self, state=None):
        # Only legally playable cards are candidates.
        cards = self.allowed_cards(state=state)
        return move(choices=cards)
def move(choices):
    """Generator protocol for a move: propose random picks from `choices`
    until the consumer send()s a truthy value, then yield None and finish."""
    accepted = False
    while not accepted:
        accepted = yield random.choice(choices)
        if accepted:
            # Acknowledge acceptance, then fall out of the loop.
            yield None
| true | true |
f713bb1e600d2ca18e6774b088fb0443723b8c85 | 6,909 | py | Python | data/p3BR/R2/benchmark/startQiskit289.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p3BR/R2/benchmark/startQiskit289.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | data/p3BR/R2/benchmark/startQiskit289.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=3
# total number=60
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
    """XOR two bit strings position-wise; the result is returned reversed."""
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    # NOTE: the reversal of the output bit order is intentional.
    return ''.join(bits[::-1])
def bitwise_dot(s: str, t: str) -> str:
    """Inner product of two bit strings modulo 2, returned as '0' or '1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build the phase-free oracle O_f as a circuit on n controls + 1 target.

    For every n-bit input where f is "1", X-gates map that input to the
    all-ones pattern, a multi-controlled Toffoli flips the target, and the
    X-gates are undone.
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    # oracle.draw('mpl', filename=(kernel + '-oracle.png'))
    return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Assemble a (mutated) Bernstein-Vazirani circuit for the n-bit oracle f.

    Prepares |0..0>|1>, applies a fixed extra gate sequence (the `# number=NN`
    trail appears to come from a mutation/generation tool), the Hadamard
    layers, and the oracle O_f. No measurement is added here.
    """
    # implement the Bernstein-Vazirani circuit
    zero = np.binary_repr(0, n)
    b = f(zero)
    # initial n + 1 bits
    input_qubit = QuantumRegister(n+1, "qc")
    classicals = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classicals)
    # inverse last one (can be omitted if using O_f^\pm)
    prog.x(input_qubit[n])
    # circuit begin
    prog.h(input_qubit[1]) # number=1
    prog.h(input_qubit[2]) # number=38
    prog.cz(input_qubit[0],input_qubit[2]) # number=39
    prog.h(input_qubit[2]) # number=40
    prog.cx(input_qubit[0],input_qubit[2]) # number=31
    prog.h(input_qubit[2]) # number=42
    prog.cz(input_qubit[0],input_qubit[2]) # number=43
    prog.h(input_qubit[2]) # number=44
    prog.h(input_qubit[2]) # number=48
    prog.cz(input_qubit[0],input_qubit[2]) # number=49
    prog.h(input_qubit[2]) # number=50
    prog.h(input_qubit[2]) # number=57
    prog.cz(input_qubit[0],input_qubit[2]) # number=58
    prog.h(input_qubit[2]) # number=59
    prog.x(input_qubit[2]) # number=55
    prog.cx(input_qubit[0],input_qubit[2]) # number=56
    prog.cx(input_qubit[0],input_qubit[2]) # number=47
    prog.cx(input_qubit[0],input_qubit[2]) # number=37
    prog.h(input_qubit[2]) # number=51
    prog.cz(input_qubit[0],input_qubit[2]) # number=52
    prog.h(input_qubit[2]) # number=53
    prog.h(input_qubit[2]) # number=25
    prog.cz(input_qubit[0],input_qubit[2]) # number=26
    prog.h(input_qubit[2]) # number=27
    prog.h(input_qubit[1]) # number=7
    prog.cz(input_qubit[2],input_qubit[1]) # number=8
    prog.rx(0.17592918860102857,input_qubit[2]) # number=34
    prog.rx(-0.3989822670059037,input_qubit[1]) # number=30
    prog.h(input_qubit[1]) # number=9
    prog.h(input_qubit[1]) # number=18
    prog.cz(input_qubit[2],input_qubit[1]) # number=19
    prog.h(input_qubit[1]) # number=20
    prog.y(input_qubit[1]) # number=14
    prog.h(input_qubit[1]) # number=22
    prog.cz(input_qubit[2],input_qubit[1]) # number=23
    prog.h(input_qubit[1]) # number=24
    prog.z(input_qubit[2]) # number=3
    prog.z(input_qubit[1]) # number=41
    prog.x(input_qubit[1]) # number=17
    prog.y(input_qubit[2]) # number=5
    prog.x(input_qubit[2]) # number=21
    # apply H to get superposition
    for i in range(n):
        prog.h(input_qubit[i])
    prog.h(input_qubit[n])
    prog.barrier()
    # apply oracle O_f
    oracle = build_oracle(n, f)
    prog.append(
        oracle.to_gate(),
        [input_qubit[i] for i in range(n)] + [input_qubit[n]])
    # apply H back (QFT on Z_2^n)
    for i in range(n):
        prog.h(input_qubit[i])
    prog.barrier()
    # measure
    return prog
def get_statevector(prog: QuantumCircuit) -> Any:
    """Simulate `prog` on the statevector backend and return a dict mapping
    basis-state labels like "|010>" to their complex amplitudes."""
    state_backend = Aer.get_backend('statevector_simulator')
    statevec = execute(prog, state_backend).result()
    quantum_state = statevec.get_statevector()
    # Number of qubits is log2 of the statevector length.
    qubits = round(log2(len(quantum_state)))
    quantum_state = {
        "|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
        for i in range(2 ** qubits)
    }
    return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
    """Run `prog` on the named Aer backend and package the results: the
    measurement counts, the ideal statevector amplitudes, the most frequent
    bitstring `a` (bit-reversed), and the expected bias `b` passed through."""
    # Q: which backend should we use?
    # get state vector
    quantum_state = get_statevector(prog)
    # get simulate results
    # provider = IBMQ.load_account()
    # backend = provider.get_backend(backend_str)
    # qobj = compile(prog, backend, shots)
    # job = backend.run(qobj)
    # job.result()
    backend = Aer.get_backend(backend_str)
    # transpile/schedule -> assemble -> backend.run
    results = execute(prog, backend, shots=shots).result()
    counts = results.get_counts()
    # Most common outcome, reversed to undo qiskit's little-endian bit order.
    a = Counter(counts).most_common(1)[0][0][::-1]
    return {
        "measurements": counts,
        # "state": statevec,
        "quantum_state": quantum_state,
        "a": a,
        "b": b
    }
def bernstein_test_1(rep: str):
    """011 . x + 1"""
    hidden_string = "011"
    bias = "1"
    return bitwise_xor(bitwise_dot(hidden_string, rep), bias)
def bernstein_test_2(rep: str):
    """000 . x + 0"""
    hidden_string = "000"
    bias = "0"
    return bitwise_xor(bitwise_dot(hidden_string, rep), bias)
def bernstein_test_3(rep: str):
    """111 . x + 1"""
    hidden_string = "111"
    bias = "1"
    return bitwise_xor(bitwise_dot(hidden_string, rep), bias)
if __name__ == "__main__":
    # Bernstein-Vazirani instance: hidden string a = "11", bias b = "1".
    n = 2
    a = "11"
    b = "1"
    f = lambda rep: \
        bitwise_xor(bitwise_dot(a, rep), b)
    prog = build_circuit(n, f)
    sample_shot =4000
    writefile = open("../data/startQiskit289.csv", "w")
    # prog.draw('mpl', filename=(kernel + '.png'))
    backend = BasicAer.get_backend('qasm_simulator')
    # Map the circuit onto the FakeYorktown layout, then apply two extra
    # gates (part of the generated experiment) and measure all qubits.
    circuit1 = transpile(prog, FakeYorktown())
    circuit1.h(qubit=2)
    circuit1.x(qubit=3)
    circuit1.measure_all()
    info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
    # Dump counts, circuit depth, and the circuit drawing to the output file.
    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
| 31.547945 | 140 | 0.636995 |
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
    """Position-wise XOR of two equal-length bit strings, returned reversed."""
    length = len(s)
    res = []
    for i in range(length):
        res.append(str(int(s[i]) ^ int(t[i])))
    # The output bit order is deliberately reversed.
    return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
    """Inner product modulo 2 of two equal-length bit strings, as '0'/'1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build a circuit that toggles the target qubit exactly for the
    n-bit basis states x with f(x) == '1' (multi-controlled X per state).
    """
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")

    def flip_zero_controls(rep):
        # X-conjugate every control whose bit is '0' so the multi-controlled
        # gate fires precisely for the pattern `rep`.
        for j in range(n):
            if rep[j] == "0":
                oracle.x(controls[j])

    for value in range(2 ** n):
        rep = np.binary_repr(value, n)
        if f(rep) != "1":
            continue
        flip_zero_controls(rep)
        oracle.mct(controls, target[0], None, mode='noancilla')
        flip_zero_controls(rep)
    return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build a Bernstein-Vazirani-style circuit for oracle f on n input
    qubits plus one ancilla, preceded by a fixed extra gate sequence.

    Registers: n+1 qubits "qc" (last one is the ancilla), n classical
    bits "qm".  Returns the assembled circuit (no measurements added).
    """
    zero = np.binary_repr(0, n)
    b = f(zero)  # f at the all-zero input; not used further in this function
    input_qubit = QuantumRegister(n+1, "qc")
    classicals = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classicals)
    # Put the ancilla into |1> so the oracle kicks back a phase after the H below.
    prog.x(input_qubit[n])
    # NOTE(review): the following fixed gate sequence on qubits 0-2 precedes the
    # standard Bernstein-Vazirani layout; it appears to be benchmark-specific
    # padding/perturbation rather than part of the algorithm -- confirm.
    prog.h(input_qubit[1])
    prog.h(input_qubit[2])
    prog.cz(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.cx(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.cz(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.h(input_qubit[2])
    prog.cz(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.h(input_qubit[2])
    prog.cz(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.x(input_qubit[2])
    prog.cx(input_qubit[0],input_qubit[2])
    prog.cx(input_qubit[0],input_qubit[2])
    prog.cx(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.cz(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.h(input_qubit[2])
    prog.cz(input_qubit[0],input_qubit[2])
    prog.h(input_qubit[2])
    prog.h(input_qubit[1])
    prog.cz(input_qubit[2],input_qubit[1])
    prog.rx(0.17592918860102857,input_qubit[2])
    prog.rx(-0.3989822670059037,input_qubit[1])
    prog.h(input_qubit[1])
    prog.h(input_qubit[1])
    prog.cz(input_qubit[2],input_qubit[1])
    prog.h(input_qubit[1])
    prog.y(input_qubit[1])
    prog.h(input_qubit[1])
    prog.cz(input_qubit[2],input_qubit[1])
    prog.h(input_qubit[1])
    prog.z(input_qubit[2])
    prog.z(input_qubit[1])
    prog.x(input_qubit[1])
    prog.y(input_qubit[2])
    prog.x(input_qubit[2])
    # Standard layout: H on all inputs and the ancilla, apply the oracle,
    # then H on the inputs again.
    for i in range(n):
        prog.h(input_qubit[i])
    prog.h(input_qubit[n])
    prog.barrier()
    oracle = build_oracle(n, f)
    prog.append(
        oracle.to_gate(),
        [input_qubit[i] for i in range(n)] + [input_qubit[n]])
    for i in range(n):
        prog.h(input_qubit[i])
    prog.barrier()
    return prog
def get_statevector(prog: QuantumCircuit) -> Any:
    """Simulate `prog` and return its statevector as {"|bits>": amplitude}."""
    simulator = Aer.get_backend('statevector_simulator')
    amplitudes = execute(prog, simulator).result().get_statevector()
    n_qubits = round(log2(len(amplitudes)))
    labelled = {}
    for index in range(2 ** n_qubits):
        label = "|" + np.binary_repr(index, n_qubits) + ">"
        labelled[label] = amplitudes[index]
    return labelled
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
    """Run `prog` on the named backend and collect results.

    Returns a dict with the raw measurement counts, the labelled
    statevector, the most frequent bitstring reversed as "a", and
    the given "b" unchanged.
    """
    state = get_statevector(prog)
    sim_backend = Aer.get_backend(backend_str)
    counts = execute(prog, sim_backend, shots=shots).result().get_counts()
    # Most frequent measured bitstring, reversed ([::-1]).
    top_bitstring = Counter(counts).most_common(1)[0][0]
    return {
        "measurements": counts,
        "quantum_state": state,
        "a": top_bitstring[::-1],
        "b": b,
    }
def bernstein_test_1(rep: str):
    """Oracle output for the affine function f(x) = 011 . x + 1 (mod 2)."""
    return bitwise_xor(bitwise_dot("011", rep), "1")
def bernstein_test_2(rep: str):
    """Oracle output for the affine function f(x) = 000 . x + 0 (mod 2)."""
    return bitwise_xor(bitwise_dot("000", rep), "0")
def bernstein_test_3(rep: str):
    """Oracle output for the affine function f(x) = 111 . x + 1 (mod 2)."""
    return bitwise_xor(bitwise_dot("111", rep), "1")
if __name__ == "__main__":
    # Hidden string `a` and bias `b` of the affine oracle f(x) = a.x + b.
    n = 2
    a = "11"
    b = "1"
    f = lambda rep: \
        bitwise_xor(bitwise_dot(a, rep), b)
    prog = build_circuit(n, f)
    sample_shot =4000
    # Measurement counts and circuit info are written to this CSV file.
    writefile = open("../data/startQiskit289.csv", "w")
    backend = BasicAer.get_backend('qasm_simulator')
    # Map the circuit onto the FakeYorktown device model before running.
    circuit1 = transpile(prog, FakeYorktown())
    # NOTE(review): these extra H/X gates are applied after transpilation,
    # so they perturb the compiled circuit rather than the algorithm -- confirm intent.
    circuit1.h(qubit=2)
    circuit1.x(qubit=3)
    circuit1.measure_all()
    info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
    # Dump counts, a marker line, the depth and the circuit text for offline analysis.
    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
| true | true |
f713bb216808a56db66a5efb35ac61c688245d4d | 15,257 | py | Python | topi/python/topi/cuda/rcnn/proposal.py | seanlatias/incubator-tvm | 2e5cb16b4cebe53fba4fb3be49b6748738e20224 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 9 | 2019-12-17T08:03:54.000Z | 2022-01-19T02:34:23.000Z | topi/python/topi/cuda/rcnn/proposal.py | seanlatias/incubator-tvm | 2e5cb16b4cebe53fba4fb3be49b6748738e20224 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 2 | 2020-06-18T21:15:42.000Z | 2020-06-24T17:38:37.000Z | topi/python/topi/cuda/rcnn/proposal.py | seanlatias/incubator-tvm | 2e5cb16b4cebe53fba4fb3be49b6748738e20224 | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 3 | 2020-10-04T20:30:18.000Z | 2022-01-24T18:03:52.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, singleton-comparison, bad-continuation
"""Proposal operator"""
import math
import tvm
from tvm import te
from ...vision.rcnn import generate_anchor, reg_bbox, reg_iou
from ...util import get_const_tuple, get_const_int
def predict_bbox_ir(cls_prob_buf, bbox_pred_buf, im_info_buf, out_buf, scales, ratios,
                    feature_stride, rpn_min_size, iou_loss):
    """Predict bounding boxes based on anchors, scores and deltas.
    Parameters
    ----------
    cls_prob_buf : tvm.te.schedule.Buffer
        4-D with shape [batch, 2 * num_anchors, height, width]
    bbox_pred_buf : tvm.te.schedule.Buffer
        4-D with shape [batch, 4 * num_anchors, height, width]
    im_info_buf : tvm.te.schedule.Buffer
        2-D with shape [batch, 3]
    out_buf : tvm.te.schedule.Buffer
        3-D with shape [batch, num_bbox, 5]
        The last dimension is in format of [w_start, h_start, w_end, h_end, score]
    scales : list/tuple of float
        Scales of anchor windows.
    ratios : list/tuple of float
        Ratios of anchor windows.
    feature_stride : int
        The size of the receptive field each unit in the convolution layer of the rpn, for example
        the product of all stride's prior to this layer.
    rpn_min_size : int
        Minimum height or width in proposal.
    iou_loss : bool
        Usage of IoU loss.
    Returns
    -------
    stmt : Stmt
        The result IR statement.
    """
    batch, num_anchors, height, width = get_const_tuple(cls_prob_buf.shape)
    # cls_prob packs 2 * num_anchors channels; only the second half is read as scores below.
    num_anchors //= 2
    max_threads = int(tvm.target.Target.current(allow_none=False).max_num_threads)
    nthread_tx = max_threads
    nthread_bx = (batch * height * width) // max_threads + 1
    tx = te.thread_axis("threadIdx.x")
    bx = te.thread_axis("blockIdx.x")
    # One thread per (batch, h, w) location; all anchors there are handled in the k loop.
    tid = bx * max_threads + tx
    ib = tvm.tir.ir_builder.create()
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    ib.scope_attr(bx, "thread_extent", nthread_bx)
    p_score = ib.buffer_ptr(cls_prob_buf)
    p_delta = ib.buffer_ptr(bbox_pred_buf)
    p_im_info = ib.buffer_ptr(im_info_buf)
    p_out = ib.buffer_ptr(out_buf)
    idxm = tvm.tir.indexmod
    idxd = tvm.tir.indexdiv
    with ib.if_scope(tid < batch * height * width):
        w = idxm(tid, width)
        h = idxm(idxd(tid, width), height)
        b = idxd(idxd(tid, width), height)
        for k in range(num_anchors):
            out_index = tid * num_anchors + k
            ratio = ratios[k // len(scales)]
            scale = scales[k % len(scales)]
            anchor = generate_anchor(ratio, scale, feature_stride)
            im_height = p_im_info[b * 3]
            im_width = p_im_info[b * 3 + 1]
            x1 = anchor[0] + w * feature_stride
            y1 = anchor[1] + h * feature_stride
            x2 = anchor[2] + w * feature_stride
            y2 = anchor[3] + h * feature_stride
            delta = [p_delta[((((b * num_anchors + k) * 4 + i) * height + h) * width + w)]
                     for i in range(4)]
            regression_func = reg_iou if iou_loss else reg_bbox
            pred_x1, pred_y1, pred_x2, pred_y2 = regression_func(x1, y1, x2, y2, *delta)
            # Clip predicted corners into the image rectangle.
            pred_x1 = tvm.te.max(tvm.te.min(pred_x1, im_width - 1.0), 0.0)
            pred_y1 = tvm.te.max(tvm.te.min(pred_y1, im_height - 1.0), 0.0)
            pred_x2 = tvm.te.max(tvm.te.min(pred_x2, im_width - 1.0), 0.0)
            pred_y2 = tvm.te.max(tvm.te.min(pred_y2, im_height - 1.0), 0.0)
            real_height = (im_height / feature_stride).astype('int32')
            real_width = (im_width / feature_stride).astype('int32')
            bbox_w = pred_x2 - pred_x1 + 1.0
            bbox_h = pred_y2 - pred_y1 + 1.0
            min_size = p_im_info[b * 3 + 2] * rpn_min_size
            pred_score = p_score[((b * num_anchors * 2 + num_anchors + k) * height + h) * width + w]
            # Locations beyond the actual (scaled) feature extent get score -1.
            pred_score = tvm.tir.Select(tvm.tir.any(h >= real_height, w >= real_width),
                                        -1.0, pred_score)
            p_out[out_index * 5 + 0] = pred_x1
            p_out[out_index * 5 + 1] = pred_y1
            p_out[out_index * 5 + 2] = pred_x2
            p_out[out_index * 5 + 3] = pred_y2
            p_out[out_index * 5 + 4] = pred_score
            # Too-small boxes are widened symmetrically and marked invalid (score -1).
            with ib.if_scope(tvm.tir.any(bbox_w < min_size, bbox_h < min_size)):
                p_out[out_index * 5 + 0] -= min_size / 2.0
                p_out[out_index * 5 + 1] -= min_size / 2.0
                p_out[out_index * 5 + 2] += min_size / 2.0
                p_out[out_index * 5 + 3] += min_size / 2.0
                p_out[out_index * 5 + 4] = -1.0
    return ib.get()
def argsort_ir(data_buf, out_index_buf):
    """Batched odd-even transposition sort.
    Parameters
    ----------
    data_buf : tvm.te.schedule.Buffer
        2-D with shape [batch, num_bbox]
    out_index_buf : tvm.te.schedule.Buffer
        2-D with shape [batch, num_bbox]. Indices of data in sorted order.
    Returns
    -------
    stmt : Stmt
        The result IR statement.
    """
    batch, num_bbox = get_const_tuple(data_buf.shape)
    max_threads = int(tvm.target.Target.current(allow_none=False).max_num_threads)
    ib = tvm.tir.ir_builder.create()
    # NOTE: data_buf itself is reordered in place as a side effect of the sort.
    p_data = ib.buffer_ptr(data_buf)
    index_out = ib.buffer_ptr(out_index_buf)
    nthread_tx = max_threads
    # Each thread owns one adjacent pair, hence (num_bbox + 1) // 2 workers.
    nthread_bx = (num_bbox + 1) // 2 // max_threads + 1
    tx = te.thread_axis("threadIdx.x")
    bx = te.thread_axis("vthread")
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    ib.scope_attr(bx, "virtual_thread", nthread_bx)
    tid = bx * nthread_tx + tx
    temp_data = ib.allocate("float32", (1,), name="temp_data", scope="local")
    temp_index = ib.allocate("int32", (1,), name="temp_index", scope="local")
    idxm = tvm.tir.indexmod
    with ib.for_range(0, batch, for_type="unroll") as b:
        start = b * num_bbox
        # Initialize the index output with the identity permutation.
        for i in range(2):
            bbox_id = tid * 2 + i
            with ib.if_scope(bbox_id < num_bbox):
                index_out[start + bbox_id] = bbox_id
        # num_bbox sweeps, alternating (even, odd) and (odd, even) pairs via idxm(k, 2).
        with ib.for_range(0, num_bbox) as k:
            offset = start + 2 * tid + idxm(k, 2)
            # Swap when the left value is smaller, i.e. sort in descending order.
            with ib.if_scope(
                    tvm.tir.all(offset + 1 < num_bbox, p_data[offset] < p_data[offset + 1])):
                temp_data[0] = p_data[offset]
                p_data[offset] = p_data[offset + 1]
                p_data[offset + 1] = temp_data[0]
                temp_index[0] = index_out[offset]
                index_out[offset] = index_out[offset + 1]
                index_out[offset + 1] = temp_index[0]
    ib.emit(tvm.tir.Call(None, 'tvm_storage_sync',
                         tvm.runtime.convert(['shared']),
                         tvm.tir.Call.Intrinsic))
    return ib.get()
def nms_ir(sorted_bbox_buf, out_buf, nms_threshold):
    """Non-maximum supression.
    Parameters
    ----------
    sorted_bbox_buf : tvm.te.schedule.Buffer
        3-D with shape [batch, num_bbox, 5]. The last dimension is in format of
        [w_start, h_start, w_end, h_end, score].
    out_buf : tvm.te.schedule.Buffer
        2-D with shape [batch, num_bbox]. Boolean mask of whether a bounding box should be removed.
    nms_threshold : float
        Non-maximum suppression threshold.
    Returns
    -------
    stmt : Stmt
        The result IR statement.
    """
    def calculate_overlap(out_tensor, box_a_idx, box_b_idx):
        """Calculate overlap of two boxes.
        """
        # Widths/heights use the inclusive "+ 1.0" convention, matching predict_bbox_ir.
        w = tvm.te.max(0.0, tvm.te.min(out_tensor[box_a_idx + 2], out_tensor[box_b_idx + 2])
                       - tvm.te.max(out_tensor[box_a_idx], out_tensor[box_b_idx]) + 1.0)
        h = tvm.te.max(0.0, tvm.te.min(out_tensor[box_a_idx + 3], out_tensor[box_b_idx + 3])
                       - tvm.te.max(out_tensor[box_a_idx + 1], out_tensor[box_b_idx + 1]) + 1.0)
        i = w * h
        u = (out_tensor[box_a_idx + 2] - out_tensor[box_a_idx] + 1.0) * \
            (out_tensor[box_a_idx + 3] - out_tensor[box_a_idx + 1] + 1.0) + \
            (out_tensor[box_b_idx + 2] - out_tensor[box_b_idx] + 1.0) * \
            (out_tensor[box_b_idx + 3] - out_tensor[box_b_idx + 1] + 1.0) - i
        return i / u
    batch, num_bbox = get_const_tuple(out_buf.shape)
    max_threads = int(math.sqrt(tvm.target.Target.current(allow_none=False).max_num_threads))
    tx = te.thread_axis("threadIdx.x")
    bx = te.thread_axis("blockIdx.x")
    ib = tvm.tir.ir_builder.create()
    p_data = ib.buffer_ptr(sorted_bbox_buf)
    p_out = ib.buffer_ptr(out_buf)
    nthread_tx = max_threads
    nthread_bx = num_bbox // max_threads + 1
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    ib.scope_attr(bx, "thread_extent", nthread_bx)
    # One thread per box index i.
    i = bx * max_threads + tx
    with ib.for_range(0, batch, for_type="unroll", name="n") as b:
        base_idx = b * num_bbox
        with ib.if_scope(i < num_bbox):
            # False = keep; flipped to True below when box i is suppressed.
            p_out[base_idx + i] = False
        # Compare box i only against earlier (higher-ranked, since the input is
        # score-sorted) boxes l that are themselves still kept.
        with ib.for_range(0, num_bbox - 1) as l:
            with ib.if_scope(tvm.tir.all(i < num_bbox, i > l, p_out[base_idx + l] == False)):
                iou = calculate_overlap(p_data, (base_idx + l) * 5, (base_idx + i) * 5)
                with ib.if_scope(iou > nms_threshold):
                    p_out[base_idx + i] = True
    ib.emit(tvm.tir.Call(None, 'tvm_storage_sync',
                         tvm.runtime.convert(['shared']),
                         tvm.tir.Call.Intrinsic))
    return ib.get()
def prepare_output_ir(sorted_bbox_buf, remove_mask_buf, out_buf):
    """Copy output after applying nms to continuous memory.
    Parameters
    ----------
    sorted_bbox_buf : tvm.te.schedule.Buffer
        3-D with shape [batch, num_bbox, 5]. The last dimension is in format of
        [w_start, h_start, w_end, h_end, score].
    remove_mask_buf : tvm.te.schedule.Buffer
        2-D with shape [batch, num_bbox]. Boolean mask of whether a bounding box should be removed.
    out_buf : tvm.te.schedule.Buffer
        2-D with shape [batch * rpn_post_nms_top_n, 5]. The last dimension is in format of
        [batch_index, w_start, h_start, w_end, h_end].
    Returns
    -------
    stmt : Stmt
        The result IR statement.
    """
    batch, num_bbox, _ = get_const_tuple(sorted_bbox_buf.shape)
    rpn_post_nms_top_n = get_const_int(out_buf.shape[0]) // batch
    # One thread per batch element.
    nthread_tx = batch
    tx = te.thread_axis("threadIdx.x")
    ib = tvm.tir.ir_builder.create()
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    # i counts output rows already written for this batch element.
    i = ib.allocate('int32', (1,), 'i', scope='local')
    i[0] = 0
    p_sorted_bbox = ib.buffer_ptr(sorted_bbox_buf)
    p_remove = ib.buffer_ptr(remove_mask_buf)
    p_out = ib.buffer_ptr(out_buf)
    b = tx
    nkeep = ib.allocate('int32', (1,), 'nkeep', scope='local')
    nkeep[0] = 0 # number of bbox after nms
    with ib.for_range(0, num_bbox) as j:
        with ib.if_scope(p_remove[b * num_bbox + j] == False):
            nkeep[0] += 1
    with ib.if_scope(nkeep[0] > 0):
        # Cycle over the kept boxes enough times to fill all
        # rpn_post_nms_top_n output rows (boxes repeat if nkeep is small).
        with ib.for_range(0, te.ceil(
                tvm.tir.const(rpn_post_nms_top_n, 'float32') / nkeep[0]).astype('int32')):
            with ib.for_range(0, num_bbox) as j:
                offset_j = (b * num_bbox + j) * 5
                offset_i = (b * rpn_post_nms_top_n + i[0]) * 5
                with ib.if_scope(tvm.tir.all(i[0] < rpn_post_nms_top_n,
                                             p_remove[(b*num_bbox+j)] == False)):
                    # First output column is the batch index, then the 4 corners.
                    p_out[offset_i] = tvm.tir.Cast('float32', b)
                    with ib.for_range(0, 4, for_type='unroll') as k:
                        p_out[offset_i + k + 1] = p_sorted_bbox[offset_j + k]
                    i[0] = i[0] + 1
    body = ib.get()
    return body
def proposal(cls_prob, bbox_pred, im_info, scales, ratios, feature_stride, threshold,
             rpn_pre_nms_top_n, rpn_post_nms_top_n, rpn_min_size, iou_loss):
    """Proposal operator.
    Parameters
    ----------
    cls_prob : tvm.te.Tensor
        4-D with shape [batch, 2 * num_anchors, height, width]
    bbox_pred : tvm.te.Tensor
        4-D with shape [batch, 4 * num_anchors, height, width]
    im_info : tvm.te.Tensor
        2-D with shape [batch, 3]
    scales : list/tuple of float
        Scales of anchor windows.
    ratios : list/tuple of float
        Ratios of anchor windows.
    feature_stride : int
        The size of the receptive field each unit in the convolution layer of the rpn, for example
        the product of all stride's prior to this layer.
    threshold : float
        Non-maximum suppression threshold.
    rpn_pre_nms_top_n : int
        Number of top scoring boxes to apply NMS. -1 to use all boxes.
    rpn_post_nms_top_n : int
        Number of top scoring boxes to keep after applying NMS to RPN proposals.
    rpn_min_size : int
        Minimum height or width in proposal.
    iou_loss : bool
        Usage of IoU loss.
    Returns
    -------
    out : tvm.te.Tensor
        2-D tensor with shape [batch * rpn_post_nms_top_n, 5]. The last dimension is in format of
        [batch_index, w_start, h_start, w_end, h_end].
    """
    batch, _, height, width = get_const_tuple(cls_prob.shape)
    num_anchors = len(scales) * len(ratios)
    num_bbox = height * width * num_anchors
    rpn_pre_nms_top_n = min(rpn_pre_nms_top_n, num_bbox) if rpn_pre_nms_top_n > 0 else num_bbox
    # Stage 1: decode anchors + regression deltas into scored boxes.
    bbox = te.extern((batch, num_bbox, 5), [cls_prob, bbox_pred, im_info], lambda ins, outs:
    predict_bbox_ir(ins[0], ins[1], ins[2], outs[0], scales, ratios,
                    feature_stride, rpn_min_size, iou_loss),
                     dtype=bbox_pred.dtype)
    score = te.compute((batch, num_bbox), lambda b, i: bbox[b, i, 4], tag='bbox_score')
    # Stage 2: argsort scores (argsort_ir sorts descending and also reorders its input).
    sorted_index = te.extern([score.shape], [score],
                             lambda ins, outs: argsort_ir(ins[0], outs[0]),
                             dtype='int32')
    # Gather the rpn_pre_nms_top_n best boxes in score order.
    sorted_bbox = te.compute((batch, rpn_pre_nms_top_n, 5),
                             lambda b, i, j: bbox[b, sorted_index[b, i], j], tag='sorted_bbox')
    # Stage 3: boolean mask of boxes suppressed by NMS.
    nms_remove_mask = te.extern((batch, rpn_pre_nms_top_n), [sorted_bbox],
                                lambda ins, outs: nms_ir(ins[0], outs[0], threshold),
                                dtype='bool')
    # Stage 4: compact the surviving boxes into the fixed-size output tensor.
    nms_out = te.extern((batch * rpn_post_nms_top_n, 5), [sorted_bbox, nms_remove_mask],
                        lambda ins, outs: prepare_output_ir(ins[0], ins[1], outs[0]),
                        dtype=sorted_bbox.dtype)
    return nms_out
| 40.044619 | 100 | 0.604575 |
import math
import tvm
from tvm import te
from ...vision.rcnn import generate_anchor, reg_bbox, reg_iou
from ...util import get_const_tuple, get_const_int
def predict_bbox_ir(cls_prob_buf, bbox_pred_buf, im_info_buf, out_buf, scales, ratios,
                    feature_stride, rpn_min_size, iou_loss):
    """Emit IR that decodes anchors + regression deltas into scored boxes.

    Writes [w_start, h_start, w_end, h_end, score] per anchor into out_buf.
    Anchors at locations beyond the scaled image extent, or whose boxes are
    smaller than rpn_min_size, get score -1.  Returns the IR statement.
    """
    batch, num_anchors, height, width = get_const_tuple(cls_prob_buf.shape)
    # cls_prob packs 2 * num_anchors channels; only the second half is read as scores.
    num_anchors //= 2
    max_threads = int(tvm.target.Target.current(allow_none=False).max_num_threads)
    nthread_tx = max_threads
    nthread_bx = (batch * height * width) // max_threads + 1
    tx = te.thread_axis("threadIdx.x")
    bx = te.thread_axis("blockIdx.x")
    # One thread per (batch, h, w) location; anchors handled in the k loop.
    tid = bx * max_threads + tx
    ib = tvm.tir.ir_builder.create()
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    ib.scope_attr(bx, "thread_extent", nthread_bx)
    p_score = ib.buffer_ptr(cls_prob_buf)
    p_delta = ib.buffer_ptr(bbox_pred_buf)
    p_im_info = ib.buffer_ptr(im_info_buf)
    p_out = ib.buffer_ptr(out_buf)
    idxm = tvm.tir.indexmod
    idxd = tvm.tir.indexdiv
    with ib.if_scope(tid < batch * height * width):
        w = idxm(tid, width)
        h = idxm(idxd(tid, width), height)
        b = idxd(idxd(tid, width), height)
        for k in range(num_anchors):
            out_index = tid * num_anchors + k
            ratio = ratios[k // len(scales)]
            scale = scales[k % len(scales)]
            anchor = generate_anchor(ratio, scale, feature_stride)
            im_height = p_im_info[b * 3]
            im_width = p_im_info[b * 3 + 1]
            x1 = anchor[0] + w * feature_stride
            y1 = anchor[1] + h * feature_stride
            x2 = anchor[2] + w * feature_stride
            y2 = anchor[3] + h * feature_stride
            delta = [p_delta[((((b * num_anchors + k) * 4 + i) * height + h) * width + w)]
                     for i in range(4)]
            regression_func = reg_iou if iou_loss else reg_bbox
            pred_x1, pred_y1, pred_x2, pred_y2 = regression_func(x1, y1, x2, y2, *delta)
            # Clip predicted corners into the image rectangle.
            pred_x1 = tvm.te.max(tvm.te.min(pred_x1, im_width - 1.0), 0.0)
            pred_y1 = tvm.te.max(tvm.te.min(pred_y1, im_height - 1.0), 0.0)
            pred_x2 = tvm.te.max(tvm.te.min(pred_x2, im_width - 1.0), 0.0)
            pred_y2 = tvm.te.max(tvm.te.min(pred_y2, im_height - 1.0), 0.0)
            real_height = (im_height / feature_stride).astype('int32')
            real_width = (im_width / feature_stride).astype('int32')
            bbox_w = pred_x2 - pred_x1 + 1.0
            bbox_h = pred_y2 - pred_y1 + 1.0
            min_size = p_im_info[b * 3 + 2] * rpn_min_size
            pred_score = p_score[((b * num_anchors * 2 + num_anchors + k) * height + h) * width + w]
            # Locations beyond the actual (scaled) feature extent get score -1.
            pred_score = tvm.tir.Select(tvm.tir.any(h >= real_height, w >= real_width),
                                        -1.0, pred_score)
            p_out[out_index * 5 + 0] = pred_x1
            p_out[out_index * 5 + 1] = pred_y1
            p_out[out_index * 5 + 2] = pred_x2
            p_out[out_index * 5 + 3] = pred_y2
            p_out[out_index * 5 + 4] = pred_score
            # Too-small boxes are widened symmetrically and marked invalid (score -1).
            with ib.if_scope(tvm.tir.any(bbox_w < min_size, bbox_h < min_size)):
                p_out[out_index * 5 + 0] -= min_size / 2.0
                p_out[out_index * 5 + 1] -= min_size / 2.0
                p_out[out_index * 5 + 2] += min_size / 2.0
                p_out[out_index * 5 + 3] += min_size / 2.0
                p_out[out_index * 5 + 4] = -1.0
    return ib.get()
def argsort_ir(data_buf, out_index_buf):
    """Batched odd-even transposition sort, descending by value.

    Fills out_index_buf ([batch, num_bbox]) with the sorted order;
    data_buf itself is reordered in place as a side effect.
    Returns the generated IR statement.
    """
    batch, num_bbox = get_const_tuple(data_buf.shape)
    max_threads = int(tvm.target.Target.current(allow_none=False).max_num_threads)
    ib = tvm.tir.ir_builder.create()
    p_data = ib.buffer_ptr(data_buf)
    index_out = ib.buffer_ptr(out_index_buf)
    nthread_tx = max_threads
    # Each thread owns one adjacent pair, hence (num_bbox + 1) // 2 workers.
    nthread_bx = (num_bbox + 1) // 2 // max_threads + 1
    tx = te.thread_axis("threadIdx.x")
    bx = te.thread_axis("vthread")
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    ib.scope_attr(bx, "virtual_thread", nthread_bx)
    tid = bx * nthread_tx + tx
    temp_data = ib.allocate("float32", (1,), name="temp_data", scope="local")
    temp_index = ib.allocate("int32", (1,), name="temp_index", scope="local")
    idxm = tvm.tir.indexmod
    with ib.for_range(0, batch, for_type="unroll") as b:
        start = b * num_bbox
        # Initialize the index output with the identity permutation.
        for i in range(2):
            bbox_id = tid * 2 + i
            with ib.if_scope(bbox_id < num_bbox):
                index_out[start + bbox_id] = bbox_id
        # num_bbox sweeps, alternating (even, odd) and (odd, even) pairs via idxm(k, 2).
        with ib.for_range(0, num_bbox) as k:
            offset = start + 2 * tid + idxm(k, 2)
            # Swap when the left value is smaller, i.e. descending order.
            with ib.if_scope(
                    tvm.tir.all(offset + 1 < num_bbox, p_data[offset] < p_data[offset + 1])):
                temp_data[0] = p_data[offset]
                p_data[offset] = p_data[offset + 1]
                p_data[offset + 1] = temp_data[0]
                temp_index[0] = index_out[offset]
                index_out[offset] = index_out[offset + 1]
                index_out[offset + 1] = temp_index[0]
    ib.emit(tvm.tir.Call(None, 'tvm_storage_sync',
                         tvm.runtime.convert(['shared']),
                         tvm.tir.Call.Intrinsic))
    return ib.get()
def nms_ir(sorted_bbox_buf, out_buf, nms_threshold):
    """Non-maximum suppression over score-sorted boxes.

    Sets out_buf[b, i] = True for every box whose IoU with a kept,
    earlier (higher-scoring) box exceeds nms_threshold; kept boxes
    stay False.  Returns the generated IR statement.
    """
    def calculate_overlap(out_tensor, box_a_idx, box_b_idx):
        # IoU with inclusive "+ 1.0" widths/heights, matching predict_bbox_ir.
        w = tvm.te.max(0.0, tvm.te.min(out_tensor[box_a_idx + 2], out_tensor[box_b_idx + 2])
                       - tvm.te.max(out_tensor[box_a_idx], out_tensor[box_b_idx]) + 1.0)
        h = tvm.te.max(0.0, tvm.te.min(out_tensor[box_a_idx + 3], out_tensor[box_b_idx + 3])
                       - tvm.te.max(out_tensor[box_a_idx + 1], out_tensor[box_b_idx + 1]) + 1.0)
        i = w * h
        u = (out_tensor[box_a_idx + 2] - out_tensor[box_a_idx] + 1.0) * \
            (out_tensor[box_a_idx + 3] - out_tensor[box_a_idx + 1] + 1.0) + \
            (out_tensor[box_b_idx + 2] - out_tensor[box_b_idx] + 1.0) * \
            (out_tensor[box_b_idx + 3] - out_tensor[box_b_idx + 1] + 1.0) - i
        return i / u
    batch, num_bbox = get_const_tuple(out_buf.shape)
    max_threads = int(math.sqrt(tvm.target.Target.current(allow_none=False).max_num_threads))
    tx = te.thread_axis("threadIdx.x")
    bx = te.thread_axis("blockIdx.x")
    ib = tvm.tir.ir_builder.create()
    p_data = ib.buffer_ptr(sorted_bbox_buf)
    p_out = ib.buffer_ptr(out_buf)
    nthread_tx = max_threads
    nthread_bx = num_bbox // max_threads + 1
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    ib.scope_attr(bx, "thread_extent", nthread_bx)
    # One thread per box index i.
    i = bx * max_threads + tx
    with ib.for_range(0, batch, for_type="unroll", name="n") as b:
        base_idx = b * num_bbox
        with ib.if_scope(i < num_bbox):
            # False = keep; flipped to True below when box i is suppressed.
            p_out[base_idx + i] = False
        # Compare box i only against earlier boxes l that are themselves kept.
        with ib.for_range(0, num_bbox - 1) as l:
            with ib.if_scope(tvm.tir.all(i < num_bbox, i > l, p_out[base_idx + l] == False)):
                iou = calculate_overlap(p_data, (base_idx + l) * 5, (base_idx + i) * 5)
                with ib.if_scope(iou > nms_threshold):
                    p_out[base_idx + i] = True
    ib.emit(tvm.tir.Call(None, 'tvm_storage_sync',
                         tvm.runtime.convert(['shared']),
                         tvm.tir.Call.Intrinsic))
    return ib.get()
def prepare_output_ir(sorted_bbox_buf, remove_mask_buf, out_buf):
    """Compact boxes that survived NMS into contiguous output rows.

    Writes [batch_index, w_start, h_start, w_end, h_end] rows, cycling
    through the kept boxes until rpn_post_nms_top_n rows per batch are
    filled.  Returns the generated IR statement.
    """
    batch, num_bbox, _ = get_const_tuple(sorted_bbox_buf.shape)
    rpn_post_nms_top_n = get_const_int(out_buf.shape[0]) // batch
    # One thread per batch element.
    nthread_tx = batch
    tx = te.thread_axis("threadIdx.x")
    ib = tvm.tir.ir_builder.create()
    ib.scope_attr(tx, "thread_extent", nthread_tx)
    # i counts output rows already written for this batch element.
    i = ib.allocate('int32', (1,), 'i', scope='local')
    i[0] = 0
    p_sorted_bbox = ib.buffer_ptr(sorted_bbox_buf)
    p_remove = ib.buffer_ptr(remove_mask_buf)
    p_out = ib.buffer_ptr(out_buf)
    b = tx
    # nkeep = number of boxes left after NMS for this batch element.
    nkeep = ib.allocate('int32', (1,), 'nkeep', scope='local')
    nkeep[0] = 0
    with ib.for_range(0, num_bbox) as j:
        with ib.if_scope(p_remove[b * num_bbox + j] == False):
            nkeep[0] += 1
    with ib.if_scope(nkeep[0] > 0):
        # Cycle over the kept boxes enough times to fill all output rows.
        with ib.for_range(0, te.ceil(
                tvm.tir.const(rpn_post_nms_top_n, 'float32') / nkeep[0]).astype('int32')):
            with ib.for_range(0, num_bbox) as j:
                offset_j = (b * num_bbox + j) * 5
                offset_i = (b * rpn_post_nms_top_n + i[0]) * 5
                with ib.if_scope(tvm.tir.all(i[0] < rpn_post_nms_top_n,
                                             p_remove[(b*num_bbox+j)] == False)):
                    # First output column is the batch index, then the 4 corners.
                    p_out[offset_i] = tvm.tir.Cast('float32', b)
                    with ib.for_range(0, 4, for_type='unroll') as k:
                        p_out[offset_i + k + 1] = p_sorted_bbox[offset_j + k]
                    i[0] = i[0] + 1
    body = ib.get()
    return body
def proposal(cls_prob, bbox_pred, im_info, scales, ratios, feature_stride, threshold,
             rpn_pre_nms_top_n, rpn_post_nms_top_n, rpn_min_size, iou_loss):
    """Region-proposal operator: decode anchors, sort by score, NMS, compact.

    Returns a [batch * rpn_post_nms_top_n, 5] tensor whose rows are
    [batch_index, w_start, h_start, w_end, h_end].
    """
    batch, _, height, width = get_const_tuple(cls_prob.shape)
    num_anchors = len(scales) * len(ratios)
    num_bbox = height * width * num_anchors
    rpn_pre_nms_top_n = min(rpn_pre_nms_top_n, num_bbox) if rpn_pre_nms_top_n > 0 else num_bbox
    # Stage 1: decode anchors + regression deltas into scored boxes.
    bbox = te.extern((batch, num_bbox, 5), [cls_prob, bbox_pred, im_info], lambda ins, outs:
    predict_bbox_ir(ins[0], ins[1], ins[2], outs[0], scales, ratios,
                    feature_stride, rpn_min_size, iou_loss),
                     dtype=bbox_pred.dtype)
    score = te.compute((batch, num_bbox), lambda b, i: bbox[b, i, 4], tag='bbox_score')
    # Stage 2: argsort scores (argsort_ir sorts descending and reorders its input).
    sorted_index = te.extern([score.shape], [score],
                             lambda ins, outs: argsort_ir(ins[0], outs[0]),
                             dtype='int32')
    # Gather the rpn_pre_nms_top_n best boxes in score order.
    sorted_bbox = te.compute((batch, rpn_pre_nms_top_n, 5),
                             lambda b, i, j: bbox[b, sorted_index[b, i], j], tag='sorted_bbox')
    # Stage 3: boolean mask of boxes suppressed by NMS.
    nms_remove_mask = te.extern((batch, rpn_pre_nms_top_n), [sorted_bbox],
                                lambda ins, outs: nms_ir(ins[0], outs[0], threshold),
                                dtype='bool')
    # Stage 4: compact the surviving boxes into the fixed-size output tensor.
    nms_out = te.extern((batch * rpn_post_nms_top_n, 5), [sorted_bbox, nms_remove_mask],
                        lambda ins, outs: prepare_output_ir(ins[0], ins[1], outs[0]),
                        dtype=sorted_bbox.dtype)
    return nms_out
| true | true |
f713bd33c2d69f8941017073c807f9fb3f287a60 | 50,346 | py | Python | nuitka/optimizations/OptimizeBuiltinCalls.py | jvalduvieco/Nuitka | b93046d5d1c162d416d392d835790936d15a2cf8 | [
"Apache-2.0"
] | null | null | null | nuitka/optimizations/OptimizeBuiltinCalls.py | jvalduvieco/Nuitka | b93046d5d1c162d416d392d835790936d15a2cf8 | [
"Apache-2.0"
] | null | null | null | nuitka/optimizations/OptimizeBuiltinCalls.py | jvalduvieco/Nuitka | b93046d5d1c162d416d392d835790936d15a2cf8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Optimize calls to built-in references to specific built-in calls.
For built-in name references, we check if it's one of the supported built-in
types, and then specialize for the ones, where it makes sense.
"""
from nuitka.__past__ import xrange # pylint: disable=I0021,redefined-builtin
from nuitka.Errors import NuitkaAssumptionError
from nuitka.nodes.AssignNodes import (
StatementAssignmentVariable,
StatementDelVariable,
)
from nuitka.nodes.AttributeNodes import (
ExpressionAttributeLookup,
ExpressionBuiltinGetattr,
ExpressionBuiltinHasattr,
ExpressionBuiltinSetattr,
)
from nuitka.nodes.BuiltinAllNodes import ExpressionBuiltinAll
from nuitka.nodes.BuiltinAnyNodes import ExpressionBuiltinAny
from nuitka.nodes.BuiltinComplexNodes import (
ExpressionBuiltinComplex1,
ExpressionBuiltinComplex2,
)
from nuitka.nodes.BuiltinDecodingNodes import (
ExpressionBuiltinChr,
ExpressionBuiltinOrd,
)
from nuitka.nodes.BuiltinDecoratorNodes import (
ExpressionBuiltinClassmethod,
ExpressionBuiltinStaticmethod,
)
from nuitka.nodes.BuiltinDictNodes import ExpressionBuiltinDict
from nuitka.nodes.BuiltinFormatNodes import (
ExpressionBuiltinAscii,
ExpressionBuiltinBin,
ExpressionBuiltinFormat,
ExpressionBuiltinHex,
ExpressionBuiltinId,
ExpressionBuiltinOct,
)
from nuitka.nodes.BuiltinHashNodes import ExpressionBuiltinHash
from nuitka.nodes.BuiltinIntegerNodes import (
ExpressionBuiltinInt1,
ExpressionBuiltinInt2,
)
from nuitka.nodes.BuiltinIteratorNodes import (
ExpressionBuiltinIter1,
ExpressionBuiltinIter2,
)
from nuitka.nodes.BuiltinLenNodes import ExpressionBuiltinLen
from nuitka.nodes.BuiltinNextNodes import (
ExpressionBuiltinNext1,
ExpressionBuiltinNext2,
)
from nuitka.nodes.BuiltinOpenNodes import ExpressionBuiltinOpen
from nuitka.nodes.BuiltinRangeNodes import (
ExpressionBuiltinRange1,
ExpressionBuiltinRange2,
ExpressionBuiltinRange3,
ExpressionBuiltinXrange1,
ExpressionBuiltinXrange2,
ExpressionBuiltinXrange3,
)
from nuitka.nodes.BuiltinRefNodes import (
ExpressionBuiltinAnonymousRef,
makeExpressionBuiltinTypeRef,
)
from nuitka.nodes.BuiltinSumNodes import (
ExpressionBuiltinSum1,
ExpressionBuiltinSum2,
)
from nuitka.nodes.BuiltinTypeNodes import (
ExpressionBuiltinBool,
ExpressionBuiltinBytearray1,
ExpressionBuiltinBytearray3,
ExpressionBuiltinFloat,
ExpressionBuiltinFrozenset,
ExpressionBuiltinList,
ExpressionBuiltinSet,
ExpressionBuiltinStrP2,
ExpressionBuiltinStrP3,
ExpressionBuiltinTuple,
ExpressionBuiltinUnicodeP2,
)
from nuitka.nodes.BuiltinVarsNodes import ExpressionBuiltinVars
from nuitka.nodes.CallNodes import makeExpressionCall
from nuitka.nodes.ClassNodes import ExpressionBuiltinType3
from nuitka.nodes.ComparisonNodes import ExpressionComparisonIs
from nuitka.nodes.ConditionalNodes import (
ExpressionConditional,
makeStatementConditional,
)
from nuitka.nodes.ConstantRefNodes import makeConstantRefNode
from nuitka.nodes.ContainerMakingNodes import makeExpressionMakeTupleOrConstant
from nuitka.nodes.ExecEvalNodes import (
ExpressionBuiltinCompile,
ExpressionBuiltinEval,
)
from nuitka.nodes.GlobalsLocalsNodes import (
ExpressionBuiltinDir1,
ExpressionBuiltinGlobals,
)
from nuitka.nodes.ImportNodes import ExpressionBuiltinImport
from nuitka.nodes.NodeMakingHelpers import (
makeConstantReplacementNode,
makeExpressionBuiltinLocals,
makeRaiseExceptionReplacementExpression,
makeRaiseExceptionReplacementExpressionFromInstance,
wrapExpressionWithSideEffects,
)
from nuitka.nodes.OperatorNodes import ExpressionOperationBinaryDivmod
from nuitka.nodes.OperatorNodesUnary import (
ExpressionOperationNot,
ExpressionOperationUnaryAbs,
ExpressionOperationUnaryRepr,
)
from nuitka.nodes.OutlineNodes import ExpressionOutlineBody
from nuitka.nodes.ReturnNodes import makeStatementReturn
from nuitka.nodes.SliceNodes import makeExpressionBuiltinSlice
from nuitka.nodes.TypeNodes import (
ExpressionBuiltinIsinstance,
ExpressionBuiltinIssubclass,
ExpressionBuiltinSuper0,
ExpressionBuiltinSuper2,
ExpressionBuiltinType1,
)
from nuitka.nodes.VariableRefNodes import (
ExpressionTempVariableRef,
ExpressionVariableRef,
)
from nuitka.PythonVersions import python_version
from nuitka.specs import BuiltinParameterSpecs
from nuitka.Tracing import optimization_logger
from nuitka.tree.ReformulationExecStatements import wrapEvalGlobalsAndLocals
from nuitka.tree.ReformulationTryFinallyStatements import (
makeTryFinallyStatement,
)
from nuitka.tree.TreeHelpers import (
makeCallNode,
makeStatementsSequence,
makeStatementsSequenceFromStatement,
)
def dir_extractor(node):
    """Specialize calls to the built-in 'dir'."""
    locals_scope = node.subnode_called.getLocalsScope()

    def makeZeroArgDirCall(source_ref):
        # dir() without arguments amounts to locals().keys().
        locals_node = makeExpressionBuiltinLocals(
            locals_scope=locals_scope, source_ref=source_ref
        )
        keys_call = makeCallNode(
            ExpressionAttributeLookup(
                expression=locals_node, attribute_name="keys", source_ref=source_ref
            ),
            source_ref,
        )
        # On Python3, "keys" yields a view object, but dir() must give a list.
        if python_version >= 0x300:
            keys_call = ExpressionBuiltinList(value=keys_call, source_ref=source_ref)
        return keys_call

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        # TODO: Needs locals_scope attached.
        builtin_class=ExpressionBuiltinDir1,
        builtin_spec=BuiltinParameterSpecs.builtin_dir_spec,
        empty_special_class=makeZeroArgDirCall,
    )
def vars_extractor(node):
    """Optimize a call to the built-in "vars".

    The zero-argument "vars()" is simply "locals()"; the one-argument form
    becomes a dedicated node.
    """
    locals_scope = node.subnode_called.getLocalsScope()
    def selectVarsEmptyClass(source_ref):
        return makeExpressionBuiltinLocals(
            locals_scope=locals_scope, source_ref=source_ref
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        # TODO: Needs locals_scope attached
        builtin_class=ExpressionBuiltinVars,
        builtin_spec=BuiltinParameterSpecs.builtin_vars_spec,
        empty_special_class=selectVarsEmptyClass,
    )
def import_extractor(node):
    """Turn calls to the built-in "__import__" into a dedicated import node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_import_spec,
        builtin_class=ExpressionBuiltinImport,
        node=node,
    )
def type_extractor(node):
    """Optimize calls to the built-in "type".

    The one-argument form queries a value's type, the three-argument form
    creates a new type; any other argument count raises "TypeError" at
    run time and is replaced accordingly.
    """
    args = node.subnode_args
    iter_length = 0 if args is None else args.getIterationLength()
    if iter_length == 1:
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinType1,
            builtin_spec=BuiltinParameterSpecs.builtin_type1_spec,
        )
    if iter_length == 3:
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinType3,
            builtin_spec=BuiltinParameterSpecs.builtin_type3_spec,
        )
    return makeRaiseExceptionReplacementExpressionFromInstance(
        expression=node, exception=TypeError("type() takes 1 or 3 arguments")
    )
def iter_extractor(node):
    """Turn calls to the built-in "iter" into dedicated iterator nodes."""
    def wrapIterCreation(callable_arg, sentinel, source_ref):
        # With a sentinel this is the two-argument callable form, otherwise
        # plain iterator creation.
        if sentinel is not None:
            return ExpressionBuiltinIter2(
                callable_arg=callable_arg, sentinel=sentinel, source_ref=source_ref
            )
        return ExpressionBuiltinIter1(value=callable_arg, source_ref=source_ref)
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_iter_spec,
        builtin_class=wrapIterCreation,
        node=node,
    )
def next_extractor(node):
    """Turn calls to the built-in "next" into dedicated nodes."""
    # The forms with and without a default value behave quite differently,
    # so they are split into two distinct node types.
    def selectNextBuiltinClass(iterator, default, source_ref):
        if default is not None:
            return ExpressionBuiltinNext2(
                iterator=iterator, default=default, source_ref=source_ref
            )
        return ExpressionBuiltinNext1(value=iterator, source_ref=source_ref)
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_next_spec,
        builtin_class=selectNextBuiltinClass,
        node=node,
    )
def sum_extractor(node):
    """Turn calls to the built-in "sum" into dedicated nodes.

    The form without a start value is much simpler and gets its own node;
    the zero-argument call is replaced with the run time "TypeError".
    """
    def selectSumBuiltinClass(sequence, start, source_ref):
        if start is not None:
            return ExpressionBuiltinSum2(
                sequence=sequence, start=start, source_ref=source_ref
            )
        return ExpressionBuiltinSum1(sequence=sequence, source_ref=source_ref)
    def makeSum0(source_ref):
        # pylint: disable=unused-argument
        # The exact error message changed with Python3.8.
        exception_message = (
            "sum expected at least 1 arguments, got 0"
            if python_version < 0x380
            else "sum() takes at least 1 positional argument (0 given)"
        )
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node, exception=TypeError(exception_message)
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeSum0,
        builtin_spec=BuiltinParameterSpecs.builtin_sum_spec,
        builtin_class=selectSumBuiltinClass,
        node=node,
    )
def dict_extractor(node):
    """Optimize calls to the built-in "dict"."""
    # The "dict" built-in is a bit strange in that it accepts a position
    # parameter, or not, but won't have a default value.
    def wrapExpressionBuiltinDictCreation(positional_args, dict_star_arg, source_ref):
        # More than one positional argument is a run time "TypeError", but
        # the arguments must still be evaluated for their side effects.
        if len(positional_args) > 1:
            result = makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node,
                exception=TypeError(
                    "dict expected at most 1 arguments, got %d" % (len(positional_args))
                ),
            )
            result = wrapExpressionWithSideEffects(
                side_effects=positional_args, old_node=node, new_node=result
            )
            if dict_star_arg:
                result = wrapExpressionWithSideEffects(
                    side_effects=dict_star_arg, old_node=node, new_node=result
                )
            return result
        return ExpressionBuiltinDict(
            pos_arg=positional_args[0] if positional_args else None,
            pairs=dict_star_arg,
            source_ref=source_ref,
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapExpressionBuiltinDictCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_dict_spec,
    )
def chr_extractor(node):
    """Turn calls to the built-in "chr" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_chr_spec,
        builtin_class=ExpressionBuiltinChr,
        node=node,
    )
def ord_extractor(node):
    """Turn calls to the built-in "ord" into a dedicated node."""
    def makeOrd0(source_ref):
        # pylint: disable=unused-argument
        # Zero arguments is a "TypeError" at run time.
        return makeRaiseExceptionReplacementExpressionFromInstance(
            exception=TypeError("ord() takes exactly one argument (0 given)"),
            expression=node,
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeOrd0,
        builtin_spec=BuiltinParameterSpecs.builtin_ord_spec,
        builtin_class=ExpressionBuiltinOrd,
        node=node,
    )
def bin_extractor(node):
    """Turn calls to the built-in "bin" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_bin_spec,
        builtin_class=ExpressionBuiltinBin,
        node=node,
    )
def oct_extractor(node):
    """Turn calls to the built-in "oct" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_oct_spec,
        builtin_class=ExpressionBuiltinOct,
        node=node,
    )
def hex_extractor(node):
    """Turn calls to the built-in "hex" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_hex_spec,
        builtin_class=ExpressionBuiltinHex,
        node=node,
    )
def id_extractor(node):
    """Turn calls to the built-in "id" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_id_spec,
        builtin_class=ExpressionBuiltinId,
        node=node,
    )
def repr_extractor(node):
    """Turn calls to the built-in "repr" into a unary repr operation."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_repr_spec,
        builtin_class=ExpressionOperationUnaryRepr,
        node=node,
    )
if python_version >= 0x300:
    # The "ascii" built-in only exists on Python3.
    def ascii_extractor(node):
        """Turn calls to the built-in "ascii" into a dedicated node."""
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinAscii,
            # NOTE(review): reuses the "repr" spec — presumably because both
            # take a single positional argument; confirm this is intended.
            builtin_spec=BuiltinParameterSpecs.builtin_repr_spec,
        )
def range_extractor(node):
    """Turn calls to the built-in "range" into dedicated nodes.

    One node type per argument count; the zero-argument form is replaced
    with the exception raise the real built-in produces.
    """
    def selectRangeBuiltin(low, high, step, source_ref):
        if high is None:
            return ExpressionBuiltinRange1(low=low, source_ref=source_ref)
        if step is None:
            return ExpressionBuiltinRange2(low=low, high=high, source_ref=source_ref)
        return ExpressionBuiltinRange3(
            low=low, high=high, step=step, source_ref=source_ref
        )
    def makeRange0(source_ref):
        # pylint: disable=unused-argument
        # Let the real built-in produce the error, then raise it statically.
        try:
            range()
        except Exception as e:  # We want to broad here, pylint: disable=broad-except
            return makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node, exception=e
            )
        else:
            raise NuitkaAssumptionError("range without argument is expected to raise")
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeRange0,
        builtin_spec=BuiltinParameterSpecs.builtin_range_spec,
        builtin_class=selectRangeBuiltin,
        node=node,
    )
def xrange_extractor(node):
    """Turn calls to "xrange" into dedicated nodes.

    Also used for Python3 "range", which behaves like Python2 "xrange".
    One node type per argument count; the zero-argument form is replaced
    with the exception raise the real built-in produces.
    """
    def selectXrangeBuiltin(low, high, step, source_ref):
        if high is None:
            return ExpressionBuiltinXrange1(low=low, source_ref=source_ref)
        elif step is None:
            return ExpressionBuiltinXrange2(low=low, high=high, source_ref=source_ref)
        else:
            return ExpressionBuiltinXrange3(
                low=low, high=high, step=step, source_ref=source_ref
            )
    def makeXrange0(source_ref):
        # pylint: disable=unused-argument
        # Let the real built-in produce the error, then raise it statically.
        try:
            xrange()
        except Exception as e:  # We want to broad here, pylint: disable=broad-except
            return makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node, exception=e
            )
        else:
            # Fixed copy-paste from "range_extractor": this is about "xrange".
            raise NuitkaAssumptionError("xrange without argument is expected to raise")
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectXrangeBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_xrange_spec,
        empty_special_class=makeXrange0,
    )
def len_extractor(node):
    """Turn calls to the built-in "len" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_len_spec,
        builtin_class=ExpressionBuiltinLen,
        node=node,
    )
def all_extractor(node):
    """Turn calls to the built-in "all" into a dedicated node."""
    # pylint: disable=unused-argument
    def makeAll0(source_ref):
        # Zero arguments is a "TypeError" at run time.
        exception_message = "all() takes exactly one argument (0 given)"
        return makeRaiseExceptionReplacementExpressionFromInstance(
            exception=TypeError(exception_message), expression=node
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeAll0,
        builtin_spec=BuiltinParameterSpecs.builtin_all_spec,
        builtin_class=ExpressionBuiltinAll,
        node=node,
    )
def abs_extractor(node):
    """Turn calls to the built-in "abs" into a unary abs operation."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_abs_spec,
        builtin_class=ExpressionOperationUnaryAbs,
        node=node,
    )
def any_extractor(node):
    """Turn calls to the built-in "any" into a dedicated node."""
    # pylint: disable=unused-argument
    def makeAny0(source_ref):
        # Zero arguments is a "TypeError" at run time.
        exception_message = "any() takes exactly one argument (0 given)"
        return makeRaiseExceptionReplacementExpressionFromInstance(
            exception=TypeError(exception_message), expression=node
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeAny0,
        builtin_spec=BuiltinParameterSpecs.builtin_any_spec,
        builtin_class=ExpressionBuiltinAny,
        node=node,
    )
def tuple_extractor(node):
    """Turn calls to the built-in "tuple" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_tuple_spec,
        builtin_class=ExpressionBuiltinTuple,
        node=node,
    )
def list_extractor(node):
    """Turn calls to the built-in "list" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_list_spec,
        builtin_class=ExpressionBuiltinList,
        node=node,
    )
def set_extractor(node):
    """Turn calls to the built-in "set" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_set_spec,
        builtin_class=ExpressionBuiltinSet,
        node=node,
    )
def frozenset_extractor(node):
    """Turn calls to the built-in "frozenset" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_frozenset_spec,
        builtin_class=ExpressionBuiltinFrozenset,
        node=node,
    )
def float_extractor(node):
    """Turn calls to the built-in "float" into a dedicated node.

    The zero-argument form becomes the constant "float()" value.
    """
    def makeFloat0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=float(), node=node, user_provided=False
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeFloat0,
        builtin_spec=BuiltinParameterSpecs.builtin_float_spec,
        builtin_class=ExpressionBuiltinFloat,
        node=node,
    )
def complex_extractor(node):
    """Turn calls to the built-in "complex" into dedicated nodes.

    The zero-argument form becomes the constant "complex()" value.
    """
    def makeComplex0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=complex(), node=node, user_provided=False
        )
    def selectComplexBuiltin(real, imag, source_ref):
        if imag is not None:
            return ExpressionBuiltinComplex2(
                real=real, imag=imag, source_ref=source_ref
            )
        return ExpressionBuiltinComplex1(value=real, source_ref=source_ref)
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeComplex0,
        builtin_spec=BuiltinParameterSpecs.builtin_complex_spec,
        builtin_class=selectComplexBuiltin,
        node=node,
    )
def str_extractor(node):
    """Turn calls to the built-in "str" into the per-version dedicated node."""
    # On Python2, "str" is "bytes", so a different node type applies there.
    if str is bytes:
        builtin_class = ExpressionBuiltinStrP2
    else:
        builtin_class = ExpressionBuiltinStrP3
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=builtin_class,
        builtin_spec=builtin_class.builtin_spec,
    )
if python_version < 0x300:
    # The "unicode" built-in only exists on Python2.
    def unicode_extractor(node):
        """Turn calls to the built-in "unicode" into a dedicated node."""
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinUnicodeP2,
            builtin_spec=ExpressionBuiltinUnicodeP2.builtin_spec,
        )
else:
    from nuitka.nodes.BuiltinTypeNodes import (
        ExpressionBuiltinBytes1,
        ExpressionBuiltinBytes3,
    )
    # The "bytes" built-in with encoding arguments only exists on Python3.
    def bytes_extractor(node):
        """Turn calls to the built-in "bytes" into dedicated nodes."""
        def makeBytes0(source_ref):
            # pylint: disable=unused-argument
            # Zero arguments yields the empty bytes constant.
            return makeConstantReplacementNode(
                constant=bytes(), node=node, user_provided=False
            )
        def selectBytesBuiltin(string, encoding, errors, source_ref):
            # Encoding/errors arguments select the three-argument node form.
            if encoding is None and errors is None:
                return ExpressionBuiltinBytes1(value=string, source_ref=source_ref)
            else:
                return ExpressionBuiltinBytes3(
                    value=string,
                    encoding=encoding,
                    errors=errors,
                    source_ref=source_ref,
                )
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=selectBytesBuiltin,
            builtin_spec=BuiltinParameterSpecs.builtin_bytes_p3_spec,
            empty_special_class=makeBytes0,
        )
def bool_extractor(node):
    """Turn calls to the built-in "bool" into a dedicated node.

    The zero-argument form becomes the constant "bool()" value.
    """
    def makeBool0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=bool(), node=node, user_provided=False
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeBool0,
        builtin_spec=BuiltinParameterSpecs.builtin_bool_spec,
        builtin_class=ExpressionBuiltinBool,
        node=node,
    )
def int_extractor(node):
    """Turn calls to the built-in "int" into dedicated nodes.

    A given base selects the two-argument node type; the zero-argument form
    becomes the constant "int()" value.
    """
    def makeInt0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=int(), node=node, user_provided=False
        )
    def selectIntBuiltin(value, base, source_ref):
        if base is not None:
            return ExpressionBuiltinInt2(value=value, base=base, source_ref=source_ref)
        return ExpressionBuiltinInt1(value=value, source_ref=source_ref)
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeInt0,
        builtin_spec=BuiltinParameterSpecs.builtin_int_spec,
        builtin_class=selectIntBuiltin,
        node=node,
    )
if python_version < 0x300:
    # The "long" built-in only exists on Python2.
    from nuitka.nodes.BuiltinIntegerNodes import (
        ExpressionBuiltinLong1,
        ExpressionBuiltinLong2,
    )
    def long_extractor(node):
        """Turn calls to the built-in "long" into dedicated nodes."""
        def makeLong0(source_ref):
            # pylint: disable=unused-argument
            # NOTE(review): uses "int()" (value 0) for the zero-argument
            # constant — confirm "0" rather than "0L" is intended here.
            return makeConstantReplacementNode(
                constant=int(), node=node, user_provided=False
            )
        def selectIntBuiltin(value, base, source_ref):
            if base is None:
                return ExpressionBuiltinLong1(value=value, source_ref=source_ref)
            else:
                return ExpressionBuiltinLong2(
                    value=value, base=base, source_ref=source_ref
                )
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=selectIntBuiltin,
            # NOTE(review): reuses the "int" spec — presumably the signatures
            # match; confirm a dedicated "long" spec is not required.
            builtin_spec=BuiltinParameterSpecs.builtin_int_spec,
            empty_special_class=makeLong0,
        )
def globals_extractor(node):
    """Turn calls to the built-in "globals" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_globals_spec,
        builtin_class=ExpressionBuiltinGlobals,
        node=node,
    )
def locals_extractor(node):
    """Turn calls to the built-in "locals" into a locals access node."""
    # Note: Locals on the module level is really globals.
    locals_scope = node.subnode_called.getLocalsScope()
    def makeLocalsNode(source_ref):
        return makeExpressionBuiltinLocals(
            locals_scope=locals_scope, source_ref=source_ref
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_locals_spec,
        builtin_class=makeLocalsNode,
        node=node,
    )
if python_version < 0x300:
    from nuitka.nodes.ExecEvalNodes import ExpressionBuiltinExecfile
    def execfile_extractor(node):
        """Reformulate calls to "execfile", which only exists on Python2."""
        def wrapExpressionBuiltinExecfileCreation(
            filename, globals_arg, locals_arg, source_ref
        ):
            outline_body = ExpressionOutlineBody(
                provider=node.getParentVariableProvider(),
                name="execfile_call",
                source_ref=source_ref,
            )
            # Prepare globals/locals temp variables with setup ("tried") and
            # cleanup ("final") statements around the actual exec.
            globals_ref, locals_ref, tried, final = wrapEvalGlobalsAndLocals(
                provider=node.getParentVariableProvider(),
                globals_node=globals_arg,
                locals_node=locals_arg,
                temp_scope=outline_body.getOutlineTempScope(),
                source_ref=source_ref,
            )
            # The file is opened ("rU" mode) and fully read, the resulting
            # string is handed to the execfile node as source code.
            tried = makeStatementsSequence(
                statements=(
                    tried,
                    makeStatementReturn(
                        expression=ExpressionBuiltinExecfile(
                            source_code=makeCallNode(
                                ExpressionAttributeLookup(
                                    expression=ExpressionBuiltinOpen(
                                        filename=filename,
                                        mode=makeConstantRefNode(
                                            constant="rU", source_ref=source_ref
                                        ),
                                        buffering=None,
                                        source_ref=source_ref,
                                    ),
                                    attribute_name="read",
                                    source_ref=source_ref,
                                ),
                                source_ref,
                            ),
                            globals_arg=globals_ref,
                            locals_arg=locals_ref,
                            source_ref=source_ref,
                        ),
                        source_ref=source_ref,
                    ),
                ),
                allow_none=False,
                source_ref=source_ref,
            )
            outline_body.setChild(
                "body",
                makeStatementsSequenceFromStatement(
                    statement=makeTryFinallyStatement(
                        provider=outline_body,
                        tried=tried,
                        final=final,
                        source_ref=source_ref,
                    )
                ),
            )
            return outline_body
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=wrapExpressionBuiltinExecfileCreation,
            builtin_spec=BuiltinParameterSpecs.builtin_execfile_spec,
        )
def eval_extractor(node):
    """Reformulate calls to the built-in "eval" into an outline body.

    Globals/locals arguments are prepared into temp variables, string
    source gets stripped of leading/trailing whitespace as real "eval"
    does, and the dedicated eval node runs inside a try/finally that
    releases the temporaries.
    """
    def wrapEvalBuiltin(source, globals_arg, locals_arg, source_ref):
        provider = node.getParentVariableProvider()
        outline_body = ExpressionOutlineBody(
            provider=node.getParentVariableProvider(),
            name="eval_call",
            source_ref=source_ref,
        )
        globals_ref, locals_ref, tried, final = wrapEvalGlobalsAndLocals(
            provider=provider,
            globals_node=globals_arg,
            locals_node=locals_arg,
            temp_scope=outline_body.getOutlineTempScope(),
            source_ref=source_ref,
        )
        # The wrapping should not relocate to the "source_ref".
        assert (
            globals_arg is None
            or globals_ref.getSourceReference() == globals_arg.getSourceReference()
        )
        assert (
            locals_arg is None
            or locals_ref.getSourceReference() == locals_arg.getSourceReference()
        )
        source_variable = outline_body.allocateTempVariable(
            temp_scope=None, name="source"
        )
        # Also release the source temp variable in the cleanup part.
        final.setChild(
            "statements",
            final.subnode_statements
            + (
                StatementDelVariable(
                    variable=source_variable, tolerant=True, source_ref=source_ref
                ),
            ),
        )
        strip_choice = makeConstantRefNode(constant=(" \t",), source_ref=source_ref)
        if python_version >= 0x300:
            # On Python3, bytes source needs bytes strip characters; decide
            # at run time which constant to pass to ".strip()".
            strip_choice = ExpressionConditional(
                condition=ExpressionComparisonIs(
                    left=ExpressionBuiltinType1(
                        value=ExpressionTempVariableRef(
                            variable=source_variable, source_ref=source_ref
                        ),
                        source_ref=source_ref,
                    ),
                    right=makeExpressionBuiltinTypeRef(
                        builtin_name="bytes", source_ref=source_ref
                    ),
                    source_ref=source_ref,
                ),
                expression_yes=makeConstantRefNode(
                    constant=(b" \t",), source_ref=source_ref
                ),
                expression_no=strip_choice,
                source_ref=source_ref,
            )
        # Source needs some special treatment for eval, if it's a string, it
        # must be stripped.
        string_fixup = StatementAssignmentVariable(
            variable=source_variable,
            source=makeExpressionCall(
                called=ExpressionAttributeLookup(
                    expression=ExpressionTempVariableRef(
                        variable=source_variable, source_ref=source_ref
                    ),
                    attribute_name="strip",
                    source_ref=source_ref,
                ),
                args=strip_choice,  # This is a tuple
                kw=None,
                source_ref=source_ref,
            ),
            source_ref=source_ref,
        )
        # Code objects (and "memoryview" since 2.7) are passed unstripped.
        acceptable_builtin_types = [
            ExpressionBuiltinAnonymousRef(builtin_name="code", source_ref=source_ref)
        ]
        if python_version >= 0x270:
            acceptable_builtin_types.append(
                makeExpressionBuiltinTypeRef(
                    builtin_name="memoryview", source_ref=source_ref
                )
            )
        statements = (
            StatementAssignmentVariable(
                variable=source_variable, source=source, source_ref=source_ref
            ),
            makeStatementConditional(
                condition=ExpressionOperationNot(
                    operand=ExpressionBuiltinIsinstance(
                        instance=ExpressionTempVariableRef(
                            variable=source_variable, source_ref=source_ref
                        ),
                        classes=makeExpressionMakeTupleOrConstant(
                            elements=acceptable_builtin_types,
                            user_provided=True,
                            source_ref=source_ref,
                        ),
                        source_ref=source_ref,
                    ),
                    source_ref=source_ref,
                ),
                yes_branch=string_fixup,
                no_branch=None,
                source_ref=source_ref,
            ),
            makeStatementReturn(
                expression=ExpressionBuiltinEval(
                    source_code=ExpressionTempVariableRef(
                        variable=source_variable, source_ref=source_ref
                    ),
                    globals_arg=globals_ref,
                    locals_arg=locals_ref,
                    source_ref=source_ref,
                ),
                source_ref=source_ref,
            ),
        )
        tried = makeStatementsSequence(
            statements=(tried,) + statements, allow_none=False, source_ref=source_ref
        )
        outline_body.setChild(
            "body",
            makeStatementsSequenceFromStatement(
                statement=makeTryFinallyStatement(
                    provider=outline_body,
                    tried=tried,
                    final=final,
                    source_ref=source_ref,
                )
            ),
        )
        return outline_body
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapEvalBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_eval_spec,
    )
if python_version >= 0x300:
    from nuitka.nodes.ExecEvalNodes import ExpressionBuiltinExec
    def exec_extractor(node):
        """Reformulate calls to "exec", which is a function on Python3."""
        def wrapExpressionBuiltinExecCreation(
            source, globals_arg, locals_arg, source_ref
        ):
            provider = node.getParentVariableProvider()
            outline_body = ExpressionOutlineBody(
                provider=provider, name="exec_call", source_ref=source_ref
            )
            # Prepare globals/locals temp variables with setup ("tried") and
            # cleanup ("final") statements around the actual exec.
            globals_ref, locals_ref, tried, final = wrapEvalGlobalsAndLocals(
                provider=provider,
                globals_node=globals_arg,
                locals_node=locals_arg,
                temp_scope=outline_body.getOutlineTempScope(),
                source_ref=source_ref,
            )
            tried = makeStatementsSequence(
                statements=(
                    tried,
                    makeStatementReturn(
                        expression=ExpressionBuiltinExec(
                            source_code=source,
                            globals_arg=globals_ref,
                            locals_arg=locals_ref,
                            source_ref=source_ref,
                        ),
                        source_ref=source_ref,
                    ),
                ),
                allow_none=False,
                source_ref=source_ref,
            )
            # Hack: Allow some APIs to work already
            tried.parent = outline_body
            outline_body.setChild(
                "body",
                makeStatementsSequenceFromStatement(
                    statement=makeTryFinallyStatement(
                        provider=provider,
                        tried=tried,
                        final=final,
                        source_ref=source_ref,
                    )
                ),
            )
            return outline_body
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=wrapExpressionBuiltinExecCreation,
            # NOTE(review): reuses the "eval" spec — presumably the argument
            # shapes match; confirm a dedicated exec spec is not needed.
            builtin_spec=BuiltinParameterSpecs.builtin_eval_spec,
        )
def compile_extractor(node):
    """Turn calls to the built-in "compile" into a dedicated node."""
    def wrapExpressionBuiltinCompileCreation(
        source_code, filename, mode, flags, dont_inherit, optimize=None, source_ref=None
    ):
        return ExpressionBuiltinCompile(
            source_ref=source_ref,
            optimize=optimize,
            dont_inherit=dont_inherit,
            flags=flags,
            mode=mode,
            filename=filename,
            source_code=source_code,
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_compile_spec,
        builtin_class=wrapExpressionBuiltinCompileCreation,
        node=node,
    )
def open_extractor(node):
    """Turn calls to the built-in "open" into a dedicated node."""
    def makeOpen0(source_ref):
        # pylint: disable=unused-argument
        # Let the real built-in produce the error, then raise it statically.
        try:
            open()
        except Exception as e:  # We want to broad here, pylint: disable=broad-except
            return makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node, exception=e
            )
        else:
            raise NuitkaAssumptionError("open without argument is expected to raise")
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeOpen0,
        builtin_spec=BuiltinParameterSpecs.builtin_open_spec,
        builtin_class=ExpressionBuiltinOpen,
        node=node,
    )
def super_extractor(node):
    """Reformulate calls to the built-in "super".

    The zero-argument Python3 form needs the "__class__" cell and the first
    argument of the enclosing function recovered from closure; the failure
    modes are replaced with matching static exception raises.
    """
    def wrapSuperBuiltin(type_arg, object_arg, source_ref):
        if type_arg is None and python_version >= 0x300:
            # At module level there is no "__class__" to pick up.
            if provider.isCompiledPythonModule():
                return makeRaiseExceptionReplacementExpression(
                    expression=node,
                    exception_type="RuntimeError",
                    exception_value="super(): no arguments",
                )
            class_variable = provider.getVariableForReference(variable_name="__class__")
            provider.trace_collection.getVariableCurrentTrace(class_variable).addUsage()
            type_arg = ExpressionVariableRef(
                # Ought to be already closure taken due to "super" flag in
                # tree building.
                variable=class_variable,
                source_ref=source_ref,
            )
            # If we already have this as a local variable, then use that
            # instead.
            type_arg_owner = class_variable.getOwner()
            if type_arg_owner is provider or not (
                type_arg_owner.isExpressionFunctionBody()
                or type_arg_owner.isExpressionClassBody()
            ):
                return makeRaiseExceptionReplacementExpression(
                    expression=node,
                    exception_type="SystemError"
                    if python_version < 0x331
                    else "RuntimeError",
                    exception_value="super(): __class__ cell not found",
                )
            if object_arg is None:
                # Generators and alike carry parameters on their creator.
                if (
                    provider.isExpressionGeneratorObjectBody()
                    or provider.isExpressionCoroutineObjectBody()
                    or provider.isExpressionAsyncgenObjectBody()
                ):
                    parameter_provider = provider.getParentVariableProvider()
                else:
                    parameter_provider = provider
                if parameter_provider.getParameters().getArgumentCount() == 0:
                    return makeRaiseExceptionReplacementExpression(
                        expression=node,
                        exception_type="RuntimeError",
                        exception_value="super(): no arguments",
                    )
                else:
                    # Implicit "object" argument is the first parameter.
                    par1_name = parameter_provider.getParameters().getArgumentNames()[0]
                    object_variable = provider.getVariableForReference(
                        variable_name=par1_name
                    )
                    provider.trace_collection.getVariableCurrentTrace(
                        object_variable
                    ).addUsage()
                    object_arg = ExpressionVariableRef(
                        variable=object_variable, source_ref=source_ref
                    )
                    if not object_arg.getVariable().isParameterVariable():
                        return makeRaiseExceptionReplacementExpression(
                            expression=node,
                            exception_type="SystemError"
                            if python_version < 0x300
                            else "RuntimeError",
                            exception_value="super(): __class__ cell not found",
                        )
            return ExpressionBuiltinSuper0(
                type_arg=type_arg, object_arg=object_arg, source_ref=source_ref
            )
        return ExpressionBuiltinSuper2(
            type_arg=type_arg, object_arg=object_arg, source_ref=source_ref
        )
    # Closed over by "wrapSuperBuiltin" above.
    provider = node.getParentVariableProvider().getEntryPoint()
    if not provider.isCompiledPythonModule():
        provider.discardFlag("has_super")
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapSuperBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_super_spec,
    )
def hasattr_extractor(node):
    """Turn calls to the built-in "hasattr" into a dedicated node."""
    # We need to have to builtin arguments, pylint: disable=redefined-builtin
    def makeExpressionBuiltinHasattr(object, name, source_ref):
        return ExpressionBuiltinHasattr(
            expression=object, name=name, source_ref=source_ref
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_hasattr_spec,
        builtin_class=makeExpressionBuiltinHasattr,
        node=node,
    )
def getattr_extractor(node):
    """Turn calls to the built-in "getattr" into a dedicated node."""
    # We need to have to builtin arguments, pylint: disable=redefined-builtin
    def makeExpressionBuiltinGetattr(object, name, default, source_ref):
        return ExpressionBuiltinGetattr(
            expression=object, name=name, default=default, source_ref=source_ref
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_getattr_spec,
        builtin_class=makeExpressionBuiltinGetattr,
        node=node,
    )
def setattr_extractor(node):
    """Turn calls to the built-in "setattr" into a dedicated node."""
    # We need to have to builtin arguments, pylint: disable=redefined-builtin
    def makeExpressionBuiltinSetattr(object, name, value, source_ref):
        return ExpressionBuiltinSetattr(
            expression=object, name=name, value=value, source_ref=source_ref
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_setattr_spec,
        builtin_class=makeExpressionBuiltinSetattr,
        node=node,
    )
def isinstance_extractor(node):
    """Turn calls to the built-in "isinstance" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_isinstance_spec,
        builtin_class=ExpressionBuiltinIsinstance,
        node=node,
    )
def issubclass_extractor(node):
    """Turn calls to the built-in "issubclass" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinIssubclass,
        # NOTE(review): reuses the "isinstance" spec — presumably because the
        # two-argument shape matches; confirm a dedicated spec is not needed.
        builtin_spec=BuiltinParameterSpecs.builtin_isinstance_spec,
    )
def bytearray_extractor(node):
    """Turn calls to the built-in "bytearray" into dedicated nodes.

    The zero-argument form becomes the constant empty bytearray; otherwise
    the one-argument and string/encoding/errors forms get distinct nodes.
    """
    def makeBytearray0(source_ref):
        return makeConstantRefNode(constant=bytearray(), source_ref=source_ref)
    # Renamed from "selectNextBuiltinClass", a copy-paste of "next_extractor",
    # to match the naming of the sibling extractors.
    def selectBytearrayBuiltin(string, encoding, errors, source_ref):
        if encoding is None:
            return ExpressionBuiltinBytearray1(value=string, source_ref=source_ref)
        else:
            return ExpressionBuiltinBytearray3(
                string=string, encoding=encoding, errors=errors, source_ref=source_ref
            )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectBytearrayBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_bytearray_spec,
        empty_special_class=makeBytearray0,
    )
def slice_extractor(node):
    """Turn calls to the built-in "slice" into a dedicated node."""
    def wrapSlice(start, stop, step, source_ref):
        if start is not None and stop is None:
            # Default rules are strange. If one argument is given, it's the
            # second one then.
            stop = start
            start = None
        return makeExpressionBuiltinSlice(
            start=start, stop=stop, step=step, source_ref=source_ref
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapSlice,
        builtin_spec=BuiltinParameterSpecs.builtin_slice_spec,
    )
def hash_extractor(node):
    """Turn calls to the built-in "hash" into a dedicated node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_hash_spec,
        builtin_class=ExpressionBuiltinHash,
        node=node,
    )
def format_extractor(node):
    """Turn calls to the built-in "format" into a dedicated node."""
    def makeFormat0(source_ref):
        # pylint: disable=unused-argument
        # Zero arguments is a "TypeError" at run time.
        return makeRaiseExceptionReplacementExpressionFromInstance(
            exception=TypeError("format() takes at least 1 argument (0 given)"),
            expression=node,
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeFormat0,
        builtin_spec=BuiltinParameterSpecs.builtin_format_spec,
        builtin_class=ExpressionBuiltinFormat,
        node=node,
    )
def staticmethod_extractor(node):
    """Turn calls to the built-in "staticmethod" into a dedicated node."""
    def makeStaticmethod0(source_ref):
        # pylint: disable=unused-argument
        # Zero arguments is a "TypeError" at run time.
        return makeRaiseExceptionReplacementExpressionFromInstance(
            exception=TypeError("staticmethod expected 1 arguments, got 0"),
            expression=node,
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        empty_special_class=makeStaticmethod0,
        builtin_spec=BuiltinParameterSpecs.builtin_staticmethod_spec,
        builtin_class=ExpressionBuiltinStaticmethod,
        node=node,
    )
def classmethod_extractor(node):
    """Turn calls to the built-in "classmethod" into a dedicated node."""
    # Renamed from "makeStaticmethod0", a copy-paste from the staticmethod
    # extractor; behavior is unchanged.
    def makeClassmethod0(source_ref):
        # pylint: disable=unused-argument
        # Zero arguments is a "TypeError" at run time.
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node,
            exception=TypeError("classmethod expected 1 arguments, got 0"),
        )
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinClassmethod,
        builtin_spec=BuiltinParameterSpecs.builtin_classmethod_spec,
        empty_special_class=makeClassmethod0,
    )
def divmod_extractor(node):
    """Turn calls to the built-in "divmod" into a binary divmod operation."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        builtin_spec=BuiltinParameterSpecs.builtin_divmod_spec,
        builtin_class=ExpressionOperationBinaryDivmod,
        node=node,
    )
# Maps a built-in name to the extractor that reformulates calls to it.
# Version specific entries are added right below this table.
_dispatch_dict = {
    "compile": compile_extractor,
    "globals": globals_extractor,
    "locals": locals_extractor,
    "eval": eval_extractor,
    "dir": dir_extractor,
    "vars": vars_extractor,
    "__import__": import_extractor,
    "chr": chr_extractor,
    "ord": ord_extractor,
    "bin": bin_extractor,
    "oct": oct_extractor,
    "hex": hex_extractor,
    "id": id_extractor,
    "type": type_extractor,
    "iter": iter_extractor,
    "next": next_extractor,
    "sum": sum_extractor,
    "tuple": tuple_extractor,
    "list": list_extractor,
    "dict": dict_extractor,
    "set": set_extractor,
    "frozenset": frozenset_extractor,
    "float": float_extractor,
    "complex": complex_extractor,
    "str": str_extractor,
    "bool": bool_extractor,
    "int": int_extractor,
    "repr": repr_extractor,
    "len": len_extractor,
    "any": any_extractor,
    "abs": abs_extractor,
    "all": all_extractor,
    "super": super_extractor,
    "hasattr": hasattr_extractor,
    "getattr": getattr_extractor,
    "setattr": setattr_extractor,
    "isinstance": isinstance_extractor,
    "issubclass": issubclass_extractor,
    "bytearray": bytearray_extractor,
    "slice": slice_extractor,
    "hash": hash_extractor,
    "format": format_extractor,
    "open": open_extractor,
    "staticmethod": staticmethod_extractor,
    "classmethod": classmethod_extractor,
    "divmod": divmod_extractor,
}
# Register the version specific extractors.
if python_version < 0x300:
    # These are not in Python3
    _dispatch_dict["long"] = long_extractor
    _dispatch_dict["unicode"] = unicode_extractor
    _dispatch_dict["execfile"] = execfile_extractor
    _dispatch_dict["xrange"] = xrange_extractor
    _dispatch_dict["range"] = range_extractor
else:
    # This one is not in Python2:
    _dispatch_dict["bytes"] = bytes_extractor
    _dispatch_dict["ascii"] = ascii_extractor
    _dispatch_dict["exec"] = exec_extractor
    # The Python3 range is really an xrange, use that.
    _dispatch_dict["range"] = xrange_extractor
def check():
    """Assert at import time that every dispatched name is a real built-in."""
    from nuitka.Builtins import builtin_names
    for builtin_name in _dispatch_dict:
        assert builtin_name in builtin_names, builtin_name
check()
# Built-in names knowingly not optimized; "computeBuiltinCall" consults this
# list before warning about unhandled built-ins.
_builtin_ignore_list = (
    # Not supporting 'print', because it could be replaced, and is not
    # worth the effort yet.
    "print",
    # TODO: This could, and should be supported, as we could e.g. lower
    # types easily for it.
    "sorted",
    # TODO: This would be very worthwhile, as it could easily optimize
    # its iteration away.
    "zip",
    # TODO: This would be most precious due to the type hint it gives
    "enumerate",
    # TODO: Also worthwhile for known values.
    "reversed",
    # TODO: Not sure what this really is about.
    "memoryview",
)
def _describeNewNode(builtin_name, inspect_node):
    """Describe the change for better understanding.

    Returns a (tags, message) pair for the optimization trace, chosen by
    the kind of node that replaced the built-in call.
    """
    # Don't mention side effects, that's not what we care about.
    if inspect_node.isExpressionSideEffects():
        inspect_node = inspect_node.subnode_expression
    if inspect_node.isExpressionBuiltinImport():
        tags = "new_import"
        message = """\
Replaced dynamic "__import__" call with static built-in call."""
    elif inspect_node.isExpressionBuiltin() or inspect_node.isStatementExec():
        tags = "new_builtin"
        message = "Replaced call to built-in '%s' with built-in call '%s'." % (
            builtin_name,
            inspect_node.kind,
        )
    elif inspect_node.isExpressionRaiseException():
        tags = "new_raise"
        message = """\
Replaced call to built-in '%s' with exception raise.""" % (
            builtin_name,
        )
    elif inspect_node.isExpressionOperationBinary():
        tags = "new_expression"
        message = """\
Replaced call to built-in '%s' with binary operation '%s'.""" % (
            builtin_name,
            inspect_node.getOperator(),
        )
    elif inspect_node.isExpressionOperationUnary():
        tags = "new_expression"
        message = """\
Replaced call to built-in '%s' with unary operation '%s'.""" % (
            builtin_name,
            inspect_node.getOperator(),
        )
    elif inspect_node.isExpressionCall():
        tags = "new_expression"
        message = """\
Replaced call to built-in '%s' with call.""" % (
            builtin_name,
        )
    elif inspect_node.isExpressionOutlineBody():
        tags = "new_expression"
        message = (
            """\
Replaced call to built-in '%s' with outlined call."""
            % builtin_name
        )
    elif inspect_node.isExpressionConstantRef():
        tags = "new_expression"
        message = (
            """\
Replaced call to built-in '%s' with constant value."""
            % builtin_name
        )
    else:
        # Every extractor result kind must be covered above.
        assert False, (builtin_name, "->", inspect_node)
    return tags, message
def computeBuiltinCall(builtin_name, call_node):
    """Compute a replacement node for a call to the named built-in.

    Returns a tuple (node, tags, message). When the built-in is not
    handled by any extractor, the unchanged "call_node" is returned with
    None for tags and message.
    """
    # There is some dispatching for how to output various types of changes,
    # with lots of cases.
    if builtin_name in _dispatch_dict:
        new_node = _dispatch_dict[builtin_name](call_node)

        # Extractors must decide something; handing back the unchanged node
        # or nothing at all is not allowed.
        assert new_node is not call_node, builtin_name
        assert new_node is not None, builtin_name

        # For traces, we are going to ignore side effects, and output traces
        # only based on the basis of it.
        tags, message = _describeNewNode(builtin_name, new_node)

        return new_node, tags, message
    else:
        # NOTE(review): intentionally disabled ("if False") debugging aid
        # that would warn about built-ins not yet handled and not on the
        # ignore list.
        if False and builtin_name not in _builtin_ignore_list:
            optimization_logger.warning(
                "Not handling built-in %r, consider support." % builtin_name
            )

        return call_node, None, None
| 33.144174 | 88 | 0.648095 |
from nuitka.__past__ import xrange
from nuitka.Errors import NuitkaAssumptionError
from nuitka.nodes.AssignNodes import (
StatementAssignmentVariable,
StatementDelVariable,
)
from nuitka.nodes.AttributeNodes import (
ExpressionAttributeLookup,
ExpressionBuiltinGetattr,
ExpressionBuiltinHasattr,
ExpressionBuiltinSetattr,
)
from nuitka.nodes.BuiltinAllNodes import ExpressionBuiltinAll
from nuitka.nodes.BuiltinAnyNodes import ExpressionBuiltinAny
from nuitka.nodes.BuiltinComplexNodes import (
ExpressionBuiltinComplex1,
ExpressionBuiltinComplex2,
)
from nuitka.nodes.BuiltinDecodingNodes import (
ExpressionBuiltinChr,
ExpressionBuiltinOrd,
)
from nuitka.nodes.BuiltinDecoratorNodes import (
ExpressionBuiltinClassmethod,
ExpressionBuiltinStaticmethod,
)
from nuitka.nodes.BuiltinDictNodes import ExpressionBuiltinDict
from nuitka.nodes.BuiltinFormatNodes import (
ExpressionBuiltinAscii,
ExpressionBuiltinBin,
ExpressionBuiltinFormat,
ExpressionBuiltinHex,
ExpressionBuiltinId,
ExpressionBuiltinOct,
)
from nuitka.nodes.BuiltinHashNodes import ExpressionBuiltinHash
from nuitka.nodes.BuiltinIntegerNodes import (
ExpressionBuiltinInt1,
ExpressionBuiltinInt2,
)
from nuitka.nodes.BuiltinIteratorNodes import (
ExpressionBuiltinIter1,
ExpressionBuiltinIter2,
)
from nuitka.nodes.BuiltinLenNodes import ExpressionBuiltinLen
from nuitka.nodes.BuiltinNextNodes import (
ExpressionBuiltinNext1,
ExpressionBuiltinNext2,
)
from nuitka.nodes.BuiltinOpenNodes import ExpressionBuiltinOpen
from nuitka.nodes.BuiltinRangeNodes import (
ExpressionBuiltinRange1,
ExpressionBuiltinRange2,
ExpressionBuiltinRange3,
ExpressionBuiltinXrange1,
ExpressionBuiltinXrange2,
ExpressionBuiltinXrange3,
)
from nuitka.nodes.BuiltinRefNodes import (
ExpressionBuiltinAnonymousRef,
makeExpressionBuiltinTypeRef,
)
from nuitka.nodes.BuiltinSumNodes import (
ExpressionBuiltinSum1,
ExpressionBuiltinSum2,
)
from nuitka.nodes.BuiltinTypeNodes import (
ExpressionBuiltinBool,
ExpressionBuiltinBytearray1,
ExpressionBuiltinBytearray3,
ExpressionBuiltinFloat,
ExpressionBuiltinFrozenset,
ExpressionBuiltinList,
ExpressionBuiltinSet,
ExpressionBuiltinStrP2,
ExpressionBuiltinStrP3,
ExpressionBuiltinTuple,
ExpressionBuiltinUnicodeP2,
)
from nuitka.nodes.BuiltinVarsNodes import ExpressionBuiltinVars
from nuitka.nodes.CallNodes import makeExpressionCall
from nuitka.nodes.ClassNodes import ExpressionBuiltinType3
from nuitka.nodes.ComparisonNodes import ExpressionComparisonIs
from nuitka.nodes.ConditionalNodes import (
ExpressionConditional,
makeStatementConditional,
)
from nuitka.nodes.ConstantRefNodes import makeConstantRefNode
from nuitka.nodes.ContainerMakingNodes import makeExpressionMakeTupleOrConstant
from nuitka.nodes.ExecEvalNodes import (
ExpressionBuiltinCompile,
ExpressionBuiltinEval,
)
from nuitka.nodes.GlobalsLocalsNodes import (
ExpressionBuiltinDir1,
ExpressionBuiltinGlobals,
)
from nuitka.nodes.ImportNodes import ExpressionBuiltinImport
from nuitka.nodes.NodeMakingHelpers import (
makeConstantReplacementNode,
makeExpressionBuiltinLocals,
makeRaiseExceptionReplacementExpression,
makeRaiseExceptionReplacementExpressionFromInstance,
wrapExpressionWithSideEffects,
)
from nuitka.nodes.OperatorNodes import ExpressionOperationBinaryDivmod
from nuitka.nodes.OperatorNodesUnary import (
ExpressionOperationNot,
ExpressionOperationUnaryAbs,
ExpressionOperationUnaryRepr,
)
from nuitka.nodes.OutlineNodes import ExpressionOutlineBody
from nuitka.nodes.ReturnNodes import makeStatementReturn
from nuitka.nodes.SliceNodes import makeExpressionBuiltinSlice
from nuitka.nodes.TypeNodes import (
ExpressionBuiltinIsinstance,
ExpressionBuiltinIssubclass,
ExpressionBuiltinSuper0,
ExpressionBuiltinSuper2,
ExpressionBuiltinType1,
)
from nuitka.nodes.VariableRefNodes import (
ExpressionTempVariableRef,
ExpressionVariableRef,
)
from nuitka.PythonVersions import python_version
from nuitka.specs import BuiltinParameterSpecs
from nuitka.Tracing import optimization_logger
from nuitka.tree.ReformulationExecStatements import wrapEvalGlobalsAndLocals
from nuitka.tree.ReformulationTryFinallyStatements import (
makeTryFinallyStatement,
)
from nuitka.tree.TreeHelpers import (
makeCallNode,
makeStatementsSequence,
makeStatementsSequenceFromStatement,
)
def dir_extractor(node):
    """Transform a "dir" call; without arguments it is "locals().keys()"."""
    locals_scope = node.subnode_called.getLocalsScope()

    def buildDirEmptyCase(source_ref):
        keys_call = makeCallNode(
            ExpressionAttributeLookup(
                expression=makeExpressionBuiltinLocals(
                    locals_scope=locals_scope, source_ref=source_ref
                ),
                attribute_name="keys",
                source_ref=source_ref,
            ),
            source_ref,
        )

        # On Python2 ".keys()" already is a list; on Python3 it must be
        # wrapped to become one.
        if python_version < 0x300:
            return keys_call

        return ExpressionBuiltinList(value=keys_call, source_ref=source_ref)

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        # TODO: Needs locals_scope attached.
        builtin_class=ExpressionBuiltinDir1,
        builtin_spec=BuiltinParameterSpecs.builtin_dir_spec,
        empty_special_class=buildDirEmptyCase,
    )
def vars_extractor(node):
    """Transform a "vars" call; without arguments it is just "locals()"."""
    locals_scope = node.subnode_called.getLocalsScope()

    def makeVarsEmptyCase(source_ref):
        return makeExpressionBuiltinLocals(
            locals_scope=locals_scope, source_ref=source_ref
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        # TODO: Needs locals_cope attached
        builtin_class=ExpressionBuiltinVars,
        builtin_spec=BuiltinParameterSpecs.builtin_vars_spec,
        empty_special_class=makeVarsEmptyCase,
    )
def import_extractor(node):
    """Transform a dynamic "__import__" call into a static import node."""
    spec = BuiltinParameterSpecs.builtin_import_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinImport, builtin_spec=spec
    )
def type_extractor(node):
    """Dispatch a "type" call on argument count: 1 inspects, 3 creates."""
    args = node.subnode_args
    iter_length = 0 if args is None else args.getIterationLength()

    if iter_length == 1:
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinType1,
            builtin_spec=BuiltinParameterSpecs.builtin_type1_spec,
        )

    if iter_length == 3:
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinType3,
            builtin_spec=BuiltinParameterSpecs.builtin_type3_spec,
        )

    # Any other argument count is a static error.
    return makeRaiseExceptionReplacementExpressionFromInstance(
        expression=node, exception=TypeError("type() takes 1 or 3 arguments")
    )
def iter_extractor(node):
    """Pick the iter variant: plain, or with a sentinel value."""

    # Callback argument names must match the built-in spec argument names.
    def selectIterBuiltin(callable_arg, sentinel, source_ref):
        if sentinel is not None:
            return ExpressionBuiltinIter2(
                callable_arg=callable_arg, sentinel=sentinel, source_ref=source_ref
            )

        return ExpressionBuiltinIter1(value=callable_arg, source_ref=source_ref)

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectIterBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_iter_spec,
    )
def next_extractor(node):
    """Pick the next variant: with or without a default value."""

    # Split up next with and without defaults, they are not going to behave
    # really very similar.
    def wrapNextCreation(iterator, default, source_ref):
        if default is not None:
            return ExpressionBuiltinNext2(
                iterator=iterator, default=default, source_ref=source_ref
            )

        return ExpressionBuiltinNext1(value=iterator, source_ref=source_ref)

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapNextCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_next_spec,
    )
def sum_extractor(node):
    """Pick the sum variant: with or without a start value."""

    # Split up sum with and without start value, one is much easier.
    def wrapSumCreation(sequence, start, source_ref):
        if start is not None:
            return ExpressionBuiltinSum2(
                sequence=sequence, start=start, source_ref=source_ref
            )

        return ExpressionBuiltinSum1(sequence=sequence, source_ref=source_ref)

    def makeSum0(source_ref):
        # pylint: disable=unused-argument

        # The error message text changed with Python 3.8.
        exception_message = (
            "sum expected at least 1 arguments, got 0"
            if python_version < 0x380
            else "sum() takes at least 1 positional argument (0 given)"
        )

        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node, exception=TypeError(exception_message)
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapSumCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_sum_spec,
        empty_special_class=makeSum0,
    )
def dict_extractor(node):
    """Transform a "dict" call into a dedicated dict creation node.

    More than one positional argument becomes a statically raised
    TypeError, with argument evaluation preserved as side effects.
    """

    # The "dict" built-in is a bit strange in that it accepts a position
    # parameter, or not, but won't have a default value.
    def wrapExpressionBuiltinDictCreation(positional_args, dict_star_arg, source_ref):
        if len(positional_args) > 1:
            # Wrong usage: raise at run time, but still evaluate the given
            # arguments for their side effects first.
            result = makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node,
                exception=TypeError(
                    "dict expected at most 1 arguments, got %d" % (len(positional_args))
                ),
            )

            result = wrapExpressionWithSideEffects(
                side_effects=positional_args, old_node=node, new_node=result
            )

            if dict_star_arg:
                result = wrapExpressionWithSideEffects(
                    side_effects=dict_star_arg, old_node=node, new_node=result
                )

            return result

        return ExpressionBuiltinDict(
            pos_arg=positional_args[0] if positional_args else None,
            pairs=dict_star_arg,
            source_ref=source_ref,
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapExpressionBuiltinDictCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_dict_spec,
    )
def chr_extractor(node):
    """Transform a "chr" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_chr_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinChr, builtin_spec=spec
    )
def ord_extractor(node):
    """Transform an "ord" call; zero arguments become a TypeError raise."""

    def makeOrd0(source_ref):
        # pylint: disable=unused-argument
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node,
            exception=TypeError("ord() takes exactly one argument (0 given)"),
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinOrd,
        builtin_spec=BuiltinParameterSpecs.builtin_ord_spec,
        empty_special_class=makeOrd0,
    )
def bin_extractor(node):
    """Transform a "bin" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_bin_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinBin, builtin_spec=spec
    )


def oct_extractor(node):
    """Transform an "oct" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_oct_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinOct, builtin_spec=spec
    )


def hex_extractor(node):
    """Transform a "hex" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_hex_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinHex, builtin_spec=spec
    )


def id_extractor(node):
    """Transform an "id" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_id_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinId, builtin_spec=spec
    )


def repr_extractor(node):
    """Transform a "repr" call into the unary repr operation node."""
    spec = BuiltinParameterSpecs.builtin_repr_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionOperationUnaryRepr, builtin_spec=spec
    )
# The "ascii" built-in only exists on Python3.
if python_version >= 0x300:

    def ascii_extractor(node):
        """Transform an "ascii" call into a dedicated node."""
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinAscii,
            # Same call signature as "repr", so its spec is reused here.
            builtin_spec=BuiltinParameterSpecs.builtin_repr_spec,
        )
def range_extractor(node):
    """Pick the range variant matching the given argument count."""

    def selectRangeBuiltin(low, high, step, source_ref):
        if high is None:
            return ExpressionBuiltinRange1(low=low, source_ref=source_ref)

        if step is None:
            return ExpressionBuiltinRange2(low=low, high=high, source_ref=source_ref)

        return ExpressionBuiltinRange3(
            low=low, high=high, step=step, source_ref=source_ref
        )

    def makeRange0(source_ref):
        # pylint: disable=unused-argument

        # Let the real built-in produce the exact exception to raise
        # statically in its place.
        try:
            range()
        except Exception as e:  # broad on purpose, pylint: disable=broad-except
            return makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node, exception=e
            )

        raise NuitkaAssumptionError("range without argument is expected to raise")

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectRangeBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_range_spec,
        empty_special_class=makeRange0,
    )
def xrange_extractor(node):
    """Pick the xrange variant matching the given argument count."""

    def selectXrangeBuiltin(low, high, step, source_ref):
        if high is None:
            return ExpressionBuiltinXrange1(low=low, source_ref=source_ref)
        elif step is None:
            return ExpressionBuiltinXrange2(low=low, high=high, source_ref=source_ref)
        else:
            return ExpressionBuiltinXrange3(
                low=low, high=high, step=step, source_ref=source_ref
            )

    def makeXrange0(source_ref):
        # pylint: disable=unused-argument

        # Let the real built-in produce the exact exception to raise
        # statically in its place.
        try:
            xrange()
        except Exception as e:  # broad on purpose, pylint: disable=broad-except
            return makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node, exception=e
            )
        else:
            # Fixed message, previously said "range" due to copy and paste
            # from "range_extractor".
            raise NuitkaAssumptionError(
                "xrange without argument is expected to raise"
            )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectXrangeBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_xrange_spec,
        empty_special_class=makeXrange0,
    )
def len_extractor(node):
    """Transform a "len" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_len_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinLen, builtin_spec=spec
    )


def all_extractor(node):
    """Transform an "all" call; zero arguments become a TypeError raise."""

    def makeAll0(source_ref):
        # pylint: disable=unused-argument
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node,
            exception=TypeError("all() takes exactly one argument (0 given)"),
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinAll,
        builtin_spec=BuiltinParameterSpecs.builtin_all_spec,
        empty_special_class=makeAll0,
    )


def abs_extractor(node):
    """Transform an "abs" call into the unary abs operation node."""
    spec = BuiltinParameterSpecs.builtin_abs_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionOperationUnaryAbs, builtin_spec=spec
    )


def any_extractor(node):
    """Transform an "any" call; zero arguments become a TypeError raise."""

    def makeAny0(source_ref):
        # pylint: disable=unused-argument
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node,
            exception=TypeError("any() takes exactly one argument (0 given)"),
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinAny,
        builtin_spec=BuiltinParameterSpecs.builtin_any_spec,
        empty_special_class=makeAny0,
    )
def tuple_extractor(node):
    """Transform a "tuple" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_tuple_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinTuple, builtin_spec=spec
    )


def list_extractor(node):
    """Transform a "list" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_list_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinList, builtin_spec=spec
    )


def set_extractor(node):
    """Transform a "set" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_set_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinSet, builtin_spec=spec
    )


def frozenset_extractor(node):
    """Transform a "frozenset" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_frozenset_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinFrozenset, builtin_spec=spec
    )
def float_extractor(node):
    """Transform a "float" call; zero arguments become constant 0.0."""

    def makeFloat0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=float(), node=node, user_provided=False
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinFloat,
        builtin_spec=BuiltinParameterSpecs.builtin_float_spec,
        empty_special_class=makeFloat0,
    )
def complex_extractor(node):
    """Transform a "complex" call; zero arguments become constant 0j."""

    def makeComplex0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=complex(), node=node, user_provided=False
        )

    def selectComplexBuiltin(real, imag, source_ref):
        if imag is not None:
            return ExpressionBuiltinComplex2(
                real=real, imag=imag, source_ref=source_ref
            )

        return ExpressionBuiltinComplex1(value=real, source_ref=source_ref)

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectComplexBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_complex_spec,
        empty_special_class=makeComplex0,
    )
def str_extractor(node):
    """Transform a "str" call, picking the Python2 or Python3 node class."""
    if str is bytes:
        builtin_class = ExpressionBuiltinStrP2
    else:
        builtin_class = ExpressionBuiltinStrP3

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=builtin_class,
        builtin_spec=builtin_class.builtin_spec,
    )
if python_version < 0x300:
    # Python2 only: the "unicode" built-in.
    def unicode_extractor(node):
        """Transform a "unicode" call into a dedicated node."""
        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=ExpressionBuiltinUnicodeP2,
            builtin_spec=ExpressionBuiltinUnicodeP2.builtin_spec,
        )

else:
    # Python3 only: the "bytes" built-in with its own node classes.
    from nuitka.nodes.BuiltinTypeNodes import (
        ExpressionBuiltinBytes1,
        ExpressionBuiltinBytes3,
    )

    def bytes_extractor(node):
        """Transform a "bytes" call; zero arguments become an empty constant."""

        def makeBytes0(source_ref):
            # pylint: disable=unused-argument
            return makeConstantReplacementNode(
                constant=bytes(), node=node, user_provided=False
            )

        def selectBytesBuiltin(string, encoding, errors, source_ref):
            if encoding is None and errors is None:
                return ExpressionBuiltinBytes1(value=string, source_ref=source_ref)
            else:
                return ExpressionBuiltinBytes3(
                    value=string,
                    encoding=encoding,
                    errors=errors,
                    source_ref=source_ref,
                )

        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=selectBytesBuiltin,
            builtin_spec=BuiltinParameterSpecs.builtin_bytes_p3_spec,
            empty_special_class=makeBytes0,
        )
def bool_extractor(node):
    """Transform a "bool" call; zero arguments become constant False."""

    def makeBool0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=bool(), node=node, user_provided=False
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinBool,
        builtin_spec=BuiltinParameterSpecs.builtin_bool_spec,
        empty_special_class=makeBool0,
    )
def int_extractor(node):
    """Transform an "int" call; a base argument needs a different node."""

    def makeInt0(source_ref):
        # pylint: disable=unused-argument
        return makeConstantReplacementNode(
            constant=int(), node=node, user_provided=False
        )

    def selectIntBuiltin(value, base, source_ref):
        if base is not None:
            return ExpressionBuiltinInt2(value=value, base=base, source_ref=source_ref)

        return ExpressionBuiltinInt1(value=value, source_ref=source_ref)

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectIntBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_int_spec,
        empty_special_class=makeInt0,
    )
if python_version < 0x300:
    from nuitka.nodes.BuiltinIntegerNodes import (
        ExpressionBuiltinLong1,
        ExpressionBuiltinLong2,
    )

    def long_extractor(node):
        """Transform a Python2 "long" call; a base needs a different node."""

        def makeLong0(source_ref):
            # pylint: disable=unused-argument
            return makeConstantReplacementNode(
                constant=int(), node=node, user_provided=False
            )

        # Renamed from "selectIntBuiltin", a copy and paste leftover from
        # "int_extractor".
        def selectLongBuiltin(value, base, source_ref):
            if base is None:
                return ExpressionBuiltinLong1(value=value, source_ref=source_ref)
            else:
                return ExpressionBuiltinLong2(
                    value=value, base=base, source_ref=source_ref
                )

        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=selectLongBuiltin,
            # Argument names "value" and "base" are the same as for "int",
            # so its spec is shared here.
            builtin_spec=BuiltinParameterSpecs.builtin_int_spec,
            empty_special_class=makeLong0,
        )
def globals_extractor(node):
    """Transform a "globals" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_globals_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinGlobals, builtin_spec=spec
    )


def locals_extractor(node):
    """Transform a "locals" call, attaching the active locals scope."""
    locals_scope = node.subnode_called.getLocalsScope()

    # The node needs the scope attached, so a factory stands in for the
    # node class here.
    def makeLocalsNode(source_ref):
        return makeExpressionBuiltinLocals(
            locals_scope=locals_scope, source_ref=source_ref
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=makeLocalsNode,
        builtin_spec=BuiltinParameterSpecs.builtin_locals_spec,
    )
if python_version < 0x300:
    # Python2 only: the "execfile" built-in.
    from nuitka.nodes.ExecEvalNodes import ExpressionBuiltinExecfile

    def execfile_extractor(node):
        """Transform an "execfile" call into reading the file and exec of it."""

        def wrapExpressionBuiltinExecfileCreation(
            filename, globals_arg, locals_arg, source_ref
        ):
            # The whole re-formulation lives in an outline body.
            outline_body = ExpressionOutlineBody(
                provider=node.getParentVariableProvider(),
                name="execfile_call",
                source_ref=source_ref,
            )

            # Set up globals/locals temp variables with their cleanup code.
            globals_ref, locals_ref, tried, final = wrapEvalGlobalsAndLocals(
                provider=node.getParentVariableProvider(),
                globals_node=globals_arg,
                locals_node=locals_arg,
                temp_scope=outline_body.getOutlineTempScope(),
                source_ref=source_ref,
            )

            # Open the file in universal newlines mode ("rU"), read it, and
            # exec the resulting source code.
            tried = makeStatementsSequence(
                statements=(
                    tried,
                    makeStatementReturn(
                        expression=ExpressionBuiltinExecfile(
                            source_code=makeCallNode(
                                ExpressionAttributeLookup(
                                    expression=ExpressionBuiltinOpen(
                                        filename=filename,
                                        mode=makeConstantRefNode(
                                            constant="rU", source_ref=source_ref
                                        ),
                                        buffering=None,
                                        source_ref=source_ref,
                                    ),
                                    attribute_name="read",
                                    source_ref=source_ref,
                                ),
                                source_ref,
                            ),
                            globals_arg=globals_ref,
                            locals_arg=locals_ref,
                            source_ref=source_ref,
                        ),
                        source_ref=source_ref,
                    ),
                ),
                allow_none=False,
                source_ref=source_ref,
            )

            outline_body.setChild(
                "body",
                makeStatementsSequenceFromStatement(
                    statement=makeTryFinallyStatement(
                        provider=outline_body,
                        tried=tried,
                        final=final,
                        source_ref=source_ref,
                    )
                ),
            )

            return outline_body

        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=wrapExpressionBuiltinExecfileCreation,
            builtin_spec=BuiltinParameterSpecs.builtin_execfile_spec,
        )
def eval_extractor(node):
    """Transform an "eval" call into a dedicated eval node.

    Re-formulated as an outline body that captures globals/locals in temp
    variables, strips whitespace from string sources as "eval" requires,
    and then performs the actual evaluation.
    """

    def wrapEvalBuiltin(source, globals_arg, locals_arg, source_ref):
        provider = node.getParentVariableProvider()

        outline_body = ExpressionOutlineBody(
            provider=node.getParentVariableProvider(),
            name="eval_call",
            source_ref=source_ref,
        )

        # Set up globals/locals temp variables with their cleanup code.
        globals_ref, locals_ref, tried, final = wrapEvalGlobalsAndLocals(
            provider=provider,
            globals_node=globals_arg,
            locals_node=locals_arg,
            temp_scope=outline_body.getOutlineTempScope(),
            source_ref=source_ref,
        )

        # Sanity check: the temp references must carry the source references
        # of the original argument nodes.
        assert (
            globals_arg is None
            or globals_ref.getSourceReference() == globals_arg.getSourceReference()
        )
        assert (
            locals_arg is None
            or locals_ref.getSourceReference() == locals_arg.getSourceReference()
        )

        source_variable = outline_body.allocateTempVariable(
            temp_scope=None, name="source"
        )

        # Make sure the source temp variable gets released in the end too.
        final.setChild(
            "statements",
            final.subnode_statements
            + (
                StatementDelVariable(
                    variable=source_variable, tolerant=True, source_ref=source_ref
                ),
            ),
        )

        strip_choice = makeConstantRefNode(constant=(" \t",), source_ref=source_ref)

        if python_version >= 0x300:
            # On Python3, a bytes source needs bytes whitespace for the
            # strip call; choose at run time based on the source type.
            strip_choice = ExpressionConditional(
                condition=ExpressionComparisonIs(
                    left=ExpressionBuiltinType1(
                        value=ExpressionTempVariableRef(
                            variable=source_variable, source_ref=source_ref
                        ),
                        source_ref=source_ref,
                    ),
                    right=makeExpressionBuiltinTypeRef(
                        builtin_name="bytes", source_ref=source_ref
                    ),
                    source_ref=source_ref,
                ),
                expression_yes=makeConstantRefNode(
                    constant=(b" \t",), source_ref=source_ref
                ),
                expression_no=strip_choice,
                source_ref=source_ref,
            )

        # Leading and trailing whitespace of string sources must be stripped.
        string_fixup = StatementAssignmentVariable(
            variable=source_variable,
            source=makeExpressionCall(
                called=ExpressionAttributeLookup(
                    expression=ExpressionTempVariableRef(
                        variable=source_variable, source_ref=source_ref
                    ),
                    attribute_name="strip",
                    source_ref=source_ref,
                ),
                args=strip_choice,  # This is a tuple
                kw=None,
                source_ref=source_ref,
            ),
            source_ref=source_ref,
        )

        # Code objects (and "memoryview" where available) bypass the strip.
        acceptable_builtin_types = [
            ExpressionBuiltinAnonymousRef(builtin_name="code", source_ref=source_ref)
        ]
        if python_version >= 0x270:
            acceptable_builtin_types.append(
                makeExpressionBuiltinTypeRef(
                    builtin_name="memoryview", source_ref=source_ref
                )
            )

        statements = (
            StatementAssignmentVariable(
                variable=source_variable, source=source, source_ref=source_ref
            ),
            makeStatementConditional(
                condition=ExpressionOperationNot(
                    operand=ExpressionBuiltinIsinstance(
                        instance=ExpressionTempVariableRef(
                            variable=source_variable, source_ref=source_ref
                        ),
                        classes=makeExpressionMakeTupleOrConstant(
                            elements=acceptable_builtin_types,
                            user_provided=True,
                            source_ref=source_ref,
                        ),
                        source_ref=source_ref,
                    ),
                    source_ref=source_ref,
                ),
                yes_branch=string_fixup,
                no_branch=None,
                source_ref=source_ref,
            ),
            makeStatementReturn(
                expression=ExpressionBuiltinEval(
                    source_code=ExpressionTempVariableRef(
                        variable=source_variable, source_ref=source_ref
                    ),
                    globals_arg=globals_ref,
                    locals_arg=locals_ref,
                    source_ref=source_ref,
                ),
                source_ref=source_ref,
            ),
        )

        tried = makeStatementsSequence(
            statements=(tried,) + statements, allow_none=False, source_ref=source_ref
        )

        outline_body.setChild(
            "body",
            makeStatementsSequenceFromStatement(
                statement=makeTryFinallyStatement(
                    provider=outline_body,
                    tried=tried,
                    final=final,
                    source_ref=source_ref,
                )
            ),
        )

        return outline_body

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapEvalBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_eval_spec,
    )
if python_version >= 0x300:
    # Python3 only: "exec" is a built-in function rather than a statement.
    from nuitka.nodes.ExecEvalNodes import ExpressionBuiltinExec

    def exec_extractor(node):
        """Transform an "exec" call into a dedicated exec node."""

        def wrapExpressionBuiltinExecCreation(
            source, globals_arg, locals_arg, source_ref
        ):
            provider = node.getParentVariableProvider()

            outline_body = ExpressionOutlineBody(
                provider=provider, name="exec_call", source_ref=source_ref
            )

            # Set up globals/locals temp variables with their cleanup code.
            globals_ref, locals_ref, tried, final = wrapEvalGlobalsAndLocals(
                provider=provider,
                globals_node=globals_arg,
                locals_node=locals_arg,
                temp_scope=outline_body.getOutlineTempScope(),
                source_ref=source_ref,
            )

            tried = makeStatementsSequence(
                statements=(
                    tried,
                    makeStatementReturn(
                        expression=ExpressionBuiltinExec(
                            source_code=source,
                            globals_arg=globals_ref,
                            locals_arg=locals_ref,
                            source_ref=source_ref,
                        ),
                        source_ref=source_ref,
                    ),
                ),
                allow_none=False,
                source_ref=source_ref,
            )

            # Hack: Allow some APIs to work already
            tried.parent = outline_body

            outline_body.setChild(
                "body",
                makeStatementsSequenceFromStatement(
                    statement=makeTryFinallyStatement(
                        provider=provider,
                        tried=tried,
                        final=final,
                        source_ref=source_ref,
                    )
                ),
            )

            return outline_body

        return BuiltinParameterSpecs.extractBuiltinArgs(
            node=node,
            builtin_class=wrapExpressionBuiltinExecCreation,
            # NOTE(review): reuses the "eval" spec here -- presumably the
            # argument shapes are identical, confirm no exec spec exists.
            builtin_spec=BuiltinParameterSpecs.builtin_eval_spec,
        )
def compile_extractor(node):
    """Transform a "compile" call into a dedicated node."""

    def wrapCompileCreation(
        source_code, filename, mode, flags, dont_inherit, optimize=None, source_ref=None
    ):
        return ExpressionBuiltinCompile(
            source_code=source_code,
            filename=filename,
            mode=mode,
            flags=flags,
            dont_inherit=dont_inherit,
            optimize=optimize,
            source_ref=source_ref,
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapCompileCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_compile_spec,
    )
def open_extractor(node):
    """Transform an "open" call; zero arguments re-raise the real error."""

    def makeOpen0(source_ref):
        # pylint: disable=unused-argument

        # Let the real built-in produce the exact exception to raise
        # statically in its place.
        try:
            open()
        except Exception as e:  # We want to broad here, pylint: disable=broad-except
            return makeRaiseExceptionReplacementExpressionFromInstance(
                expression=node, exception=e
            )

        raise NuitkaAssumptionError("open without argument is expected to raise")

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinOpen,
        builtin_spec=BuiltinParameterSpecs.builtin_open_spec,
        empty_special_class=makeOpen0,
    )
def super_extractor(node):
    """Transform a "super" call, filling in implicit arguments.

    On Python3, the zero-argument form draws on the "__class__" cell and
    the first parameter of the enclosing function; the failure modes are
    rewritten to static exception raises.
    """

    # Note: "provider" is assigned below, before extractBuiltinArgs ever
    # invokes this callback.
    def wrapSuperBuiltin(type_arg, object_arg, source_ref):
        if type_arg is None and python_version >= 0x300:
            # At module level there is no class or parameter to draw on.
            if provider.isCompiledPythonModule():
                return makeRaiseExceptionReplacementExpression(
                    expression=node,
                    exception_type="RuntimeError",
                    exception_value="super(): no arguments",
                )

            class_variable = provider.getVariableForReference(variable_name="__class__")

            provider.trace_collection.getVariableCurrentTrace(class_variable).addUsage()

            type_arg = ExpressionVariableRef(
                # Ought to be already closure taken due to "super" flag in
                # tree building.
                variable=class_variable,
                source_ref=source_ref,
            )

            # If we already have this as a local variable, then use that
            # instead.
            type_arg_owner = class_variable.getOwner()
            if type_arg_owner is provider or not (
                type_arg_owner.isExpressionFunctionBody()
                or type_arg_owner.isExpressionClassBody()
            ):
                return makeRaiseExceptionReplacementExpression(
                    expression=node,
                    # The exception type changed with Python 3.3.1.
                    exception_type="SystemError"
                    if python_version < 0x331
                    else "RuntimeError",
                    exception_value="super(): __class__ cell not found",
                )

            if object_arg is None:
                # Generators, coroutines and asyncgens have their parameters
                # on the creating function body.
                if (
                    provider.isExpressionGeneratorObjectBody()
                    or provider.isExpressionCoroutineObjectBody()
                    or provider.isExpressionAsyncgenObjectBody()
                ):
                    parameter_provider = provider.getParentVariableProvider()
                else:
                    parameter_provider = provider

                if parameter_provider.getParameters().getArgumentCount() == 0:
                    return makeRaiseExceptionReplacementExpression(
                        expression=node,
                        exception_type="RuntimeError",
                        exception_value="super(): no arguments",
                    )
                else:
                    # The first argument, typically "self", becomes the
                    # implicit object argument.
                    par1_name = parameter_provider.getParameters().getArgumentNames()[0]

                    object_variable = provider.getVariableForReference(
                        variable_name=par1_name
                    )

                    provider.trace_collection.getVariableCurrentTrace(
                        object_variable
                    ).addUsage()

                    object_arg = ExpressionVariableRef(
                        variable=object_variable, source_ref=source_ref
                    )

                    if not object_arg.getVariable().isParameterVariable():
                        return makeRaiseExceptionReplacementExpression(
                            expression=node,
                            exception_type="SystemError"
                            if python_version < 0x300
                            else "RuntimeError",
                            exception_value="super(): __class__ cell not found",
                        )

            return ExpressionBuiltinSuper0(
                type_arg=type_arg, object_arg=object_arg, source_ref=source_ref
            )

        return ExpressionBuiltinSuper2(
            type_arg=type_arg, object_arg=object_arg, source_ref=source_ref
        )

    provider = node.getParentVariableProvider().getEntryPoint()

    if not provider.isCompiledPythonModule():
        # The flag is no longer needed once the built-in call is resolved.
        provider.discardFlag("has_super")

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapSuperBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_super_spec,
    )
def hasattr_extractor(node):
    """Transform a "hasattr" call into a dedicated node."""

    # Argument names must match the built-in, pylint: disable=redefined-builtin
    def wrapHasattrCreation(object, name, source_ref):
        return ExpressionBuiltinHasattr(
            expression=object, name=name, source_ref=source_ref
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapHasattrCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_hasattr_spec,
    )


def getattr_extractor(node):
    """Transform a "getattr" call into a dedicated node."""

    # Argument names must match the built-in, pylint: disable=redefined-builtin
    def wrapGetattrCreation(object, name, default, source_ref):
        return ExpressionBuiltinGetattr(
            expression=object, name=name, default=default, source_ref=source_ref
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapGetattrCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_getattr_spec,
    )


def setattr_extractor(node):
    """Transform a "setattr" call into a dedicated node."""

    # Argument names must match the built-in, pylint: disable=redefined-builtin
    def wrapSetattrCreation(object, name, value, source_ref):
        return ExpressionBuiltinSetattr(
            expression=object, name=name, value=value, source_ref=source_ref
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapSetattrCreation,
        builtin_spec=BuiltinParameterSpecs.builtin_setattr_spec,
    )
def isinstance_extractor(node):
    """Transform an "isinstance" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_isinstance_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinIsinstance, builtin_spec=spec
    )


def issubclass_extractor(node):
    """Transform an "issubclass" call into a dedicated node."""
    # NOTE(review): shares the "isinstance" spec -- presumably the argument
    # shapes are identical; confirm no dedicated issubclass spec exists.
    spec = BuiltinParameterSpecs.builtin_isinstance_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinIssubclass, builtin_spec=spec
    )
def bytearray_extractor(node):
    """Transform a "bytearray" call; zero arguments give an empty constant."""

    def makeBytearray0(source_ref):
        return makeConstantRefNode(constant=bytearray(), source_ref=source_ref)

    # Renamed from "selectNextBuiltinClass", a copy and paste leftover from
    # "next_extractor".
    def selectBytearrayBuiltin(string, encoding, errors, source_ref):
        if encoding is None:
            return ExpressionBuiltinBytearray1(value=string, source_ref=source_ref)
        else:
            return ExpressionBuiltinBytearray3(
                string=string, encoding=encoding, errors=errors, source_ref=source_ref
            )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=selectBytearrayBuiltin,
        builtin_spec=BuiltinParameterSpecs.builtin_bytearray_spec,
        empty_special_class=makeBytearray0,
    )
def slice_extractor(node):
    """Transform a "slice" call into a slice creation node."""

    def wrapSlice(start, stop, step, source_ref):
        if start is not None and stop is None:
            # Default rules are strange. When only one argument is given,
            # it is the stop value, not the start.
            start, stop = None, start

        return makeExpressionBuiltinSlice(
            start=start, stop=stop, step=step, source_ref=source_ref
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=wrapSlice,
        builtin_spec=BuiltinParameterSpecs.builtin_slice_spec,
    )
def hash_extractor(node):
    """Transform a "hash" call into a dedicated node."""
    spec = BuiltinParameterSpecs.builtin_hash_spec
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node, builtin_class=ExpressionBuiltinHash, builtin_spec=spec
    )
def format_extractor(node):
    """Transform a "format" call; zero arguments become a TypeError raise."""

    def makeFormat0(source_ref):
        # pylint: disable=unused-argument
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node,
            exception=TypeError("format() takes at least 1 argument (0 given)"),
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinFormat,
        builtin_spec=BuiltinParameterSpecs.builtin_format_spec,
        empty_special_class=makeFormat0,
    )
def staticmethod_extractor(node):
    """Transform a "staticmethod" call; no arguments raise a TypeError."""

    def makeStaticmethod0(source_ref):
        # pylint: disable=unused-argument
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node,
            exception=TypeError("staticmethod expected 1 arguments, got 0"),
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinStaticmethod,
        builtin_spec=BuiltinParameterSpecs.builtin_staticmethod_spec,
        empty_special_class=makeStaticmethod0,
    )
def classmethod_extractor(node):
    """Transform calls to the built-in "classmethod" into dedicated nodes.

    A zero-argument call is replaced with raising the "TypeError" that
    CPython itself gives for "classmethod()".
    """

    def makeClassmethod0(source_ref):
        # Renamed from the copy-pasted "makeStaticmethod0"; zero arguments is
        # statically known to raise, so use CPython's own message.
        return makeRaiseExceptionReplacementExpressionFromInstance(
            expression=node,
            exception=TypeError("classmethod expected 1 arguments, got 0"),
        )

    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionBuiltinClassmethod,
        builtin_spec=BuiltinParameterSpecs.builtin_classmethod_spec,
        empty_special_class=makeClassmethod0,
    )
def divmod_extractor(node):
    """Transform calls to the built-in "divmod" into the binary operation node."""
    return BuiltinParameterSpecs.extractBuiltinArgs(
        node=node,
        builtin_class=ExpressionOperationBinaryDivmod,
        builtin_spec=BuiltinParameterSpecs.builtin_divmod_spec,
    )
# Dispatch table mapping a built-in's name to the extractor function that
# replaces a call of that built-in with a dedicated, optimizable node.
_dispatch_dict = {
    "compile": compile_extractor,
    "globals": globals_extractor,
    "locals": locals_extractor,
    "eval": eval_extractor,
    "dir": dir_extractor,
    "vars": vars_extractor,
    "__import__": import_extractor,
    "chr": chr_extractor,
    "ord": ord_extractor,
    "bin": bin_extractor,
    "oct": oct_extractor,
    "hex": hex_extractor,
    "id": id_extractor,
    "type": type_extractor,
    "iter": iter_extractor,
    "next": next_extractor,
    "sum": sum_extractor,
    "tuple": tuple_extractor,
    "list": list_extractor,
    "dict": dict_extractor,
    "set": set_extractor,
    "frozenset": frozenset_extractor,
    "float": float_extractor,
    "complex": complex_extractor,
    "str": str_extractor,
    "bool": bool_extractor,
    "int": int_extractor,
    "repr": repr_extractor,
    "len": len_extractor,
    "any": any_extractor,
    "abs": abs_extractor,
    "all": all_extractor,
    "super": super_extractor,
    "hasattr": hasattr_extractor,
    "getattr": getattr_extractor,
    "setattr": setattr_extractor,
    "isinstance": isinstance_extractor,
    "issubclass": issubclass_extractor,
    "bytearray": bytearray_extractor,
    "slice": slice_extractor,
    "hash": hash_extractor,
    "format": format_extractor,
    "open": open_extractor,
    "staticmethod": staticmethod_extractor,
    "classmethod": classmethod_extractor,
    "divmod": divmod_extractor,
}

# Register version-specific built-ins: some names only exist on Python2, and
# on Python3 "range" behaves like the old lazy "xrange".
if python_version < 0x300:
    _dispatch_dict["long"] = long_extractor
    _dispatch_dict["unicode"] = unicode_extractor
    _dispatch_dict["execfile"] = execfile_extractor
    _dispatch_dict["xrange"] = xrange_extractor
    _dispatch_dict["range"] = range_extractor
else:
    _dispatch_dict["bytes"] = bytes_extractor
    _dispatch_dict["ascii"] = ascii_extractor
    _dispatch_dict["exec"] = exec_extractor
    # Python3 "range" is lazy, as Python2 "xrange" was.
    _dispatch_dict["range"] = xrange_extractor


def check():
    """Sanity check: every dispatched name must be a known built-in name."""
    from nuitka.Builtins import builtin_names

    for builtin_name in _dispatch_dict:
        assert builtin_name in builtin_names, builtin_name


check()

# Built-ins knowingly left unoptimized; consulted by the (currently disabled)
# warning branch in computeBuiltinCall below.
_builtin_ignore_list = (
    "print",
    "sorted",
    "zip",
    "enumerate",
    "reversed",
    "memoryview",
)
def _describeNewNode(builtin_name, inspect_node):
    """Produce change-tracking information for a built-in replacement.

    :param builtin_name: Name of the built-in whose call was replaced.
    :param inspect_node: The replacement node to describe.
    :return: Tuple (tags, message) describing the optimization performed.
    """
    # When the replacement is wrapped in side effects, describe the wrapped
    # expression, which is what carries the interesting kind.
    if inspect_node.isExpressionSideEffects():
        inspect_node = inspect_node.subnode_expression
    if inspect_node.isExpressionBuiltinImport():
        tags = "new_import"
        message = """\
Replaced dynamic "__import__" call with static built-in call."""
    elif inspect_node.isExpressionBuiltin() or inspect_node.isStatementExec():
        tags = "new_builtin"
        message = "Replaced call to built-in '%s' with built-in call '%s'." % (
            builtin_name,
            inspect_node.kind,
        )
    elif inspect_node.isExpressionRaiseException():
        tags = "new_raise"
        message = """\
Replaced call to built-in '%s' with exception raise.""" % (
            builtin_name,
        )
    elif inspect_node.isExpressionOperationBinary():
        tags = "new_expression"
        message = """\
Replaced call to built-in '%s' with binary operation '%s'.""" % (
            builtin_name,
            inspect_node.getOperator(),
        )
    elif inspect_node.isExpressionOperationUnary():
        tags = "new_expression"
        message = """\
Replaced call to built-in '%s' with unary operation '%s'.""" % (
            builtin_name,
            inspect_node.getOperator(),
        )
    elif inspect_node.isExpressionCall():
        tags = "new_expression"
        message = """\
Replaced call to built-in '%s' with call.""" % (
            builtin_name,
        )
    elif inspect_node.isExpressionOutlineBody():
        tags = "new_expression"
        message = (
            """\
Replaced call to built-in '%s' with outlined call."""
            % builtin_name
        )
    elif inspect_node.isExpressionConstantRef():
        tags = "new_expression"
        message = (
            """\
Replaced call to built-in '%s' with constant value."""
            % builtin_name
        )
    else:
        # Every replacement kind must have a description branch above.
        assert False, (builtin_name, "->", inspect_node)
    return tags, message
def computeBuiltinCall(builtin_name, call_node):
    """Attempt to replace a call to a built-in with a dedicated node.

    :param builtin_name: Name of the built-in being called.
    :param call_node: The call node that may be replaced.
    :return: Tuple (node, tags, message); the unchanged call node with None
        tags/message when the built-in is not handled.
    """
    if builtin_name in _dispatch_dict:
        new_node = _dispatch_dict[builtin_name](call_node)
        # Extractors must produce an actual replacement, never the input node
        # itself and never nothing.
        assert new_node is not call_node, builtin_name
        assert new_node is not None, builtin_name
        tags, message = _describeNewNode(builtin_name, new_node)
        return new_node, tags, message
    else:
        # Deliberately disabled ("if False"): would warn about built-ins that
        # are neither handled nor on the known ignore list.
        if False and builtin_name not in _builtin_ignore_list:
            optimization_logger.warning(
                "Not handling built-in %r, consider support." % builtin_name
            )
        return call_node, None, None
| true | true |
f713bd605f56403edd2af58f3a6b67b73eebfb33 | 746 | py | Python | release/stubs.min/System/__init___parts/IFormatProvider.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 182 | 2017-06-27T02:26:15.000Z | 2022-03-30T18:53:43.000Z | release/stubs.min/System/__init___parts/IFormatProvider.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 28 | 2017-06-27T13:38:23.000Z | 2022-03-15T11:19:44.000Z | release/stubs.min/System/__init___parts/IFormatProvider.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 67 | 2017-06-28T09:43:59.000Z | 2022-03-20T21:17:10.000Z | class IFormatProvider:
""" Provides a mechanism for retrieving an object to control formatting. """
def GetFormat(self,formatType):
"""
GetFormat(self: IFormatProvider,formatType: Type) -> object
Returns an object that provides formatting services for the specified type.
formatType: An object that specifies the type of format object to return.
Returns: An instance of the object specified by formatType,if the System.IFormatProvider implementation
can supply that type of object; otherwise,null.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
| 32.434783 | 147 | 0.718499 | class IFormatProvider:
def GetFormat(self,formatType):
pass
def __init__(self,*args):
pass
| true | true |
f713bd7d306ff19bdfc4bafacd88ec5bf2b55af8 | 782 | py | Python | backend/histocat/api/panorama/controller.py | BodenmillerGroup/histocat-web | c598cd07506febf0b7c209626d4eb869761f2e62 | [
"MIT"
] | 4 | 2021-06-14T15:19:25.000Z | 2022-02-09T13:17:39.000Z | backend/histocat/api/panorama/controller.py | BodenmillerGroup/histocat-web | c598cd07506febf0b7c209626d4eb869761f2e62 | [
"MIT"
] | null | null | null | backend/histocat/api/panorama/controller.py | BodenmillerGroup/histocat-web | c598cd07506febf0b7c209626d4eb869761f2e62 | [
"MIT"
] | 1 | 2022-02-09T13:17:41.000Z | 2022-02-09T13:17:41.000Z | import os
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from starlette.responses import FileResponse
from histocat.api.db import get_db
from histocat.core.panorama import service
# Router exposing the panorama endpoints defined in this module.
router = APIRouter()


@router.get("/panoramas/{id}/image", responses={200: {"content": {"image/png": {}}}})
async def read_panorama_image(
    id: int,
    # user: User = Depends(get_current_active_user),
    db: Session = Depends(get_db),
):
    """
    Get panorama image by id
    """
    # NOTE(review): the authentication dependency above is commented out, so
    # this endpoint is currently unauthenticated — confirm that is intended.
    item = service.get(db, id=id)
    slide = item.slide
    # Serve the pre-rendered panorama PNG from the slide's "origin" folder;
    # the file name combines the slide and panorama origin identifiers.
    return FileResponse(
        os.path.join(
            item.slide.location,
            "origin",
            f"{slide.name}_s{slide.origin_id}_p{item.origin_id}_pano.png",
        ),
        media_type="image/png",
    )
| 24.4375 | 85 | 0.649616 | import os
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from starlette.responses import FileResponse
from histocat.api.db import get_db
from histocat.core.panorama import service
router = APIRouter()
@router.get("/panoramas/{id}/image", responses={200: {"content": {"image/png": {}}}})
async def read_panorama_image(
id: int,
db: Session = Depends(get_db),
):
item = service.get(db, id=id)
slide = item.slide
return FileResponse(
os.path.join(
item.slide.location,
"origin",
f"{slide.name}_s{slide.origin_id}_p{item.origin_id}_pano.png",
),
media_type="image/png",
)
| true | true |
f713bd985d707ee952fc4906911d895395ad2c03 | 2,325 | py | Python | google-cloud-sdk/lib/surface/runtime_config/configs/list.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/surface/runtime_config/configs/list.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/surface/runtime_config/configs/list.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 3 | 2017-07-27T18:44:13.000Z | 2020-07-25T17:48:53.000Z | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The configs list command."""
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.runtime_config import util
from googlecloudsdk.calliope import base
class List(base.ListCommand):
"""List runtime-config resources within the current project.
This command lists runtime-config resources for the current project.
"""
DEFAULT_PAGE_SIZE = 100
detailed_help = {
'EXAMPLES': """\
To list all runtime-config resources for the current project, run:
$ {command}
The --filter parameter can be used to filter results based on content.
For example, to list all runtime-config resources with names that
begin with 'foo', run:
$ {command} --filter 'name=foo*'
""",
}
  @staticmethod
  def Args(parser):
    # Default output format: a two-column table of name and description.
    parser.display_info.AddFormat('table(name, description)')
  def Run(self, args):
    """Run 'runtime-configs list'.

    Args:
      args: argparse.Namespace, The arguments that this command was invoked
          with.

    Yields:
      The list of runtime-config resources.

    Raises:
      HttpException: An http error response was received while executing api
          request.
    """
    config_client = util.ConfigClient()
    messages = util.Messages()
    project = util.Project()

    request = messages.RuntimeconfigProjectsConfigsListRequest(
        parent=util.ProjectPath(project),
    )

    # Fall back to the class default when --page-size was not given.
    page_size = args.page_size or self.DEFAULT_PAGE_SIZE

    # YieldFromList pages through results lazily; --limit caps the total and
    # page_size controls the per-request batch.
    results = list_pager.YieldFromList(
        config_client, request, field='configs',
        batch_size_attribute='pageSize', limit=args.limit,
        batch_size=page_size,
    )

    for result in results:
      yield util.FormatConfig(result)
| 28.703704 | 80 | 0.695914 |
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.runtime_config import util
from googlecloudsdk.calliope import base
class List(base.ListCommand):
DEFAULT_PAGE_SIZE = 100
detailed_help = {
'EXAMPLES': """\
To list all runtime-config resources for the current project, run:
$ {command}
The --filter parameter can be used to filter results based on content.
For example, to list all runtime-config resources with names that
begin with 'foo', run:
$ {command} --filter 'name=foo*'
""",
}
@staticmethod
def Args(parser):
parser.display_info.AddFormat('table(name, description)')
def Run(self, args):
config_client = util.ConfigClient()
messages = util.Messages()
project = util.Project()
request = messages.RuntimeconfigProjectsConfigsListRequest(
parent=util.ProjectPath(project),
)
page_size = args.page_size or self.DEFAULT_PAGE_SIZE
results = list_pager.YieldFromList(
config_client, request, field='configs',
batch_size_attribute='pageSize', limit=args.limit,
batch_size=page_size,
)
for result in results:
yield util.FormatConfig(result)
| true | true |
f713be977bca839114bb2f2747397cf2cbc53701 | 786 | py | Python | filemonitor/migrations/0006_auto_20170523_1706.py | imsilence/shadow-hostmonitor | faa28d7f5bb85212d5a64a60f742b807cf8644f7 | [
"Apache-2.0"
] | 1 | 2019-11-02T14:25:29.000Z | 2019-11-02T14:25:29.000Z | filemonitor/migrations/0006_auto_20170523_1706.py | imsilence/shadow-hostmonitor | faa28d7f5bb85212d5a64a60f742b807cf8644f7 | [
"Apache-2.0"
] | null | null | null | filemonitor/migrations/0006_auto_20170523_1706.py | imsilence/shadow-hostmonitor | faa28d7f5bb85212d5a64a60f742b807cf8644f7 | [
"Apache-2.0"
] | 1 | 2019-11-02T14:25:19.000Z | 2019-11-02T14:25:19.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-23 09:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('filemonitor', '0005_auto_20170523_1541'),
]
operations = [
migrations.RenameField(
model_name='actualfile',
old_name='status',
new_name='ckstatus',
),
migrations.AddField(
model_name='actualfile',
name='dlstatus',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='actualfile',
name='remark',
field=models.TextField(default=''),
),
]
| 25.354839 | 52 | 0.550891 |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('filemonitor', '0005_auto_20170523_1541'),
]
operations = [
migrations.RenameField(
model_name='actualfile',
old_name='status',
new_name='ckstatus',
),
migrations.AddField(
model_name='actualfile',
name='dlstatus',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='actualfile',
name='remark',
field=models.TextField(default=''),
),
]
| true | true |
f713bed89ea6d867f3c6c6448a2bf04f05614686 | 1,695 | py | Python | async.py | Ethan-Xie/python_study | 0e251709de37d38e3ea9af2202d8f94766d3a64f | [
"MIT"
] | null | null | null | async.py | Ethan-Xie/python_study | 0e251709de37d38e3ea9af2202d8f94766d3a64f | [
"MIT"
] | null | null | null | async.py | Ethan-Xie/python_study | 0e251709de37d38e3ea9af2202d8f94766d3a64f | [
"MIT"
] | null | null | null |
# 使用yield 实现单线程的异步并发效果
import time
def consumer(name):
print("%s 准备吃包子啦!" %name)
while True:
baozi = yield #接收值
print("包子[%s]来了,被[%s]吃了!" %(baozi,name))
def producer(name):
c = consumer("A")
c2 = consumer("B")
c.__next__()
c2.__next__()
print("老子开始做包子了")
for i in range(1): #0
time.sleep(1)
print("做了两个包子了")
c.send(i) #//给 yiled 送值
c2.send(i)
producer("alex") # 每分钟做两个包子,并同时分给两个人
"""
A 准备吃包子啦!
B 准备吃包子啦!
老子开始做包子了
做了两个包子了
包子[0]来了,被[A]吃了!
包子[0]来了,被[B]吃了!
做了两个包子了
包子[1]来了,被[A]吃了!
包子[1]来了,被[B]吃了!
"""
# Python decorators, i.e. the pattern tv = login(tv); tv("alex").
def w1(func):
    # Runs at decoration time and hands back "inner" as the replacement.
    print("我在w1函数内")
    def inner():
        # Note: returns "func" WITHOUT calling it, so the caller must call
        # the result again to actually run the wrapped function.
        print("我在inner函数内")
        return func
    return inner
#@w1
def f1():
    print('我在f1函数内')
flag=w1(f1)  # run w1; returns inner
#print(flag)
flag=flag()  # run inner; returns f1 (uncalled)
flag()  # finally run f1
'''
我在w1函数内
我在inner函数内
我在f1函数内
'''
# --------------- next: the "@" decorator syntax ----------
print("开始@的用法说明")
@w1
def f2():
    print('我在f1函数内')
f2()  # f2 is now "inner": prints its message and returns original f2 uncalled
"""
@w1 :执行w1,把自己装饰的函数名当作参数,相对于w1(f2)
show 函数重新定义,w1(show)返回值
新show =
"""
# @w1(f1) would be the decorator-with-arguments form
def f3():
    print('我在f1函数内')
"""
@filter(before,after)
1. 执行filter(before,after)
2.@outer
3 新的
"""
# --------------- recursion ----------
def calc(n):
    """Recursively halve n while n/2 > 1, printing on the way down and up.

    Always returns None: the base case falls through without a return, so
    "res" is None on every level of the unwind.
    """
    if n/2 >1:
        print(n)
        res=calc(n/2)
        print(n,res)
        return res
calc(20)
"""
20
10.0
5.0
2.5
2.5 None
5.0 None
10.0 None
20 None
"""
# Sequence where the third number = number2 + number1 (Fibonacci-like).
def func3(arg1,arg2):
    """Print a Fibonacci-style sequence seeded by (arg1, arg2), up past 110."""
    if arg1==0:
        # Only the very first call prints the two seed values.
        print(arg1)
        print(arg2)
    arg3=arg1+arg2
    print(arg3)
    if arg3<110:
        func3(arg2,arg3)
func3(0,1)
"""
0
1
1
2
3
5
8
"""
# Binary search: sample sorted data (1, 4, 7, ...) prepared below; the
# search itself is not implemented in this snippet.
data = list(range(1,100,3))
print(data) | 13.139535 | 48 | 0.532743 |
import time
def consumer(name):
print("%s 准备吃包子啦!" %name)
while True:
baozi = yield
print("包子[%s]来了,被[%s]吃了!" %(baozi,name))
def producer(name):
c = consumer("A")
c2 = consumer("B")
c.__next__()
c2.__next__()
print("老子开始做包子了")
for i in range(1):
time.sleep(1)
print("做了两个包子了")
c.send(i)
c2.send(i)
producer("alex")
def w1(func):
print("我在w1函数内")
def inner():
print("我在inner函数内")
return func
return inner
def f1():
print('我在f1函数内')
flag=w1(f1)
flag=flag()
flag() ("开始@的用法说明")
@w1
def f2():
print('我在f1函数内')
f2()
3():
print('我在f1函数内')
def calc(n):
if n/2 >1:
print(n)
res=calc(n/2)
print(n,res)
return res
calc(20)
def func3(arg1,arg2):
if arg1==0:
print(arg1)
print(arg2)
arg3=arg1+arg2
print(arg3)
if arg3<110:
func3(arg2,arg3)
func3(0,1)
data = list(range(1,100,3))
print(data) | true | true |
f713bf3131754f4b99d4daaa838e48deebcdd659 | 3,937 | py | Python | openGaussBase/testcase/GUC/QUERYPLAN/Opengauss_Function_Guc_Queryplan_Case0101.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/GUC/QUERYPLAN/Opengauss_Function_Guc_Queryplan_Case0101.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/GUC/QUERYPLAN/Opengauss_Function_Guc_Queryplan_Case0101.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : GUC
Case Name : 使用gs_guc set方法设置参数autoanalyze为on,观察预期结果
Description :
1.查询autoanalyze默认值
2.默认值off下建表并手动执行analyze
3.查询系统表pg_stat_all_tables中autoanalyze_count,last_autoanalyze
等字段值
4.修改参数值为on并重启数据库
5.查询该参数修改后的值
6.恢复参数默认值
Expect :
1.显示默认值为off
2.建表成功且analyze执行成功
3.查询成功,默认值off下,表未被自动分析
4.修改成功
5.显示on
6.默认值恢复成功
History :
"""
import unittest
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
# Module-level logger and database shell helper (runs SQL as user "dbuser"),
# shared by the test case below.
LOG = Logger()
commonsh = CommonSH('dbuser')
class QueryPlan(unittest.TestCase):
    """GUC test: set "autoanalyze" on via gs_guc and verify the result.

    Steps (per the module header): read the default, create a table and run
    a manual ANALYZE under the default, check pg_stat_all_tables, switch the
    parameter on with a cluster restart, verify, then restore the default.
    """

    def setUp(self):
        """Create shared constants and log the start of the case."""
        self.constant = Constant()
        LOG.info(
            '------Opengauss_Function_Guc_Queryplan_Case0101start------')

    def test_autoanalyze(self):
        """Exercise steps 1-5: default value, table + analyze, gs_guc set."""
        LOG.info('--步骤1:查看默认值--')
        sql_cmd = commonsh.execut_db_sql('''show autoanalyze;''')
        LOG.info(sql_cmd)
        # Remember the original value so tearDown can restore it.
        self.res = sql_cmd.splitlines()[-2].strip()
        LOG.info('--步骤2:建表后,通过系统表查询表并执行analyze语句--')
        sql_cmd = commonsh.execut_db_sql('''drop table if exists test_101;
            create table test_101 (id int);
            select relname, reltuples,relpages from pg_class where
            relname ='test_101';
            analyze test_101(id);
            ''')
        LOG.info(sql_cmd)
        self.assertIn(self.constant.TABLE_CREATE_SUCCESS, sql_cmd)
        self.assertIn('0', sql_cmd)
        self.assertIn(self.constant.ANALYZE_SUCCESS_MSG, sql_cmd)
        LOG.info('--步骤3:查询系统表--')
        sql_cmd = commonsh.execut_db_sql('''select last_analyze,analyze_count,
            relname,last_autoanalyze,autovacuum_count from pg_stat_all_tables
            where relname='test_101';
            ''')
        LOG.info(sql_cmd)
        # With autoanalyze off, the table must not have been auto-analyzed.
        self.assertIn('0', sql_cmd)
        LOG.info('--步骤4:gs_guc set设置autoanalyze为on并重启数据库--')
        msg = commonsh.execute_gsguc('set',
                                     self.constant.GSGUC_SUCCESS_MSG,
                                     'autoanalyze =on')
        LOG.info(msg)
        self.assertTrue(msg)
        msg = commonsh.restart_db_cluster()
        LOG.info(msg)
        status = commonsh.get_db_cluster_status()
        self.assertTrue("Degraded" in status or "Normal" in status)
        LOG.info('--步骤5:查询该参数修改后的值--')
        sql_cmd = commonsh.execut_db_sql('show autoanalyze;')
        LOG.info(sql_cmd)
        self.assertIn(self.constant.BOOLEAN_VALUES[0], sql_cmd)

    def tearDown(self):
        """Step 6: drop the table and restore the original parameter value."""
        LOG.info('--步骤6:清理环境--')
        sql_cmd = commonsh.execut_db_sql('drop table if exists test_101;')
        LOG.info(sql_cmd)
        sql_cmd = commonsh.execut_db_sql('show autoanalyze;')
        LOG.info(sql_cmd)
        # Only restore (with a restart) when the value actually changed.
        if self.res != sql_cmd.split('\n')[-2].strip():
            msg = commonsh.execute_gsguc('set',
                                         self.constant.GSGUC_SUCCESS_MSG,
                                         f"autoanalyze={self.res}")
            LOG.info(msg)
            msg = commonsh.restart_db_cluster()
            LOG.info(msg)
            status = commonsh.get_db_cluster_status()
            self.assertTrue("Degraded" in status or "Normal" in status)
            sql_cmd = commonsh.execut_db_sql('show autoanalyze;')
            LOG.info(sql_cmd)
        LOG.info(
            '-----Opengauss_Function_Guc_Queryplan_Case0101执行完成------')
| 36.119266 | 84 | 0.624333 | import unittest
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
LOG = Logger()
commonsh = CommonSH('dbuser')
class QueryPlan(unittest.TestCase):
def setUp(self):
self.constant = Constant()
LOG.info(
'------Opengauss_Function_Guc_Queryplan_Case0101start------')
def test_autoanalyze(self):
LOG.info('--步骤1:查看默认值--')
sql_cmd = commonsh.execut_db_sql('''show autoanalyze;''')
LOG.info(sql_cmd)
self.res = sql_cmd.splitlines()[-2].strip()
LOG.info('--步骤2:建表后,通过系统表查询表并执行analyze语句--')
sql_cmd = commonsh.execut_db_sql('''drop table if exists test_101;
create table test_101 (id int);
select relname, reltuples,relpages from pg_class where
relname ='test_101';
analyze test_101(id);
''')
LOG.info(sql_cmd)
self.assertIn(self.constant.TABLE_CREATE_SUCCESS, sql_cmd)
self.assertIn('0', sql_cmd)
self.assertIn(self.constant.ANALYZE_SUCCESS_MSG, sql_cmd)
LOG.info('--步骤3:查询系统表--')
sql_cmd = commonsh.execut_db_sql('''select last_analyze,analyze_count,
relname,last_autoanalyze,autovacuum_count from pg_stat_all_tables
where relname='test_101';
''')
LOG.info(sql_cmd)
self.assertIn('0', sql_cmd)
LOG.info('--步骤4:gs_guc set设置autoanalyze为on并重启数据库--')
msg = commonsh.execute_gsguc('set',
self.constant.GSGUC_SUCCESS_MSG,
'autoanalyze =on')
LOG.info(msg)
self.assertTrue(msg)
msg = commonsh.restart_db_cluster()
LOG.info(msg)
status = commonsh.get_db_cluster_status()
self.assertTrue("Degraded" in status or "Normal" in status)
LOG.info('--步骤5:查询该参数修改后的值--')
sql_cmd = commonsh.execut_db_sql('show autoanalyze;')
LOG.info(sql_cmd)
self.assertIn(self.constant.BOOLEAN_VALUES[0], sql_cmd)
def tearDown(self):
LOG.info('--步骤6:清理环境--')
sql_cmd = commonsh.execut_db_sql('drop table if exists test_101;')
LOG.info(sql_cmd)
sql_cmd = commonsh.execut_db_sql('show autoanalyze;')
LOG.info(sql_cmd)
if self.res != sql_cmd.split('\n')[-2].strip():
msg = commonsh.execute_gsguc('set',
self.constant.GSGUC_SUCCESS_MSG,
f"autoanalyze={self.res}")
LOG.info(msg)
msg = commonsh.restart_db_cluster()
LOG.info(msg)
status = commonsh.get_db_cluster_status()
self.assertTrue("Degraded" in status or "Normal" in status)
sql_cmd = commonsh.execut_db_sql('show autoanalyze;')
LOG.info(sql_cmd)
LOG.info(
'-----Opengauss_Function_Guc_Queryplan_Case0101执行完成------')
| true | true |
f713bf7049d636559b31faa12aae89196f72bea9 | 3,608 | py | Python | docs/samples/specification/azure_key_credential/generated/azure/key/credential/sample/_auto_rest_head_test_service.py | cfculhane/autorest.python | 8cbca95faee88d933a58bbbd17b76834faa8d387 | [
"MIT"
] | 35 | 2018-04-03T12:15:53.000Z | 2022-03-11T14:03:34.000Z | docs/samples/specification/azure_key_credential/generated/azure/key/credential/sample/_auto_rest_head_test_service.py | cfculhane/autorest.python | 8cbca95faee88d933a58bbbd17b76834faa8d387 | [
"MIT"
] | 652 | 2017-08-28T22:44:41.000Z | 2022-03-31T21:20:31.000Z | docs/samples/specification/azure_key_credential/generated/azure/key/credential/sample/_auto_rest_head_test_service.py | cfculhane/autorest.python | 8cbca95faee88d933a58bbbd17b76834faa8d387 | [
"MIT"
] | 29 | 2017-08-28T20:57:01.000Z | 2022-03-11T14:03:38.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import TYPE_CHECKING
from azure.core import PipelineClient
from msrest import Deserializer, Serializer
from ._configuration import AutoRestHeadTestServiceConfiguration
from .operations import HttpSuccessOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Dict, Optional
from azure.core.credentials import AzureKeyCredential
from azure.core.rest import HttpRequest, HttpResponse
class AutoRestHeadTestService(object):
"""Test Infrastructure for AutoRest.
:ivar http_success: HttpSuccessOperations operations
:vartype http_success: azure.key.credential.sample.operations.HttpSuccessOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.AzureKeyCredential
:param base_url: Service URL. Default value is 'http://localhost:3000'.
:type base_url: str
"""
def __init__(
self,
credential, # type: AzureKeyCredential
base_url="http://localhost:3000", # type: str
**kwargs # type: Any
):
# type: (...) -> None
self._config = AutoRestHeadTestServiceConfiguration(credential=credential, **kwargs)
self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {} # type: Dict[str, Any]
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.http_success = HttpSuccessOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(
self,
request, # type: HttpRequest
**kwargs # type: Any
):
# type: (...) -> HttpResponse
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> AutoRestHeadTestService
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
| 39.217391 | 113 | 0.665188 |
from copy import deepcopy
from typing import TYPE_CHECKING
from azure.core import PipelineClient
from msrest import Deserializer, Serializer
from ._configuration import AutoRestHeadTestServiceConfiguration
from .operations import HttpSuccessOperations
if TYPE_CHECKING:
from typing import Any, Dict, Optional
from azure.core.credentials import AzureKeyCredential
from azure.core.rest import HttpRequest, HttpResponse
class AutoRestHeadTestService(object):
def __init__(
self,
credential,
base_url="http://localhost:3000",
**kwargs
):
self._config = AutoRestHeadTestServiceConfiguration(credential=credential, **kwargs)
self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.http_success = HttpSuccessOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(
self,
request,
**kwargs
):
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self):
self._client.close()
def __enter__(self):
self._client.__enter__()
return self
def __exit__(self, *exc_details):
self._client.__exit__(*exc_details)
| true | true |
f713c0cb8c9f0306d03c4ed99fa96bed9e43aa74 | 713 | py | Python | assignment/migrations/0008_auto_20201218_1203.py | Magesh-sam/E-Assignment | 241252e88b0526afbb97a01b9b1814d693688885 | [
"MIT"
] | 1 | 2020-12-23T07:04:51.000Z | 2020-12-23T07:04:51.000Z | assignment/migrations/0008_auto_20201218_1203.py | Magesh-sam/E-Assignment | 241252e88b0526afbb97a01b9b1814d693688885 | [
"MIT"
] | null | null | null | assignment/migrations/0008_auto_20201218_1203.py | Magesh-sam/E-Assignment | 241252e88b0526afbb97a01b9b1814d693688885 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.4 on 2020-12-18 06:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('assignment', '0007_document'),
]
operations = [
migrations.RenameField(
model_name='document',
old_name='description',
new_name='assignmentname',
),
migrations.AddField(
model_name='document',
name='regno',
field=models.CharField(blank=True, max_length=255),
),
migrations.AddField(
model_name='document',
name='staffname',
field=models.CharField(blank=True, max_length=255),
),
]
| 24.586207 | 63 | 0.56662 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('assignment', '0007_document'),
]
operations = [
migrations.RenameField(
model_name='document',
old_name='description',
new_name='assignmentname',
),
migrations.AddField(
model_name='document',
name='regno',
field=models.CharField(blank=True, max_length=255),
),
migrations.AddField(
model_name='document',
name='staffname',
field=models.CharField(blank=True, max_length=255),
),
]
| true | true |
f713c211024b61164dea46dcd8cf5c99dd053cc0 | 8,371 | py | Python | datagateway_api/src/common/config.py | MRichards99/datagateway-api | 2e6133636fed950a16190d2f703f152c73bb5b1b | [
"Apache-2.0"
] | null | null | null | datagateway_api/src/common/config.py | MRichards99/datagateway-api | 2e6133636fed950a16190d2f703f152c73bb5b1b | [
"Apache-2.0"
] | null | null | null | datagateway_api/src/common/config.py | MRichards99/datagateway-api | 2e6133636fed950a16190d2f703f152c73bb5b1b | [
"Apache-2.0"
] | null | null | null | import json
import logging
from pathlib import Path
import sys
from typing import Optional
from pydantic import (
BaseModel,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
validator,
)
log = logging.getLogger()
def validate_extension(extension):
    """Normalise and validate the configured API extension.

    Surrounding whitespace is stripped; the result must begin with '/' and
    must not end with '/'. A ``ValueError`` is raised otherwise, which makes
    the application exit during configuration validation.

    :param extension: The extension for the API
    :return: The stripped, validated extension
    """
    trimmed = extension.strip()

    # Check the leading character first so the error messages match the
    # order callers expect.
    if not trimmed.startswith("/"):
        raise ValueError("must start with '/'")

    if trimmed.endswith("/"):
        raise ValueError("must not end with '/'")

    return trimmed
class DataGatewayAPI(BaseModel):
    """
    Configuration model class that implements pydantic's BaseModel class to allow for
    validation of the DataGatewayAPI config data using Python type annotations. It takes
    the backend into account, meaning only the config options for the backend used are
    required.
    """

    # Which backend serves requests; the validators below switch on this.
    backend: StrictStr
    # ICAT-backend options: Optional here, but required when backend ==
    # "python_icat" (enforced by require_icat_config_value).
    client_cache_size: Optional[StrictInt]
    client_pool_init_size: Optional[StrictInt]
    client_pool_max_size: Optional[StrictInt]
    # DB-backend option: Optional here, but required when backend == "db"
    # (enforced by require_db_config_value).
    db_url: Optional[StrictStr]
    # URL prefix the API is served under; validated below.
    extension: StrictStr
    # Further ICAT-backend options, required when backend == "python_icat".
    icat_check_cert: Optional[StrictBool]
    icat_url: Optional[StrictStr]

    # Reuse the module-level validate_extension() as a pydantic validator.
    _validate_extension = validator("extension", allow_reuse=True)(validate_extension)
@validator("db_url", always=True)
def require_db_config_value(cls, value, values): # noqa: B902, N805
"""
By default the `db_url` config field is optional so that it does not have to be
present in the config file if `backend` is set to `python_icat`. However, if the
`backend` is set to `db`, this validator esentially makes the `db_url` config
field mandatory. This means that an error is raised, at which point the
application exits, if a `db_url` config value is not present in the config file.
:param cls: :class:`DataGatewayAPI` pointer
:param value: The value of the given config field
:param values: The config field values loaded before the given config field
"""
if "backend" in values and values["backend"] == "db" and value is None:
raise TypeError("field required")
return value
@validator(
"client_cache_size",
"client_pool_init_size",
"client_pool_max_size",
"icat_check_cert",
"icat_url",
always=True,
)
def require_icat_config_value(cls, value, values): # noqa: B902, N805
"""
By default the above config fields that are passed to the `@validator` decorator
are optional so that they do not have to be present in the config file if
`backend` is set to `db`. However, if the `backend` is set to `python_icat`,
this validator esentially makes these config fields mandatory. This means that
an error is raised, at which point the application exits, if any of these config
values are not present in the config file.
:param cls: :class:`DataGatewayAPI` pointer
:param value: The value of the given config field
:param values: The config field values loaded before the given config field
"""
if "backend" in values and values["backend"] == "python_icat" and value is None:
raise TypeError("field required")
return value
def set_backend_type(self, backend_type):
"""
This setter is used as a way for automated tests to set the backend type. The
API can detect if the Flask app setup is from an automated test by checking the
app's config for a `TEST_BACKEND`. If this value exists (a KeyError will be
raised when the API is run normally, which will then grab the backend type from
`config.json`), it needs to be set using this function. This is required because
creating filters in the `QueryFilterFactory` is backend-specific so the backend
type must be fetched. This must be done using this module (rather than directly
importing and checking the Flask app's config) to avoid circular import issues.
"""
self.backend = backend_type
class Config:
"""
The behaviour of the BaseModel class can be controlled via this class.
"""
# Enables assignment validation on the BaseModel fields. Useful for when the
# backend type is changed using the set_backend_type function.
validate_assignment = True
class SearchAPI(BaseModel):
    """
    Configuration model class that implements pydantic's BaseModel class to allow for
    validation of the SearchAPI config data using Python type annotations.
    """

    # Unlike DataGatewayAPI there is no backend choice here, so every field
    # is unconditionally required (no Optional declarations).
    client_pool_init_size: StrictInt
    client_pool_max_size: StrictInt
    extension: StrictStr
    icat_check_cert: StrictBool
    icat_url: StrictStr

    # Reuse the module-level extension format check for this model's
    # `extension` field (`allow_reuse=True` permits sharing the function).
    _validate_extension = validator("extension", allow_reuse=True)(validate_extension)
class TestUserCredentials(BaseModel):
    """
    Username/password pair for the `test_user_credentials` config option on
    :class:`APIConfig`.
    """

    username: StrictStr
    password: StrictStr
class APIConfig(BaseModel):
    """
    Configuration model class that implements pydantic's BaseModel class to allow for
    validation of the API config data using Python type annotations. It ensures that
    all required config options exist before getting too far into the setup of the API.
    If a mandatory config option is missing or misspelled, or has a wrong value type,
    Pydantic raises a validation error with a breakdown of what was wrong and the
    application is exited.

    Config options used for testing are not checked here as they should only be used
    during tests, not in the typical running of the API.

    Some options used when running the API (host, debug_mode etc.) aren't mandatory
    when running the API in production (these options aren't used in the `wsgi.py`
    entrypoint). As a result, they're not present in `config_keys`. However, they
    are required when using `main.py` as an entrypoint. In any case of these
    specific missing config options when using that entrypoint, they are checked at
    API startup so any missing options will be caught quickly.
    """

    datagateway_api: Optional[DataGatewayAPI]
    debug_mode: Optional[StrictBool]
    flask_reloader: Optional[StrictBool]
    generate_swagger: StrictBool
    host: Optional[StrictStr]
    log_level: StrictStr
    log_location: StrictStr
    port: Optional[StrictStr]
    search_api: Optional[SearchAPI]
    test_mechanism: Optional[StrictStr]
    test_user_credentials: Optional[TestUserCredentials]

    # NOTE(review): the default `path` expression is evaluated once at import
    # time (standard Python default-argument behaviour); it resolves relative
    # to this file's location, three directories up.
    @classmethod
    def load(cls, path=Path(__file__).parent.parent.parent / "config.json"):
        """
        Loads the config data from the JSON file and returns it as a APIConfig pydantic
        model. Exits the application if it fails to locate the JSON config file or
        the APIConfig model validation fails.

        :param cls: :class:`APIConfig` pointer
        :param path: path to the configuration file
        :return: APIConfig model object that contains the config data
        """
        try:
            with open(path, encoding="utf-8") as target:
                data = json.load(target)
                return cls(**data)
        except (IOError, ValidationError) as error:
            # Any failure to read or validate the config is fatal; exit with
            # a message rather than propagating the exception.
            sys.exit(f"An error occurred while trying to load the config data: {error}")

    # This validator runs for the `search_api` field only; `datagateway_api`
    # is declared before it, so it is available in `values` (pydantic
    # validates fields in declaration order).
    @validator("search_api")
    def validate_api_extensions(cls, value, values):  # noqa: B902, N805
        """
        Checks that the DataGateway API and Search API extensions are not the same. An
        error is raised, at which point the application exits, if the extensions are the
        same.

        :param cls: :class:`APIConfig` pointer
        :param value: The value of the given config field
        :param values: The config field values loaded before the given config field
        """
        if (
            "datagateway_api" in values
            and values["datagateway_api"] is not None
            and value is not None
            and values["datagateway_api"].extension == value.extension
        ):
            raise ValueError(
                "extension cannot be the same as datagateway_api extension",
            )
        return value
# Module-level config singleton, loaded from the default config.json when
# this module is first imported.
config = APIConfig.load()
| 38.223744 | 88 | 0.687851 | import json
import logging
from pathlib import Path
import sys
from typing import Optional
from pydantic import (
BaseModel,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
validator,
)
log = logging.getLogger()
def validate_extension(extension):
extension = extension.strip()
if not extension.startswith("/"):
raise ValueError("must start with '/'")
if extension.endswith("/"):
raise ValueError("must not end with '/'")
return extension
class DataGatewayAPI(BaseModel):
backend: StrictStr
client_cache_size: Optional[StrictInt]
client_pool_init_size: Optional[StrictInt]
client_pool_max_size: Optional[StrictInt]
db_url: Optional[StrictStr]
extension: StrictStr
icat_check_cert: Optional[StrictBool]
icat_url: Optional[StrictStr]
_validate_extension = validator("extension", allow_reuse=True)(validate_extension)
@validator("db_url", always=True)
def require_db_config_value(cls, value, values):
if "backend" in values and values["backend"] == "db" and value is None:
raise TypeError("field required")
return value
@validator(
"client_cache_size",
"client_pool_init_size",
"client_pool_max_size",
"icat_check_cert",
"icat_url",
always=True,
)
def require_icat_config_value(cls, value, values):
if "backend" in values and values["backend"] == "python_icat" and value is None:
raise TypeError("field required")
return value
def set_backend_type(self, backend_type):
self.backend = backend_type
class Config:
validate_assignment = True
class SearchAPI(BaseModel):
client_pool_init_size: StrictInt
client_pool_max_size: StrictInt
extension: StrictStr
icat_check_cert: StrictBool
icat_url: StrictStr
_validate_extension = validator("extension", allow_reuse=True)(validate_extension)
class TestUserCredentials(BaseModel):
username: StrictStr
password: StrictStr
class APIConfig(BaseModel):
datagateway_api: Optional[DataGatewayAPI]
debug_mode: Optional[StrictBool]
flask_reloader: Optional[StrictBool]
generate_swagger: StrictBool
host: Optional[StrictStr]
log_level: StrictStr
log_location: StrictStr
port: Optional[StrictStr]
search_api: Optional[SearchAPI]
test_mechanism: Optional[StrictStr]
test_user_credentials: Optional[TestUserCredentials]
@classmethod
def load(cls, path=Path(__file__).parent.parent.parent / "config.json"):
try:
with open(path, encoding="utf-8") as target:
data = json.load(target)
return cls(**data)
except (IOError, ValidationError) as error:
sys.exit(f"An error occurred while trying to load the config data: {error}")
@validator("search_api")
def validate_api_extensions(cls, value, values):
if (
"datagateway_api" in values
and values["datagateway_api"] is not None
and value is not None
and values["datagateway_api"].extension == value.extension
):
raise ValueError(
"extension cannot be the same as datagateway_api extension",
)
return value
config = APIConfig.load()
| true | true |
f713c29c86cdc35f0168c2009748e732ac290b18 | 44,536 | py | Python | kolla/common/config.py | priteau/kolla | 2ee796bdce78aadaeaf42796a4f4dce9e7471ce0 | [
"Apache-2.0"
] | null | null | null | kolla/common/config.py | priteau/kolla | 2ee796bdce78aadaeaf42796a4f4dce9e7471ce0 | [
"Apache-2.0"
] | null | null | null | kolla/common/config.py | priteau/kolla | 2ee796bdce78aadaeaf42796a4f4dce9e7471ce0 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import os
from oslo_config import cfg
from oslo_config import types
from kolla.version import version_info as version
# Base Linux distributions supported for image building (--base choices).
BASE_OS_DISTRO = ['centos', 'rhel', 'ubuntu', 'debian']
# CPU architectures supported for image building (--base-arch choices).
BASE_ARCH = ['x86_64', 'ppc64le', 'aarch64']
# Default image tag for each base distro.
DEFAULT_BASE_TAGS = {
    'centos': '7',
    'rhel': '7',
    'debian': '10',
    'ubuntu': '18.04',
}
# Release number of each supported distro.
DISTRO_RELEASE = {
    'centos': '7',
    'rhel': '7',
    'debian': '10',
    'ubuntu': '18.04',
}
# OpenStack release name associated with each base distro.
OPENSTACK_RELEASE = {
    'centos': 'train',
    'rhel': 'train',
    'debian': 'master',
    'ubuntu': 'master',
}
# This is noarch repository so we will use it on all architectures
DELOREAN = \
    "https://trunk.rdoproject.org/centos7/current-passed-ci/delorean.repo"
DELOREAN_DEPS = "https://trunk.rdoproject.org/centos7/delorean-deps.repo"
# Valid values for the --type (install_type) CLI option.
INSTALL_TYPE_CHOICES = ['binary', 'source', 'rdo', 'rhos']
# TODO(mandre) check for file integrity instead of downloading from an HTTPS
# source
TARBALLS_BASE = "https://tarballs.openstack.org"
# Pre-defined image set profiles, selectable via the --profile CLI option.
# Each entry's default list holds image-name prefixes that select which
# images belong to the profile (see the [profiles] section in config files).
_PROFILE_OPTS = [
    cfg.ListOpt('infra',
                default=[
                    'ceph',
                    'certmonger',
                    'cron',
                    'elasticsearch',
                    'etcd',
                    'fluentd',
                    'haproxy',
                    'hacluster',
                    'keepalived',
                    'kibana',
                    'kolla-toolbox',
                    'logstash',
                    'mariadb',
                    'memcached',
                    'mongodb',
                    'opendaylight',
                    'openvswitch',
                    'ptp',
                    'qdrouterd',
                    'rabbitmq',
                    'redis',
                    'rsyslog',
                    'skydive',
                    'storm',
                    'tgtd',
                ],
                help='Infra images'),
    cfg.ListOpt('main',
                default=[
                    'ceilometer',
                    'cinder',
                    'glance',
                    'heat',
                    'horizon',
                    'iscsi',
                    'keystone',
                    'neutron',
                    'nova-',
                    'placement',
                    'swift',
                ],
                help='Main images'),
    cfg.ListOpt('aux',
                default=[
                    'aodh',
                    'blazar',
                    'cloudkitty',
                    'congress',
                    'designate',
                    'ec2-api',
                    'freezer',
                    'gnocchi',
                    'influxdb',
                    'ironic',
                    'kafka',
                    'karbor',
                    'kuryr',
                    'magnum',
                    'manila',
                    'masakari',
                    'mistral',
                    'monasca',
                    'murano',
                    'novajoin',
                    'octavia',
                    'panko',
                    'qinling',
                    'rally',
                    'redis',
                    'sahara',
                    'searchlight',
                    'senlin',
                    'solum',
                    'tacker',
                    'telegraf',
                    'trove',
                    'vitrage',
                    'zaqar',
                    'zookeeper',
                    'zun',
                ],
                help='Aux Images'),
    cfg.ListOpt('default',
                default=[
                    'chrony',
                    'cron',
                    'kolla-toolbox',
                    'fluentd',
                    'glance',
                    'haproxy',
                    'heat',
                    'horizon',
                    'keepalived',
                    'keystone',
                    'mariadb',
                    'memcached',
                    'neutron',
                    'nova-',
                    'placement',
                    'openvswitch',
                    'rabbitmq',
                ],
                help='Default images'),
]
# Machine hardware name of the build host (e.g. 'x86_64'); used as the
# default for --base-arch. NOTE(review): os.uname() is POSIX-only, so this
# module cannot be imported on Windows — presumably acceptable for kolla.
hostarch = os.uname()[4]

# Command-line options for the image build tool. The 'profile' help string
# interpolates the names from _PROFILE_OPTS above, so this list must be
# defined after it.
_CLI_OPTS = [
    cfg.StrOpt('base', short='b', default='centos',
               choices=BASE_OS_DISTRO,
               help='The distro type of the base image.'),
    cfg.StrOpt('base-tag', default='latest',
               help='The base distro image tag'),
    cfg.StrOpt('base-image',
               help='The base image name. Default is the same with base.'),
    cfg.StrOpt('base-arch', default=hostarch,
               choices=BASE_ARCH,
               help='The base architecture. Default is same as host.'),
    cfg.BoolOpt('use-dumb-init', default=True,
                help='Use dumb-init as init system in containers'),
    cfg.BoolOpt('debug', short='d', default=False,
                help='Turn on debugging log level'),
    cfg.BoolOpt('skip-parents', default=False,
                help='Do not rebuild parents of matched images'),
    cfg.BoolOpt('skip-existing', default=False,
                help='Do not rebuild images present in the docker cache'),
    cfg.DictOpt('build-args',
                help='Set docker build time variables'),
    cfg.BoolOpt('keep', default=False,
                help='Keep failed intermediate containers'),
    cfg.BoolOpt('list-dependencies', short='l',
                help='Show image dependencies (filtering supported)'),
    cfg.BoolOpt('list-images',
                help='Show all available images (filtering supported)'),
    cfg.StrOpt('namespace', short='n', default='kolla',
               help='The Docker namespace name'),
    cfg.StrOpt('network_mode', default=None,
               help='The network mode for Docker build. Example: host'),
    cfg.BoolOpt('cache', default=True,
                help='Use the Docker cache when building'),
    cfg.MultiOpt('profile', types.String(), short='p',
                 help=('Build a pre-defined set of images, see [profiles]'
                       ' section in config. The default profiles are:'
                       ' {}'.format(', '.join(
                           [opt.name for opt in _PROFILE_OPTS])
                       ))),
    cfg.BoolOpt('push', default=False,
                help='Push images after building'),
    cfg.IntOpt('push-threads', default=1, min=1,
               help=('The number of threads to user while pushing'
                     ' Images. Note: Docker can not handle threading'
                     ' push properly')),
    cfg.IntOpt('retries', short='r', default=3, min=0,
               help='The number of times to retry while building'),
    cfg.MultiOpt('regex', types.String(), positional=True,
                 help=('Build only images matching regex and its'
                       ' dependencies')),
    cfg.StrOpt('registry',
               help=('The docker registry host. The default registry host'
                     ' is Docker Hub')),
    cfg.StrOpt('save-dependency',
               help=('Path to the file to store the docker image'
                     ' dependency in Graphviz dot format')),
    cfg.StrOpt('format', short='f', default='json',
               choices=['json', 'none'],
               help='Format to write the final results in'),
    cfg.StrOpt('tarballs-base', default=TARBALLS_BASE,
               help='Base url to OpenStack tarballs'),
    cfg.StrOpt('type', short='t', default='binary',
               choices=INSTALL_TYPE_CHOICES,
               dest='install_type',
               help=('The method of the OpenStack install.')),
    cfg.IntOpt('threads', short='T', default=8, min=1,
               help=('The number of threads to use while building.'
                     ' (Note: setting to one will allow real time'
                     ' logging)')),
    cfg.StrOpt('tag', default=version.cached_version_string(),
               help='The Docker tag'),
    cfg.BoolOpt('template-only', default=False,
                help="Don't build images. Generate Dockerfile only"),
    cfg.IntOpt('timeout', default=120,
               help='Time in seconds after which any operation times out'),
    cfg.MultiOpt('template-override', types.String(),
                 help='Path to template override file'),
    cfg.MultiOpt('docker-dir', types.String(),
                 help=('Path to additional docker file template directory,'
                       ' can be specified multiple times'),
                 short='D', default=[]),
    cfg.StrOpt('logs-dir', help='Path to logs directory'),
    cfg.BoolOpt('pull', default=True,
                help='Attempt to pull a newer version of the base image'),
    cfg.StrOpt('work-dir', help=('Path to be used as working directory.'
                                 ' By default, a temporary dir is created')),
    cfg.BoolOpt('squash', default=False,
                help=('Squash the image layers. WARNING: it will consume lots'
                      ' of disk IO. "docker-squash" tool is required, install'
                      ' it by "pip install docker-squash"')),
    cfg.StrOpt('openstack-release', default='master',
               help='OpenStack release for building kolla-toolbox'),
    cfg.StrOpt('openstack-branch', default='master',
               help='Branch for source images'),
    cfg.BoolOpt('docker-healthchecks', default=True,
                help='Add Kolla docker healthcheck scripts in the image')
]
# Further build configuration options (image metadata, package manager
# selection, repo setup files, squash behaviour, package-cache cleanup).
_BASE_OPTS = [
    cfg.StrOpt('maintainer',
               default='Kolla Project (https://launchpad.net/kolla)',
               help='Content of the maintainer label'),
    cfg.StrOpt('distro_package_manager', default=None,
               help=('Use this parameter to override the default package '
                     'manager used by kolla. For example, if you want to use '
                     'yum on a system with dnf, set this to yum which will '
                     'use yum command in the build process')),
    cfg.StrOpt('base_package_type', default=None,
               help=('Set the package type of the distro. If not set then '
                     'the packaging type is set to "rpm" if a RHEL based '
                     'distro and "deb" if a Debian based distro.')),
    cfg.ListOpt('rpm_setup_config', default=[DELOREAN, DELOREAN_DEPS],
                help=('Comma separated list of .rpm or .repo file(s) '
                      'or URL(s) to install before building containers')),
    cfg.StrOpt('apt_sources_list', help=('Path to custom sources.list')),
    cfg.StrOpt('apt_preferences', help=('Path to custom apt/preferences')),
    cfg.BoolOpt('squash-cleanup', default=True,
                help='Remove source image from Docker after squashing'),
    cfg.StrOpt('squash-tmp-dir',
               help='Temporary directory to be used during squashing'),
    cfg.BoolOpt('clean_package_cache', default=True,
                help='Clean all package cache.')
]
SOURCES = {
'openstack-base': {
'type': 'url',
'location': ('$tarballs_base/requirements/'
'requirements-${openstack_branch}.tar.gz')},
'aodh-base': {
'type': 'url',
'location': ('$tarballs_base/aodh/'
'aodh-${openstack_branch}.tar.gz')},
'barbican-base': {
'type': 'url',
'location': ('$tarballs_base/barbican/'
'barbican-${openstack_branch}.tar.gz')},
'bifrost-base': {
'type': 'url',
'location': ('$tarballs_base/bifrost/'
'bifrost-${openstack_branch}.tar.gz')},
'blazar-base': {
'type': 'url',
'location': ('$tarballs_base/blazar/'
'blazar-${openstack_branch}.tar.gz')},
'ceilometer-base': {
'type': 'url',
'location': ('$tarballs_base/ceilometer/'
'ceilometer-${openstack_branch}.tar.gz')},
'ceilometer-base-plugin-panko': {
'type': 'url',
'location': ('$tarballs_base/panko/'
'panko-${openstack_branch}.tar.gz')},
'cinder-base': {
'type': 'url',
'location': ('$tarballs_base/cinder/'
'cinder-${openstack_branch}.tar.gz')},
'congress-base': {
'type': 'url',
'location': ('$tarballs_base/congress/'
'congress-${openstack_branch}.tar.gz')},
'cloudkitty-base': {
'type': 'url',
'location': ('$tarballs_base/cloudkitty/'
'cloudkitty-${openstack_branch}.tar.gz')},
'cyborg-base': {
'type': 'url',
'location': ('$tarballs_base/cyborg/'
'cyborg-${openstack_branch}.tar.gz')},
'designate-base': {
'type': 'url',
'location': ('$tarballs_base/designate/'
'designate-${openstack_branch}.tar.gz')},
'ec2-api': {
'type': 'url',
'location': ('$tarballs_base/ec2-api/'
'ec2-api-${openstack_branch}.tar.gz')},
'freezer-api': {
'type': 'url',
'location': ('$tarballs_base/freezer-api/'
'freezer-api-${openstack_branch}.tar.gz')},
'freezer-base': {
'type': 'url',
'location': ('$tarballs_base/freezer/'
'freezer-${openstack_branch}.tar.gz')},
'glance-base': {
'type': 'url',
'location': ('$tarballs_base/glance/'
'glance-${openstack_branch}.tar.gz')},
'gnocchi-base': {
'type': 'git',
'reference': 'master',
'location': ('https://github.com/gnocchixyz/'
'gnocchi.git')},
'heat-base': {
'type': 'url',
'location': ('$tarballs_base/heat/'
'heat-${openstack_branch}.tar.gz')},
'horizon': {
'type': 'url',
'location': ('$tarballs_base/horizon/'
'horizon-${openstack_branch}.tar.gz')},
'horizon-plugin-blazar-dashboard': {
'type': 'url',
'location': ('$tarballs_base/blazar-dashboard/'
'blazar-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-congress-dashboard': {
'type': 'url',
'location': ('$tarballs_base/congress-dashboard/'
'congress-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-cloudkitty-dashboard': {
'type': 'url',
'location': ('$tarballs_base/cloudkitty-dashboard/'
'cloudkitty-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-designate-dashboard': {
'type': 'url',
'location': ('$tarballs_base/designate-dashboard/'
'designate-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-fwaas-dashboard': {
'type': 'url',
'location': ('$tarballs_base/neutron-fwaas-dashboard/'
'neutron-fwaas-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-freezer-web-ui': {
'type': 'url',
'location': ('$tarballs_base/freezer-web-ui/'
'freezer-web-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-heat-dashboard': {
'type': 'url',
'location': ('$tarballs_base/heat-dashboard/'
'heat-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-ironic-ui': {
'type': 'url',
'location': ('$tarballs_base/ironic-ui/'
'ironic-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-karbor-dashboard': {
'type': 'url',
'location': ('$tarballs_base/karbor-dashboard/'
'karbor-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-magnum-ui': {
'type': 'url',
'location': ('$tarballs_base/magnum-ui/'
'magnum-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-manila-ui': {
'type': 'url',
'location': ('$tarballs_base/manila-ui/'
'manila-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-masakari-dashboard': {
'type': 'url',
'location': ('$tarballs_base/masakari-dashboard/'
'masakari-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-mistral-dashboard': {
'type': 'url',
'location': ('$tarballs_base/mistral-dashboard/'
'mistral-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-monasca-ui': {
'type': 'url',
'location': ('$tarballs_base/monasca-ui/'
'monasca-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-murano-dashboard': {
'type': 'url',
'location': ('$tarballs_base/murano-dashboard/'
'murano-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-neutron-vpnaas-dashboard': {
'type': 'url',
'location': ('$tarballs_base/neutron-vpnaas-dashboard/'
'neutron-vpnaas-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-octavia-dashboard': {
'type': 'url',
'location': ('$tarballs_base/octavia-dashboard/'
'octavia-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-qinling-dashboard': {
'type': 'url',
'location': ('$tarballs_base/qinling-dashboard/'
'qinling-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-sahara-dashboard': {
'type': 'url',
'location': ('$tarballs_base/sahara-dashboard/'
'sahara-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-searchlight-ui': {
'type': 'url',
'location': ('$tarballs_base/searchlight-ui/'
'searchlight-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-senlin-dashboard': {
'type': 'url',
'location': ('$tarballs_base/senlin-dashboard/'
'senlin-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-solum-dashboard': {
'type': 'url',
'location': ('$tarballs_base/solum-dashboard/'
'solum-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-tacker-dashboard': {
'type': 'url',
'location': ('$tarballs_base/tacker-horizon/'
'tacker-horizon-${openstack_branch}.tar.gz')},
'horizon-plugin-trove-dashboard': {
'type': 'url',
'location': ('$tarballs_base/trove-dashboard/'
'trove-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-vitrage-dashboard': {
'type': 'url',
'location': ('$tarballs_base/vitrage-dashboard/'
'vitrage-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-watcher-dashboard': {
'type': 'url',
'location': ('$tarballs_base/watcher-dashboard/'
'watcher-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-zaqar-ui': {
'type': 'url',
'location': ('$tarballs_base/zaqar-ui/'
'zaqar-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-zun-ui': {
'type': 'url',
'location': ('$tarballs_base/zun-ui/'
'zun-ui-${openstack_branch}.tar.gz')},
'ironic-base': {
'type': 'url',
'location': ('$tarballs_base/ironic/'
'ironic-${openstack_branch}.tar.gz')},
'ironic-inspector': {
'type': 'url',
'location': ('$tarballs_base/ironic-inspector/'
'ironic-inspector-${openstack_branch}.tar.gz')},
'karbor-base': {
'type': 'url',
'location': ('$tarballs_base/karbor/'
'karbor-${openstack_branch}.tar.gz')},
'keystone-base': {
'type': 'url',
'location': ('$tarballs_base/keystone/'
'keystone-${openstack_branch}.tar.gz')},
'kuryr-base': {
'type': 'url',
'location': ('$tarballs_base/kuryr/'
'kuryr-${openstack_branch}.tar.gz')},
'kuryr-libnetwork': {
'type': 'url',
'location': ('$tarballs_base/kuryr-libnetwork/'
'kuryr-libnetwork-${openstack_branch}.tar.gz')},
'magnum-base': {
'type': 'url',
'location': ('$tarballs_base/magnum/'
'magnum-${openstack_branch}.tar.gz')},
'manila-base': {
'type': 'url',
'location': ('$tarballs_base/manila/'
'manila-${openstack_branch}.tar.gz')},
'masakari-base': {
'type': 'url',
'location': ('$tarballs_base/masakari/'
'masakari-${openstack_branch}.tar.gz')},
'masakari-monitors': {
'type': 'url',
'location': ('$tarballs_base/masakari-monitors/'
'masakari-monitors-${openstack_branch}.tar.gz')},
'mistral-base': {
'type': 'url',
'location': ('$tarballs_base/mistral/'
'mistral-${openstack_branch}.tar.gz')},
'mistral-base-plugin-tacker': {
'type': 'url',
'location': ('$tarballs_base/tacker/'
'tacker-${openstack_branch}.tar.gz')},
'monasca-agent': {
'type': 'url',
'location': ('$tarballs_base/monasca-agent/'
'monasca-agent-${openstack_branch}.tar.gz')},
'monasca-api': {
'type': 'url',
'location': ('$tarballs_base/monasca-api/'
'monasca-api-${openstack_branch}.tar.gz')},
'monasca-log-api': {
'type': 'url',
'location': ('$tarballs_base/monasca-log-api/'
'monasca-log-api-${openstack_branch}.tar.gz')},
'monasca-notification': {
'type': 'url',
'location': ('$tarballs_base/monasca-notification/'
'monasca-notification-${openstack_branch}.tar.gz')},
'monasca-persister': {
'type': 'url',
'location': ('$tarballs_base/monasca-persister/'
'monasca-persister-${openstack_branch}.tar.gz')},
'monasca-statsd': {
'type': 'url',
'location': ('$tarballs_base/monasca-statsd/'
'monasca-statsd-${openstack_branch}.tar.gz')},
# FIXME(dszumski): Use openstack tar when infra is fixed
'monasca-thresh': {
'type': 'url',
'location': ('https://github.com/openstack/monasca-thresh/archive/'
'master.tar.gz')},
'monasca-thresh-additions-monasca-common': {
'type': 'url',
'location': ('$tarballs_base/monasca-common/'
'monasca-common-${openstack_branch}.tar.gz')},
'murano-base': {
'type': 'url',
'location': ('$tarballs_base/murano/'
'murano-${openstack_branch}.tar.gz')},
'neutron-base': {
'type': 'url',
'location': ('$tarballs_base/neutron/'
'neutron-${openstack_branch}.tar.gz')},
'neutron-base-plugin-neutron-fwaas': {
'type': 'url',
'location': ('$tarballs_base/neutron-fwaas/'
'neutron-fwaas-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-ansible': {
'type': 'url',
'location': ('$tarballs_base/networking-ansible/'
'networking-ansible-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-baremetal': {
'type': 'url',
'location': ('$tarballs_base/networking-baremetal/'
'networking-baremetal-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-generic-switch': {
'type': 'url',
'location': ('$tarballs_base/networking-generic-switch/'
'networking-generic-switch-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-mlnx': {
'type': 'url',
'location': ('$tarballs_base/networking-mlnx/'
'networking-mlnx-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-sfc': {
'type': 'url',
'location': ('$tarballs_base/networking-sfc/'
'networking-sfc-${openstack_branch}.tar.gz')},
'neutron-base-plugin-vmware-nsx': {
'type': 'url',
'location': ('$tarballs_base/vmware-nsx/'
'vmware-nsx-${openstack_branch}.tar.gz')},
'neutron-base-plugin-vpnaas-agent': {
'type': 'url',
'location': ('$tarballs_base/neutron-vpnaas/'
'neutron-vpnaas-${openstack_branch}.tar.gz')},
'neutron-bgp-dragent': {
'type': 'url',
'location': ('$tarballs_base/neutron-dynamic-routing/'
'neutron-dynamic-routing-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-odl': {
'type': 'url',
'location': ('$tarballs_base/networking-odl/'
'networking-odl-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-bgpvpn': {
'type': 'url',
'location': ('$tarballs_base/networking-bgpvpn/'
'networking-bgpvpn-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-l2gw': {
'type': 'url',
'location': ('$tarballs_base/networking-l2gw/'
'networking-l2gw-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-sfc': {
'type': 'url',
'location': ('$tarballs_base/networking-sfc/'
'networking-sfc-${openstack_branch}.tar.gz')},
'neutron-server-plugin-neutron-dynamic-routing': {
'type': 'url',
'location': ('$tarballs_base/neutron-dynamic-routing/'
'neutron-dynamic-routing-${openstack_branch}.tar.gz')},
'neutron-server-plugin-vmware-nsxlib': {
'type': 'url',
'location': ('$tarballs_base/vmware-nsxlib/'
'vmware-nsxlib-${openstack_branch}.tar.gz')},
'neutron-vpnaas-agent': {
'type': 'url',
'location': ('$tarballs_base/neutron-vpnaas/'
'neutron-vpnaas-${openstack_branch}.tar.gz')},
'neutron-server-ovn-plugin-networking-ovn': {
'type': 'url',
'location': ('$tarballs_base/networking-ovn/'
'networking-ovn-${openstack_branch}.tar.gz')},
'neutron-metadata-agent-ovn-plugin-networking-ovn': {
'type': 'url',
'location': ('$tarballs_base/networking-ovn/'
'networking-ovn-${openstack_branch}.tar.gz')},
'nova-base': {
'type': 'url',
'location': ('$tarballs_base/nova/'
'nova-${openstack_branch}.tar.gz')},
'nova-base-plugin-blazar': {
'type': 'url',
'location': ('$tarballs_base/blazar-nova/'
'blazar-nova-${openstack_branch}.tar.gz')},
'nova-base-plugin-mksproxy': {
'type': 'url',
'location': ('$tarballs_base/nova-mksproxy/'
'nova-mksproxy-master.tar.gz')},
'novajoin-base': {
'type': 'url',
'location': ('$tarballs_base/novajoin/'
'novajoin-master.tar.gz')},
'octavia-base': {
'type': 'url',
'location': ('$tarballs_base/octavia/'
'octavia-${openstack_branch}.tar.gz')},
'panko-base': {
'type': 'url',
'location': ('$tarballs_base/panko/'
'panko-${openstack_branch}.tar.gz')},
'placement-base': {
'type': 'url',
'location': ('$tarballs_base/placement/'
'placement-${openstack_branch}.tar.gz')},
'qinling-base': {
'type': 'url',
'location': ('$tarballs_base/qinling/'
'qinling-${openstack_branch}.tar.gz')},
'tempest-plugin-tempest-conf': {
'type': 'url',
'location': ('$tarballs_base/python-tempestconf/'
'python-tempestconf-master.tar.gz')},
'tempest-plugin-barbican': {
'type': 'url',
'location': ('$tarballs_base/barbican-tempest-plugin/'
'barbican-tempest-plugin-master.tar.gz')},
'tempest-plugin-blazar': {
'type': 'url',
'location': ('$tarballs_base/blazar-tempest-plugin/'
'blazar-tempest-plugin-master.tar.gz')},
'tempest-plugin-cinder': {
'type': 'url',
'location': ('$tarballs_base/cinder-tempest-plugin/'
'cinder-tempest-plugin-master.tar.gz')},
'tempest-plugin-congress': {
'type': 'url',
'location': ('$tarballs_base/congress-tempest-plugin/'
'congress-tempest-plugin-master.tar.gz')},
'tempest-plugin-ec2api': {
'type': 'url',
'location': ('$tarballs_base/ec2api-tempest-plugin/'
'ec2api-tempest-plugin-master.tar.gz')},
'tempest-plugin-heat': {
'type': 'url',
'location': ('$tarballs_base/heat-tempest-plugin/'
'heat-tempest-plugin-master.tar.gz')},
'tempest-plugin-ironic': {
'type': 'url',
'location': ('$tarballs_base/ironic-tempest-plugin/'
'ironic-tempest-plugin-master.tar.gz')},
'tempest-plugin-keystone': {
'type': 'url',
'location': ('$tarballs_base/keystone-tempest-plugin/'
'keystone-tempest-plugin-master.tar.gz')},
'tempest-plugin-magnum': {
'type': 'url',
'location': ('$tarballs_base/magnum-tempest-plugin/'
'magnum-tempest-plugin-master.tar.gz')},
'tempest-plugin-manila': {
'type': 'url',
'location': ('$tarballs_base/manila-tempest-plugin/'
'manila-tempest-plugin-master.tar.gz')},
'tempest-plugin-mistral': {
'type': 'url',
'location': ('$tarballs_base/mistral-tempest-plugin/'
'mistral-tempest-plugin-master.tar.gz')},
'tempest-plugin-monasca': {
'type': 'url',
'location': ('$tarballs_base/monasca-tempest-plugin/'
'monasca-tempest-plugin-master.tar.gz')},
'tempest-plugin-murano': {
'type': 'url',
'location': ('$tarballs_base/murano-tempest-plugin/'
'murano-tempest-plugin-master.tar.gz')},
'tempest-plugin-neutron': {
'type': 'url',
'location': ('$tarballs_base/neutron-tempest-plugin/'
'neutron-tempest-plugin-master.tar.gz')},
'tempest-plugin-patrole': {
'type': 'url',
'location': ('$tarballs_base/patrole/'
'patrole-master.tar.gz')},
'tempest-plugin-telemetry': {
'type': 'url',
'location': ('$tarballs_base/telemetry-tempest-plugin/'
'telemetry-tempest-plugin-master.tar.gz')},
'tempest-plugin-tripleo-common': {
'type': 'url',
'location': ('$tarballs_base/tripleo-common-tempest-plugin/'
'tripleo-common-tempest-plugin-master.'
'tar.gz')},
'tempest-plugin-trove': {
'type': 'url',
'location': ('$tarballs_base/trove-tempest-plugin/'
'trove-tempest-plugin-master.tar.gz')},
'tempest-plugin-vitrage': {
'type': 'url',
'location': ('$tarballs_base/vitrage-tempest-plugin/'
'vitrage-tempest-plugin-master.tar.gz')},
'tempest-plugin-watcher': {
'type': 'url',
'location': ('$tarballs_base/watcher-tempest-plugin/'
'watcher-tempest-plugin-master.tar.gz')},
'tempest-plugin-zaqar': {
'type': 'url',
'location': ('$tarballs_base/zaqar-tempest-plugin/'
'zaqar-tempest-plugin-master.tar.gz')},
'rally': {
'type': 'url',
'location': ('$tarballs_base/rally/'
'rally-master.tar.gz')},
'sahara-base': {
'type': 'url',
'location': ('$tarballs_base/sahara/'
'sahara-${openstack_branch}.tar.gz')},
'sahara-base-plugin-ambari': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-ambari/'
'sahara-plugin-ambari-${openstack_branch}.tar.gz')},
'sahara-base-plugin-cdh': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-cdh/'
'sahara-plugin-cdh-${openstack_branch}.tar.gz')},
'sahara-base-plugin-mapr': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-mapr/'
'sahara-plugin-mapr-${openstack_branch}.tar.gz')},
'sahara-base-plugin-spark': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-spark/'
'sahara-plugin-spark-${openstack_branch}.tar.gz')},
'sahara-base-plugin-storm': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-storm/'
'sahara-plugin-storm-${openstack_branch}.tar.gz')},
'sahara-base-plugin-vanilla': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-vanilla/'
'sahara-plugin-vanilla-${openstack_branch}.tar.gz')},
'searchlight-base': {
'type': 'url',
'location': ('$tarballs_base/searchlight/'
'searchlight-${openstack_branch}.tar.gz')},
'senlin-base': {
'type': 'url',
'location': ('$tarballs_base/senlin/'
'senlin-${openstack_branch}.tar.gz')},
'solum-base': {
'type': 'url',
'location': ('$tarballs_base/solum/'
'solum-${openstack_branch}.tar.gz')},
'swift-base': {
'type': 'url',
'location': ('$tarballs_base/swift/'
'swift-${openstack_branch}.tar.gz')},
'tacker-base': {
'type': 'url',
'location': ('$tarballs_base/tacker/'
'tacker-${openstack_branch}.tar.gz')},
'tacker-base-plugin-networking-sfc': {
'type': 'url',
'location': ('$tarballs_base/networking-sfc/'
'networking-sfc-${openstack_branch}.tar.gz')},
'tempest': {
'type': 'url',
'location': ('$tarballs_base/tempest/'
'tempest-master.tar.gz')},
'tripleoclient': {
'type': 'url',
'location': ('$tarballs_base/python-tripleoclient/'
'tripleoclient-12.3.0.tar.gz')},
'trove-base': {
'type': 'url',
'location': ('$tarballs_base/trove/'
'trove-${openstack_branch}.tar.gz')},
'vitrage-base': {
'type': 'url',
'location': ('$tarballs_base/vitrage/'
'vitrage-${openstack_branch}.tar.gz')},
'vmtp': {
'type': 'url',
'location': ('$tarballs_base/vmtp/'
'vmtp-master.tar.gz')},
'watcher-base': {
'type': 'url',
'location': ('$tarballs_base/watcher/'
'watcher-${openstack_branch}.tar.gz')},
'zaqar-base': {
'type': 'url',
'location': ('$tarballs_base/zaqar/'
'zaqar-${openstack_branch}.tar.gz')},
'zun-base': {
'type': 'url',
'location': ('$tarballs_base/zun/'
'zun-${openstack_branch}.tar.gz')}
}
# NOTE(SamYaple): Only increment the UID. Never reuse old or removed UIDs.
# Starting point 42400+ was chosen arbitrarily to ensure no conflicts
# uid/gid reserved for every user created inside the images.  Nearly all
# users get gid == uid; the only exceptions are encoded explicitly below.
# Gaps in the 42400+ sequence (42419, 42424, 42455, 42456) are retired ids
# that must never be reused.
_DEFAULT_USER_IDS = {
    'kolla-user': 42400,
    'ansible-user': 42401,
    'aodh-user': 42402,
    'barbican-user': 42403,
    'bifrost-user': 42404,
    'ceilometer-user': 42405,
    'chrony-user': 42406,
    'cinder-user': 42407,
    'cloudkitty-user': 42408,
    'collectd-user': 42409,
    'congress-user': 42410,
    'designate-user': 42411,
    'elasticsearch-user': 42412,
    'etcd-user': 42413,
    'freezer-user': 42414,
    'glance-user': 42415,
    'gnocchi-user': 42416,
    'grafana-user': 42417,
    'heat-user': 42418,
    'horizon-user': 42420,
    'influxdb-user': 42421,
    'ironic-user': 42422,
    'kafka-user': 42423,
    'keystone-user': 42425,
    'kibana-user': 42426,
    'qemu-user': 42427,
    'magnum-user': 42428,
    'manila-user': 42429,
    'mistral-user': 42430,
    'monasca-user': 42431,
    'mongodb-user': 42432,
    'murano-user': 42433,
    'mysql-user': 42434,
    'neutron-user': 42435,
    'nova-user': 42436,
    'octavia-user': 42437,
    'panko-user': 42438,
    'rabbitmq-user': 42439,
    'rally-user': 42440,
    'sahara-user': 42441,
    'searchlight-user': 42442,
    'senlin-user': 42443,
    'solum-user': 42444,
    'swift-user': 42445,
    'tacker-user': 42446,
    'td-agent-user': 42447,
    'telegraf-user': 42448,
    'trove-user': 42449,
    'vmtp-user': 42450,
    'watcher-user': 42451,
    'zaqar-user': 42452,
    'zookeeper-user': 42453,
    'haproxy-user': 42454,
    'ceph-user': 64045,  # NOTE: deliberately outside the 42400+ range
    'memcached-user': 42457,
    'karbor-user': 42458,
    'vitrage-user': 42459,
    'redis-user': 42460,
    'ironic-inspector-user': 42461,
    'odl-user': 42462,
    'zun-user': 42463,
    'dragonflow-user': 42464,  # unused user (dragonflow dropped)
    'qdrouterd-user': 42465,
    'ec2api-user': 42466,
    'sensu-user': 42467,
    'skydive-user': 42468,
    'kuryr-user': 42469,
    'novajoin-user': 42470,
    'blazar-user': 42471,
    'prometheus-user': 42472,
    'libvirt-user': 42473,  # unused user, but we need the group for socket access
    'fluentd-user': 42474,
    'almanach-user': 42475,  # unused user (almanach dropped)
    'openvswitch-user': 42476,  # unused user
    'hugetlbfs-user': 42477,  # unused user, but we need the group for vhost socket
    'logstash-user': 42478,
    'storm-user': 42479,
    'tempest-user': 42480,
    'nfast-user': 42481,  # unused user, but we need the group for thales hsm
    'placement-user': 42482,
    'cyborg-user': 42483,
    'qinling-user': 42484,
    'masakari-user': 42485,
}

# Users whose gid intentionally differs from their uid.
_GID_OVERRIDES = {
    'mongodb-user': 65534,
}

# Public mapping consumed by gen_all_user_opts(): name -> {'uid': ..., 'gid': ...}.
USERS = {
    name: {'uid': uid, 'gid': _GID_OVERRIDES.get(name, uid)}
    for name, uid in _DEFAULT_USER_IDS.items()
}
def get_source_opts(type_=None, location=None, reference=None):
    """Return the option schema for one [image-source] config section.

    The given values become the option defaults.
    """
    type_opt = cfg.StrOpt('type', choices=['local', 'git', 'url'],
                          default=type_,
                          help='Source location type')
    location_opt = cfg.StrOpt('location', default=location,
                              help='The location for source install')
    reference_opt = cfg.StrOpt('reference', default=reference,
                               help=('Git reference to pull, commit sha, tag '
                                     'or branch name'))
    return [type_opt, location_opt, reference_opt]
def get_user_opts(uid, gid):
    """Return the option schema for one per-user config section."""
    uid_opt = cfg.IntOpt('uid', default=uid, help='The user id')
    gid_opt = cfg.IntOpt('gid', default=gid, help='The group id')
    return [uid_opt, gid_opt]
def gen_all_user_opts():
    """Yield (section name, option list) pairs for every entry in USERS."""
    for name, ids in USERS.items():
        yield name, get_user_opts(ids['uid'], ids['gid'])
def gen_all_source_opts():
    """Yield (section name, option list) pairs for every entry in SOURCES."""
    for name, src in SOURCES.items():
        # 'reference' is only present for git sources, hence .get().
        yield name, get_source_opts(src['type'],
                                    src['location'],
                                    src.get('reference'))
def list_opts():
    """List all option groups (for oslo-config-generator discovery)."""
    static_groups = [
        (None, _CLI_OPTS),
        (None, _BASE_OPTS),
        ('profiles', _PROFILE_OPTS),
    ]
    return itertools.chain(static_groups,
                           gen_all_source_opts(),
                           gen_all_user_opts())
def parse(conf, args, usage=None, prog=None,
          default_config_files=None):
    """Register all kolla options on *conf* and parse CLI/config input.

    After parsing, derive the defaults that depend on the chosen base
    distro (base tag, OpenStack release/branch, base image name).
    """
    # Static option groups.
    conf.register_cli_opts(_CLI_OPTS)
    conf.register_opts(_BASE_OPTS)
    conf.register_opts(_PROFILE_OPTS, group='profiles')
    # One dynamic group per source and per user.
    for group, opts in gen_all_source_opts():
        conf.register_opts(opts, group)
    for group, opts in gen_all_user_opts():
        conf.register_opts(opts, group)

    conf(args=args,
         project='kolla',
         usage=usage,
         prog=prog,
         version=version.cached_version_string(),
         default_config_files=default_config_files)

    # NOTE(jeffrey4l): derive the default base tag from the base option.
    conf.set_default('base_tag', DEFAULT_BASE_TAGS.get(conf.base))
    conf.set_default('openstack_release', OPENSTACK_RELEASE.get(conf.base))
    release = conf.openstack_release
    branch_prefix = '' if release == 'master' else 'stable-'
    conf.set_default('openstack_branch', branch_prefix + release)
    if not conf.base_image:
        conf.base_image = conf.base
| 35.374106 | 78 | 0.505793 |
import itertools
import os
from oslo_config import cfg
from oslo_config import types
from kolla.version import version_info as version
# Base distributions and CPU architectures the image build supports.
BASE_OS_DISTRO = ['centos', 'rhel', 'ubuntu', 'debian']
BASE_ARCH = ['x86_64', 'ppc64le', 'aarch64']
# Default container image tag pulled for each base distro.
DEFAULT_BASE_TAGS = {
    'centos': '7',
    'rhel': '7',
    'debian': '10',
    'ubuntu': '18.04',
}
# Release identifier of each base distro.
DISTRO_RELEASE = {
    'centos': '7',
    'rhel': '7',
    'debian': '10',
    'ubuntu': '18.04',
}
# Default OpenStack release built on top of each base distro.
OPENSTACK_RELEASE = {
    'centos': 'train',
    'rhel': 'train',
    'debian': 'master',
    'ubuntu': 'master',
}
# RDO trunk (Delorean) yum repositories, default for rpm_setup_config.
DELOREAN = \
    "https://trunk.rdoproject.org/centos7/current-passed-ci/delorean.repo"
DELOREAN_DEPS = "https://trunk.rdoproject.org/centos7/delorean-deps.repo"
# Valid values for the --type (install_type) CLI option.
INSTALL_TYPE_CHOICES = ['binary', 'source', 'rdo', 'rhos']
# Default base URL for OpenStack source tarballs ($tarballs_base).
TARBALLS_BASE = "https://tarballs.openstack.org"
# Named groups of image-name prefixes selectable with --profile.
# Each ListOpt default below is the set of images the profile builds.
_PROFILE_OPTS = [
    # Infrastructure services (no OpenStack APIs).
    cfg.ListOpt('infra',
                default=[
                    'ceph',
                    'certmonger',
                    'cron',
                    'elasticsearch',
                    'etcd',
                    'fluentd',
                    'haproxy',
                    'hacluster',
                    'keepalived',
                    'kibana',
                    'kolla-toolbox',
                    'logstash',
                    'mariadb',
                    'memcached',
                    'mongodb',
                    'opendaylight',
                    'openvswitch',
                    'ptp',
                    'qdrouterd',
                    'rabbitmq',
                    'redis',
                    'rsyslog',
                    'skydive',
                    'storm',
                    'tgtd',
                ],
                help='Infra images'),
    # Core OpenStack services.
    cfg.ListOpt('main',
                default=[
                    'ceilometer',
                    'cinder',
                    'glance',
                    'heat',
                    'horizon',
                    'iscsi',
                    'keystone',
                    'neutron',
                    'nova-',
                    'placement',
                    'swift',
                ],
                help='Main images'),
    # Additional/optional OpenStack services.
    cfg.ListOpt('aux',
                default=[
                    'aodh',
                    'blazar',
                    'cloudkitty',
                    'congress',
                    'designate',
                    'ec2-api',
                    'freezer',
                    'gnocchi',
                    'influxdb',
                    'ironic',
                    'kafka',
                    'karbor',
                    'kuryr',
                    'magnum',
                    'manila',
                    'masakari',
                    'mistral',
                    'monasca',
                    'murano',
                    'novajoin',
                    'octavia',
                    'panko',
                    'qinling',
                    'rally',
                    'redis',
                    'sahara',
                    'searchlight',
                    'senlin',
                    'solum',
                    'tacker',
                    'telegraf',
                    'trove',
                    'vitrage',
                    'zaqar',
                    'zookeeper',
                    'zun',
                ],
                help='Aux Images'),
    # What gets built when no profile is requested.
    cfg.ListOpt('default',
                default=[
                    'chrony',
                    'cron',
                    'kolla-toolbox',
                    'fluentd',
                    'glance',
                    'haproxy',
                    'heat',
                    'horizon',
                    'keepalived',
                    'keystone',
                    'mariadb',
                    'memcached',
                    'neutron',
                    'nova-',
                    'placement',
                    'openvswitch',
                    'rabbitmq',
                ],
                help='Default images'),
]
# Machine hardware name of the build host (uname field 4), used as the
# default for --base-arch.
hostarch = os.uname()[4]

# Command-line options for kolla-build.  Registered via register_cli_opts()
# in parse(), so each is also settable in the [DEFAULT] config section.
_CLI_OPTS = [
    cfg.StrOpt('base', short='b', default='centos',
               choices=BASE_OS_DISTRO,
               help='The distro type of the base image.'),
    cfg.StrOpt('base-tag', default='latest',
               help='The base distro image tag'),
    cfg.StrOpt('base-image',
               help='The base image name. Default is the same with base.'),
    cfg.StrOpt('base-arch', default=hostarch,
               choices=BASE_ARCH,
               help='The base architecture. Default is same as host.'),
    cfg.BoolOpt('use-dumb-init', default=True,
                help='Use dumb-init as init system in containers'),
    cfg.BoolOpt('debug', short='d', default=False,
                help='Turn on debugging log level'),
    cfg.BoolOpt('skip-parents', default=False,
                help='Do not rebuild parents of matched images'),
    cfg.BoolOpt('skip-existing', default=False,
                help='Do not rebuild images present in the docker cache'),
    cfg.DictOpt('build-args',
                help='Set docker build time variables'),
    cfg.BoolOpt('keep', default=False,
                help='Keep failed intermediate containers'),
    cfg.BoolOpt('list-dependencies', short='l',
                help='Show image dependencies (filtering supported)'),
    cfg.BoolOpt('list-images',
                help='Show all available images (filtering supported)'),
    cfg.StrOpt('namespace', short='n', default='kolla',
               help='The Docker namespace name'),
    cfg.StrOpt('network_mode', default=None,
               help='The network mode for Docker build. Example: host'),
    cfg.BoolOpt('cache', default=True,
                help='Use the Docker cache when building'),
    cfg.MultiOpt('profile', types.String(), short='p',
                 help=('Build a pre-defined set of images, see [profiles]'
                       ' section in config. The default profiles are:'
                       ' {}'.format(', '.join(
                           [opt.name for opt in _PROFILE_OPTS])
                       ))),
    cfg.BoolOpt('push', default=False,
                help='Push images after building'),
    # BUG FIX: help text previously read "threads to user while pushing".
    cfg.IntOpt('push-threads', default=1, min=1,
               help=('The number of threads to use while pushing'
                     ' Images. Note: Docker can not handle threading'
                     ' push properly')),
    cfg.IntOpt('retries', short='r', default=3, min=0,
               help='The number of times to retry while building'),
    cfg.MultiOpt('regex', types.String(), positional=True,
                 help=('Build only images matching regex and its'
                       ' dependencies')),
    cfg.StrOpt('registry',
               help=('The docker registry host. The default registry host'
                     ' is Docker Hub')),
    cfg.StrOpt('save-dependency',
               help=('Path to the file to store the docker image'
                     ' dependency in Graphviz dot format')),
    cfg.StrOpt('format', short='f', default='json',
               choices=['json', 'none'],
               help='Format to write the final results in'),
    cfg.StrOpt('tarballs-base', default=TARBALLS_BASE,
               help='Base url to OpenStack tarballs'),
    cfg.StrOpt('type', short='t', default='binary',
               choices=INSTALL_TYPE_CHOICES,
               dest='install_type',
               help=('The method of the OpenStack install.')),
    cfg.IntOpt('threads', short='T', default=8, min=1,
               help=('The number of threads to use while building.'
                     ' (Note: setting to one will allow real time'
                     ' logging)')),
    cfg.StrOpt('tag', default=version.cached_version_string(),
               help='The Docker tag'),
    cfg.BoolOpt('template-only', default=False,
                help="Don't build images. Generate Dockerfile only"),
    cfg.IntOpt('timeout', default=120,
               help='Time in seconds after which any operation times out'),
    cfg.MultiOpt('template-override', types.String(),
                 help='Path to template override file'),
    cfg.MultiOpt('docker-dir', types.String(),
                 help=('Path to additional docker file template directory,'
                       ' can be specified multiple times'),
                 short='D', default=[]),
    cfg.StrOpt('logs-dir', help='Path to logs directory'),
    cfg.BoolOpt('pull', default=True,
                help='Attempt to pull a newer version of the base image'),
    cfg.StrOpt('work-dir', help=('Path to be used as working directory.'
                                 ' By default, a temporary dir is created')),
    cfg.BoolOpt('squash', default=False,
                help=('Squash the image layers. WARNING: it will consume lots'
                      ' of disk IO. "docker-squash" tool is required, install'
                      ' it by "pip install docker-squash"')),
    cfg.StrOpt('openstack-release', default='master',
               help='OpenStack release for building kolla-toolbox'),
    cfg.StrOpt('openstack-branch', default='master',
               help='Branch for source images'),
    cfg.BoolOpt('docker-healthchecks', default=True,
                help='Add Kolla docker healthcheck scripts in the image')
]
# Options shared by every image build; settable in the config file only
# (registered with register_opts(), not register_cli_opts()).
_BASE_OPTS = [
    cfg.StrOpt('maintainer',
               default='Kolla Project (https://launchpad.net/kolla)',
               help='Content of the maintainer label'),
    cfg.StrOpt('distro_package_manager', default=None,
               help=('Use this parameter to override the default package '
                     'manager used by kolla. For example, if you want to use '
                     'yum on a system with dnf, set this to yum which will '
                     'use yum command in the build process')),
    cfg.StrOpt('base_package_type', default=None,
               help=('Set the package type of the distro. If not set then '
                     'the packaging type is set to "rpm" if a RHEL based '
                     'distro and "deb" if a Debian based distro.')),
    # Defaults to the RDO trunk repos defined above (DELOREAN, DELOREAN_DEPS).
    cfg.ListOpt('rpm_setup_config', default=[DELOREAN, DELOREAN_DEPS],
                help=('Comma separated list of .rpm or .repo file(s) '
                      'or URL(s) to install before building containers')),
    cfg.StrOpt('apt_sources_list', help=('Path to custom sources.list')),
    cfg.StrOpt('apt_preferences', help=('Path to custom apt/preferences')),
    cfg.BoolOpt('squash-cleanup', default=True,
                help='Remove source image from Docker after squashing'),
    cfg.StrOpt('squash-tmp-dir',
               help='Temporary directory to be used during squashing'),
    cfg.BoolOpt('clean_package_cache', default=True,
                help='Clean all package cache.')
]
SOURCES = {
'openstack-base': {
'type': 'url',
'location': ('$tarballs_base/requirements/'
'requirements-${openstack_branch}.tar.gz')},
'aodh-base': {
'type': 'url',
'location': ('$tarballs_base/aodh/'
'aodh-${openstack_branch}.tar.gz')},
'barbican-base': {
'type': 'url',
'location': ('$tarballs_base/barbican/'
'barbican-${openstack_branch}.tar.gz')},
'bifrost-base': {
'type': 'url',
'location': ('$tarballs_base/bifrost/'
'bifrost-${openstack_branch}.tar.gz')},
'blazar-base': {
'type': 'url',
'location': ('$tarballs_base/blazar/'
'blazar-${openstack_branch}.tar.gz')},
'ceilometer-base': {
'type': 'url',
'location': ('$tarballs_base/ceilometer/'
'ceilometer-${openstack_branch}.tar.gz')},
'ceilometer-base-plugin-panko': {
'type': 'url',
'location': ('$tarballs_base/panko/'
'panko-${openstack_branch}.tar.gz')},
'cinder-base': {
'type': 'url',
'location': ('$tarballs_base/cinder/'
'cinder-${openstack_branch}.tar.gz')},
'congress-base': {
'type': 'url',
'location': ('$tarballs_base/congress/'
'congress-${openstack_branch}.tar.gz')},
'cloudkitty-base': {
'type': 'url',
'location': ('$tarballs_base/cloudkitty/'
'cloudkitty-${openstack_branch}.tar.gz')},
'cyborg-base': {
'type': 'url',
'location': ('$tarballs_base/cyborg/'
'cyborg-${openstack_branch}.tar.gz')},
'designate-base': {
'type': 'url',
'location': ('$tarballs_base/designate/'
'designate-${openstack_branch}.tar.gz')},
'ec2-api': {
'type': 'url',
'location': ('$tarballs_base/ec2-api/'
'ec2-api-${openstack_branch}.tar.gz')},
'freezer-api': {
'type': 'url',
'location': ('$tarballs_base/freezer-api/'
'freezer-api-${openstack_branch}.tar.gz')},
'freezer-base': {
'type': 'url',
'location': ('$tarballs_base/freezer/'
'freezer-${openstack_branch}.tar.gz')},
'glance-base': {
'type': 'url',
'location': ('$tarballs_base/glance/'
'glance-${openstack_branch}.tar.gz')},
'gnocchi-base': {
'type': 'git',
'reference': 'master',
'location': ('https://github.com/gnocchixyz/'
'gnocchi.git')},
'heat-base': {
'type': 'url',
'location': ('$tarballs_base/heat/'
'heat-${openstack_branch}.tar.gz')},
'horizon': {
'type': 'url',
'location': ('$tarballs_base/horizon/'
'horizon-${openstack_branch}.tar.gz')},
'horizon-plugin-blazar-dashboard': {
'type': 'url',
'location': ('$tarballs_base/blazar-dashboard/'
'blazar-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-congress-dashboard': {
'type': 'url',
'location': ('$tarballs_base/congress-dashboard/'
'congress-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-cloudkitty-dashboard': {
'type': 'url',
'location': ('$tarballs_base/cloudkitty-dashboard/'
'cloudkitty-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-designate-dashboard': {
'type': 'url',
'location': ('$tarballs_base/designate-dashboard/'
'designate-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-fwaas-dashboard': {
'type': 'url',
'location': ('$tarballs_base/neutron-fwaas-dashboard/'
'neutron-fwaas-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-freezer-web-ui': {
'type': 'url',
'location': ('$tarballs_base/freezer-web-ui/'
'freezer-web-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-heat-dashboard': {
'type': 'url',
'location': ('$tarballs_base/heat-dashboard/'
'heat-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-ironic-ui': {
'type': 'url',
'location': ('$tarballs_base/ironic-ui/'
'ironic-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-karbor-dashboard': {
'type': 'url',
'location': ('$tarballs_base/karbor-dashboard/'
'karbor-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-magnum-ui': {
'type': 'url',
'location': ('$tarballs_base/magnum-ui/'
'magnum-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-manila-ui': {
'type': 'url',
'location': ('$tarballs_base/manila-ui/'
'manila-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-masakari-dashboard': {
'type': 'url',
'location': ('$tarballs_base/masakari-dashboard/'
'masakari-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-mistral-dashboard': {
'type': 'url',
'location': ('$tarballs_base/mistral-dashboard/'
'mistral-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-monasca-ui': {
'type': 'url',
'location': ('$tarballs_base/monasca-ui/'
'monasca-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-murano-dashboard': {
'type': 'url',
'location': ('$tarballs_base/murano-dashboard/'
'murano-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-neutron-vpnaas-dashboard': {
'type': 'url',
'location': ('$tarballs_base/neutron-vpnaas-dashboard/'
'neutron-vpnaas-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-octavia-dashboard': {
'type': 'url',
'location': ('$tarballs_base/octavia-dashboard/'
'octavia-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-qinling-dashboard': {
'type': 'url',
'location': ('$tarballs_base/qinling-dashboard/'
'qinling-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-sahara-dashboard': {
'type': 'url',
'location': ('$tarballs_base/sahara-dashboard/'
'sahara-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-searchlight-ui': {
'type': 'url',
'location': ('$tarballs_base/searchlight-ui/'
'searchlight-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-senlin-dashboard': {
'type': 'url',
'location': ('$tarballs_base/senlin-dashboard/'
'senlin-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-solum-dashboard': {
'type': 'url',
'location': ('$tarballs_base/solum-dashboard/'
'solum-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-tacker-dashboard': {
'type': 'url',
'location': ('$tarballs_base/tacker-horizon/'
'tacker-horizon-${openstack_branch}.tar.gz')},
'horizon-plugin-trove-dashboard': {
'type': 'url',
'location': ('$tarballs_base/trove-dashboard/'
'trove-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-vitrage-dashboard': {
'type': 'url',
'location': ('$tarballs_base/vitrage-dashboard/'
'vitrage-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-watcher-dashboard': {
'type': 'url',
'location': ('$tarballs_base/watcher-dashboard/'
'watcher-dashboard-${openstack_branch}.tar.gz')},
'horizon-plugin-zaqar-ui': {
'type': 'url',
'location': ('$tarballs_base/zaqar-ui/'
'zaqar-ui-${openstack_branch}.tar.gz')},
'horizon-plugin-zun-ui': {
'type': 'url',
'location': ('$tarballs_base/zun-ui/'
'zun-ui-${openstack_branch}.tar.gz')},
'ironic-base': {
'type': 'url',
'location': ('$tarballs_base/ironic/'
'ironic-${openstack_branch}.tar.gz')},
'ironic-inspector': {
'type': 'url',
'location': ('$tarballs_base/ironic-inspector/'
'ironic-inspector-${openstack_branch}.tar.gz')},
'karbor-base': {
'type': 'url',
'location': ('$tarballs_base/karbor/'
'karbor-${openstack_branch}.tar.gz')},
'keystone-base': {
'type': 'url',
'location': ('$tarballs_base/keystone/'
'keystone-${openstack_branch}.tar.gz')},
'kuryr-base': {
'type': 'url',
'location': ('$tarballs_base/kuryr/'
'kuryr-${openstack_branch}.tar.gz')},
'kuryr-libnetwork': {
'type': 'url',
'location': ('$tarballs_base/kuryr-libnetwork/'
'kuryr-libnetwork-${openstack_branch}.tar.gz')},
'magnum-base': {
'type': 'url',
'location': ('$tarballs_base/magnum/'
'magnum-${openstack_branch}.tar.gz')},
'manila-base': {
'type': 'url',
'location': ('$tarballs_base/manila/'
'manila-${openstack_branch}.tar.gz')},
'masakari-base': {
'type': 'url',
'location': ('$tarballs_base/masakari/'
'masakari-${openstack_branch}.tar.gz')},
'masakari-monitors': {
'type': 'url',
'location': ('$tarballs_base/masakari-monitors/'
'masakari-monitors-${openstack_branch}.tar.gz')},
'mistral-base': {
'type': 'url',
'location': ('$tarballs_base/mistral/'
'mistral-${openstack_branch}.tar.gz')},
'mistral-base-plugin-tacker': {
'type': 'url',
'location': ('$tarballs_base/tacker/'
'tacker-${openstack_branch}.tar.gz')},
'monasca-agent': {
'type': 'url',
'location': ('$tarballs_base/monasca-agent/'
'monasca-agent-${openstack_branch}.tar.gz')},
'monasca-api': {
'type': 'url',
'location': ('$tarballs_base/monasca-api/'
'monasca-api-${openstack_branch}.tar.gz')},
'monasca-log-api': {
'type': 'url',
'location': ('$tarballs_base/monasca-log-api/'
'monasca-log-api-${openstack_branch}.tar.gz')},
'monasca-notification': {
'type': 'url',
'location': ('$tarballs_base/monasca-notification/'
'monasca-notification-${openstack_branch}.tar.gz')},
'monasca-persister': {
'type': 'url',
'location': ('$tarballs_base/monasca-persister/'
'monasca-persister-${openstack_branch}.tar.gz')},
'monasca-statsd': {
'type': 'url',
'location': ('$tarballs_base/monasca-statsd/'
'monasca-statsd-${openstack_branch}.tar.gz')},
# FIXME(dszumski): Use openstack tar when infra is fixed
'monasca-thresh': {
'type': 'url',
'location': ('https://github.com/openstack/monasca-thresh/archive/'
'master.tar.gz')},
'monasca-thresh-additions-monasca-common': {
'type': 'url',
'location': ('$tarballs_base/monasca-common/'
'monasca-common-${openstack_branch}.tar.gz')},
'murano-base': {
'type': 'url',
'location': ('$tarballs_base/murano/'
'murano-${openstack_branch}.tar.gz')},
'neutron-base': {
'type': 'url',
'location': ('$tarballs_base/neutron/'
'neutron-${openstack_branch}.tar.gz')},
'neutron-base-plugin-neutron-fwaas': {
'type': 'url',
'location': ('$tarballs_base/neutron-fwaas/'
'neutron-fwaas-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-ansible': {
'type': 'url',
'location': ('$tarballs_base/networking-ansible/'
'networking-ansible-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-baremetal': {
'type': 'url',
'location': ('$tarballs_base/networking-baremetal/'
'networking-baremetal-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-generic-switch': {
'type': 'url',
'location': ('$tarballs_base/networking-generic-switch/'
'networking-generic-switch-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-mlnx': {
'type': 'url',
'location': ('$tarballs_base/networking-mlnx/'
'networking-mlnx-${openstack_branch}.tar.gz')},
'neutron-base-plugin-networking-sfc': {
'type': 'url',
'location': ('$tarballs_base/networking-sfc/'
'networking-sfc-${openstack_branch}.tar.gz')},
'neutron-base-plugin-vmware-nsx': {
'type': 'url',
'location': ('$tarballs_base/vmware-nsx/'
'vmware-nsx-${openstack_branch}.tar.gz')},
'neutron-base-plugin-vpnaas-agent': {
'type': 'url',
'location': ('$tarballs_base/neutron-vpnaas/'
'neutron-vpnaas-${openstack_branch}.tar.gz')},
'neutron-bgp-dragent': {
'type': 'url',
'location': ('$tarballs_base/neutron-dynamic-routing/'
'neutron-dynamic-routing-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-odl': {
'type': 'url',
'location': ('$tarballs_base/networking-odl/'
'networking-odl-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-bgpvpn': {
'type': 'url',
'location': ('$tarballs_base/networking-bgpvpn/'
'networking-bgpvpn-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-l2gw': {
'type': 'url',
'location': ('$tarballs_base/networking-l2gw/'
'networking-l2gw-${openstack_branch}.tar.gz')},
'neutron-server-opendaylight-plugin-networking-sfc': {
'type': 'url',
'location': ('$tarballs_base/networking-sfc/'
'networking-sfc-${openstack_branch}.tar.gz')},
'neutron-server-plugin-neutron-dynamic-routing': {
'type': 'url',
'location': ('$tarballs_base/neutron-dynamic-routing/'
'neutron-dynamic-routing-${openstack_branch}.tar.gz')},
'neutron-server-plugin-vmware-nsxlib': {
'type': 'url',
'location': ('$tarballs_base/vmware-nsxlib/'
'vmware-nsxlib-${openstack_branch}.tar.gz')},
'neutron-vpnaas-agent': {
'type': 'url',
'location': ('$tarballs_base/neutron-vpnaas/'
'neutron-vpnaas-${openstack_branch}.tar.gz')},
'neutron-server-ovn-plugin-networking-ovn': {
'type': 'url',
'location': ('$tarballs_base/networking-ovn/'
'networking-ovn-${openstack_branch}.tar.gz')},
'neutron-metadata-agent-ovn-plugin-networking-ovn': {
'type': 'url',
'location': ('$tarballs_base/networking-ovn/'
'networking-ovn-${openstack_branch}.tar.gz')},
'nova-base': {
'type': 'url',
'location': ('$tarballs_base/nova/'
'nova-${openstack_branch}.tar.gz')},
'nova-base-plugin-blazar': {
'type': 'url',
'location': ('$tarballs_base/blazar-nova/'
'blazar-nova-${openstack_branch}.tar.gz')},
'nova-base-plugin-mksproxy': {
'type': 'url',
'location': ('$tarballs_base/nova-mksproxy/'
'nova-mksproxy-master.tar.gz')},
'novajoin-base': {
'type': 'url',
'location': ('$tarballs_base/novajoin/'
'novajoin-master.tar.gz')},
'octavia-base': {
'type': 'url',
'location': ('$tarballs_base/octavia/'
'octavia-${openstack_branch}.tar.gz')},
'panko-base': {
'type': 'url',
'location': ('$tarballs_base/panko/'
'panko-${openstack_branch}.tar.gz')},
'placement-base': {
'type': 'url',
'location': ('$tarballs_base/placement/'
'placement-${openstack_branch}.tar.gz')},
'qinling-base': {
'type': 'url',
'location': ('$tarballs_base/qinling/'
'qinling-${openstack_branch}.tar.gz')},
'tempest-plugin-tempest-conf': {
'type': 'url',
'location': ('$tarballs_base/python-tempestconf/'
'python-tempestconf-master.tar.gz')},
'tempest-plugin-barbican': {
'type': 'url',
'location': ('$tarballs_base/barbican-tempest-plugin/'
'barbican-tempest-plugin-master.tar.gz')},
'tempest-plugin-blazar': {
'type': 'url',
'location': ('$tarballs_base/blazar-tempest-plugin/'
'blazar-tempest-plugin-master.tar.gz')},
'tempest-plugin-cinder': {
'type': 'url',
'location': ('$tarballs_base/cinder-tempest-plugin/'
'cinder-tempest-plugin-master.tar.gz')},
'tempest-plugin-congress': {
'type': 'url',
'location': ('$tarballs_base/congress-tempest-plugin/'
'congress-tempest-plugin-master.tar.gz')},
'tempest-plugin-ec2api': {
'type': 'url',
'location': ('$tarballs_base/ec2api-tempest-plugin/'
'ec2api-tempest-plugin-master.tar.gz')},
'tempest-plugin-heat': {
'type': 'url',
'location': ('$tarballs_base/heat-tempest-plugin/'
'heat-tempest-plugin-master.tar.gz')},
'tempest-plugin-ironic': {
'type': 'url',
'location': ('$tarballs_base/ironic-tempest-plugin/'
'ironic-tempest-plugin-master.tar.gz')},
'tempest-plugin-keystone': {
'type': 'url',
'location': ('$tarballs_base/keystone-tempest-plugin/'
'keystone-tempest-plugin-master.tar.gz')},
'tempest-plugin-magnum': {
'type': 'url',
'location': ('$tarballs_base/magnum-tempest-plugin/'
'magnum-tempest-plugin-master.tar.gz')},
'tempest-plugin-manila': {
'type': 'url',
'location': ('$tarballs_base/manila-tempest-plugin/'
'manila-tempest-plugin-master.tar.gz')},
'tempest-plugin-mistral': {
'type': 'url',
'location': ('$tarballs_base/mistral-tempest-plugin/'
'mistral-tempest-plugin-master.tar.gz')},
'tempest-plugin-monasca': {
'type': 'url',
'location': ('$tarballs_base/monasca-tempest-plugin/'
'monasca-tempest-plugin-master.tar.gz')},
'tempest-plugin-murano': {
'type': 'url',
'location': ('$tarballs_base/murano-tempest-plugin/'
'murano-tempest-plugin-master.tar.gz')},
'tempest-plugin-neutron': {
'type': 'url',
'location': ('$tarballs_base/neutron-tempest-plugin/'
'neutron-tempest-plugin-master.tar.gz')},
'tempest-plugin-patrole': {
'type': 'url',
'location': ('$tarballs_base/patrole/'
'patrole-master.tar.gz')},
'tempest-plugin-telemetry': {
'type': 'url',
'location': ('$tarballs_base/telemetry-tempest-plugin/'
'telemetry-tempest-plugin-master.tar.gz')},
'tempest-plugin-tripleo-common': {
'type': 'url',
'location': ('$tarballs_base/tripleo-common-tempest-plugin/'
'tripleo-common-tempest-plugin-master.'
'tar.gz')},
'tempest-plugin-trove': {
'type': 'url',
'location': ('$tarballs_base/trove-tempest-plugin/'
'trove-tempest-plugin-master.tar.gz')},
'tempest-plugin-vitrage': {
'type': 'url',
'location': ('$tarballs_base/vitrage-tempest-plugin/'
'vitrage-tempest-plugin-master.tar.gz')},
'tempest-plugin-watcher': {
'type': 'url',
'location': ('$tarballs_base/watcher-tempest-plugin/'
'watcher-tempest-plugin-master.tar.gz')},
'tempest-plugin-zaqar': {
'type': 'url',
'location': ('$tarballs_base/zaqar-tempest-plugin/'
'zaqar-tempest-plugin-master.tar.gz')},
'rally': {
'type': 'url',
'location': ('$tarballs_base/rally/'
'rally-master.tar.gz')},
'sahara-base': {
'type': 'url',
'location': ('$tarballs_base/sahara/'
'sahara-${openstack_branch}.tar.gz')},
'sahara-base-plugin-ambari': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-ambari/'
'sahara-plugin-ambari-${openstack_branch}.tar.gz')},
'sahara-base-plugin-cdh': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-cdh/'
'sahara-plugin-cdh-${openstack_branch}.tar.gz')},
'sahara-base-plugin-mapr': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-mapr/'
'sahara-plugin-mapr-${openstack_branch}.tar.gz')},
'sahara-base-plugin-spark': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-spark/'
'sahara-plugin-spark-${openstack_branch}.tar.gz')},
'sahara-base-plugin-storm': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-storm/'
'sahara-plugin-storm-${openstack_branch}.tar.gz')},
'sahara-base-plugin-vanilla': {
'type': 'url',
'location': ('$tarballs_base/sahara-plugin-vanilla/'
'sahara-plugin-vanilla-${openstack_branch}.tar.gz')},
'searchlight-base': {
'type': 'url',
'location': ('$tarballs_base/searchlight/'
'searchlight-${openstack_branch}.tar.gz')},
'senlin-base': {
'type': 'url',
'location': ('$tarballs_base/senlin/'
'senlin-${openstack_branch}.tar.gz')},
'solum-base': {
'type': 'url',
'location': ('$tarballs_base/solum/'
'solum-${openstack_branch}.tar.gz')},
'swift-base': {
'type': 'url',
'location': ('$tarballs_base/swift/'
'swift-${openstack_branch}.tar.gz')},
'tacker-base': {
'type': 'url',
'location': ('$tarballs_base/tacker/'
'tacker-${openstack_branch}.tar.gz')},
'tacker-base-plugin-networking-sfc': {
'type': 'url',
'location': ('$tarballs_base/networking-sfc/'
'networking-sfc-${openstack_branch}.tar.gz')},
'tempest': {
'type': 'url',
'location': ('$tarballs_base/tempest/'
'tempest-master.tar.gz')},
'tripleoclient': {
'type': 'url',
'location': ('$tarballs_base/python-tripleoclient/'
'tripleoclient-12.3.0.tar.gz')},
'trove-base': {
'type': 'url',
'location': ('$tarballs_base/trove/'
'trove-${openstack_branch}.tar.gz')},
'vitrage-base': {
'type': 'url',
'location': ('$tarballs_base/vitrage/'
'vitrage-${openstack_branch}.tar.gz')},
'vmtp': {
'type': 'url',
'location': ('$tarballs_base/vmtp/'
'vmtp-master.tar.gz')},
'watcher-base': {
'type': 'url',
'location': ('$tarballs_base/watcher/'
'watcher-${openstack_branch}.tar.gz')},
'zaqar-base': {
'type': 'url',
'location': ('$tarballs_base/zaqar/'
'zaqar-${openstack_branch}.tar.gz')},
'zun-base': {
'type': 'url',
'location': ('$tarballs_base/zun/'
'zun-${openstack_branch}.tar.gz')}
}
# NOTE(SamYaple): Only increment the UID. Never reuse old or removed UIDs.
# Starting point 42400+ was chosen arbitrarily to ensure no conflicts
# Maps container user name -> fixed uid/gid pair so that file ownership
# inside built images stays stable across rebuilds and hosts.  Gaps in the
# numeric sequence are retired ids (see NOTE above) and are flagged inline.
USERS = {
    'kolla-user': {
        'uid': 42400,
        'gid': 42400,
    },
    'ansible-user': {
        'uid': 42401,
        'gid': 42401,
    },
    'aodh-user': {
        'uid': 42402,
        'gid': 42402,
    },
    'barbican-user': {
        'uid': 42403,
        'gid': 42403,
    },
    'bifrost-user': {
        'uid': 42404,
        'gid': 42404,
    },
    'ceilometer-user': {
        'uid': 42405,
        'gid': 42405,
    },
    'chrony-user': {
        'uid': 42406,
        'gid': 42406,
    },
    'cinder-user': {
        'uid': 42407,
        'gid': 42407,
    },
    'cloudkitty-user': {
        'uid': 42408,
        'gid': 42408,
    },
    'collectd-user': {
        'uid': 42409,
        'gid': 42409,
    },
    'congress-user': {
        'uid': 42410,
        'gid': 42410,
    },
    'designate-user': {
        'uid': 42411,
        'gid': 42411,
    },
    'elasticsearch-user': {
        'uid': 42412,
        'gid': 42412,
    },
    'etcd-user': {
        'uid': 42413,
        'gid': 42413,
    },
    'freezer-user': {
        'uid': 42414,
        'gid': 42414,
    },
    'glance-user': {
        'uid': 42415,
        'gid': 42415,
    },
    'gnocchi-user': {
        'uid': 42416,
        'gid': 42416,
    },
    'grafana-user': {
        'uid': 42417,
        'gid': 42417,
    },
    'heat-user': {
        'uid': 42418,
        'gid': 42418,
    },
    # 42419 skipped -- presumably a retired id (see NOTE above)
    'horizon-user': {
        'uid': 42420,
        'gid': 42420,
    },
    'influxdb-user': {
        'uid': 42421,
        'gid': 42421,
    },
    'ironic-user': {
        'uid': 42422,
        'gid': 42422,
    },
    'kafka-user': {
        'uid': 42423,
        'gid': 42423,
    },
    # 42424 skipped -- presumably a retired id (see NOTE above)
    'keystone-user': {
        'uid': 42425,
        'gid': 42425,
    },
    'kibana-user': {
        'uid': 42426,
        'gid': 42426,
    },
    'qemu-user': {
        'uid': 42427,
        'gid': 42427,
    },
    'magnum-user': {
        'uid': 42428,
        'gid': 42428,
    },
    'manila-user': {
        'uid': 42429,
        'gid': 42429,
    },
    'mistral-user': {
        'uid': 42430,
        'gid': 42430,
    },
    'monasca-user': {
        'uid': 42431,
        'gid': 42431,
    },
    'mongodb-user': {
        'uid': 42432,
        # gid 65534 deliberately differs from the uid; 65534 is
        # conventionally 'nogroup'/'nobody' -- TODO(review): confirm intent.
        'gid': 65534,
    },
    'murano-user': {
        'uid': 42433,
        'gid': 42433,
    },
    'mysql-user': {
        'uid': 42434,
        'gid': 42434,
    },
    'neutron-user': {
        'uid': 42435,
        'gid': 42435,
    },
    'nova-user': {
        'uid': 42436,
        'gid': 42436,
    },
    'octavia-user': {
        'uid': 42437,
        'gid': 42437,
    },
    'panko-user': {
        'uid': 42438,
        'gid': 42438,
    },
    'rabbitmq-user': {
        'uid': 42439,
        'gid': 42439,
    },
    'rally-user': {
        'uid': 42440,
        'gid': 42440,
    },
    'sahara-user': {
        'uid': 42441,
        'gid': 42441,
    },
    'searchlight-user': {
        'uid': 42442,
        'gid': 42442,
    },
    'senlin-user': {
        'uid': 42443,
        'gid': 42443,
    },
    'solum-user': {
        'uid': 42444,
        'gid': 42444,
    },
    'swift-user': {
        'uid': 42445,
        'gid': 42445,
    },
    'tacker-user': {
        'uid': 42446,
        'gid': 42446,
    },
    'td-agent-user': {
        'uid': 42447,
        'gid': 42447,
    },
    'telegraf-user': {
        'uid': 42448,
        'gid': 42448,
    },
    'trove-user': {
        'uid': 42449,
        'gid': 42449,
    },
    'vmtp-user': {
        'uid': 42450,
        'gid': 42450,
    },
    'watcher-user': {
        'uid': 42451,
        'gid': 42451,
    },
    'zaqar-user': {
        'uid': 42452,
        'gid': 42452,
    },
    'zookeeper-user': {
        'uid': 42453,
        'gid': 42453,
    },
    'haproxy-user': {
        'uid': 42454,
        'gid': 42454,
    },
    'ceph-user': {
        # 64045 sits outside the 42400+ range -- presumably matching the
        # id that distro ceph packages create; TODO(review): confirm.
        'uid': 64045,
        'gid': 64045,
    },
    # 42455-42456 skipped -- presumably retired ids (see NOTE above)
    'memcached-user': {
        'uid': 42457,
        'gid': 42457,
    },
    'karbor-user': {
        'uid': 42458,
        'gid': 42458,
    },
    'vitrage-user': {
        'uid': 42459,
        'gid': 42459,
    },
    'redis-user': {
        'uid': 42460,
        'gid': 42460,
    },
    'ironic-inspector-user': {
        'uid': 42461,
        'gid': 42461,
    },
    'odl-user': {
        'uid': 42462,
        'gid': 42462,
    },
    'zun-user': {
        'uid': 42463,
        'gid': 42463,
    },
    'dragonflow-user': {  # unused user (dragonflow dropped)
        'uid': 42464,
        'gid': 42464,
    },
    'qdrouterd-user': {
        'uid': 42465,
        'gid': 42465,
    },
    'ec2api-user': {
        'uid': 42466,
        'gid': 42466,
    },
    'sensu-user': {
        'uid': 42467,
        'gid': 42467,
    },
    'skydive-user': {
        'uid': 42468,
        'gid': 42468,
    },
    'kuryr-user': {
        'uid': 42469,
        'gid': 42469,
    },
    'novajoin-user': {
        'uid': 42470,
        'gid': 42470,
    },
    'blazar-user': {
        'uid': 42471,
        'gid': 42471,
    },
    'prometheus-user': {
        'uid': 42472,
        'gid': 42472,
    },
    'libvirt-user': {
        'uid': 42473,  # unused user, but we need the group for socket access
        'gid': 42473,
    },
    'fluentd-user': {
        'uid': 42474,
        'gid': 42474,
    },
    'almanach-user': {  # unused user (almanach dropped)
        'uid': 42475,
        'gid': 42475,
    },
    'openvswitch-user': {
        'uid': 42476,  # unused user
        'gid': 42476,
    },
    'hugetlbfs-user': {
        'uid': 42477,  # unused user, but we need the group for vhost socket
        'gid': 42477,
    },
    'logstash-user': {
        'uid': 42478,
        'gid': 42478,
    },
    'storm-user': {
        'uid': 42479,
        'gid': 42479,
    },
    'tempest-user': {
        'uid': 42480,
        'gid': 42480,
    },
    'nfast-user': {
        'uid': 42481,  # unused user, but we need the group for thales hsm
        'gid': 42481,
    },
    'placement-user': {
        'uid': 42482,
        'gid': 42482,
    },
    'cyborg-user': {
        'uid': 42483,
        'gid': 42483,
    },
    'qinling-user': {
        'uid': 42484,
        'gid': 42484,
    },
    'masakari-user': {
        'uid': 42485,
        'gid': 42485,
    }
}
def get_source_opts(type_=None, location=None, reference=None):
    """Build the oslo.config options that describe one source archive.

    :param type_: default for the ``type`` option ('local', 'git' or 'url')
    :param location: default for the ``location`` option
    :param reference: default for the ``reference`` option (a git ref)
    :returns: list of three ``cfg.StrOpt`` objects
    """
    type_opt = cfg.StrOpt('type', choices=['local', 'git', 'url'],
                          default=type_,
                          help='Source location type')
    location_opt = cfg.StrOpt('location', default=location,
                              help='The location for source install')
    reference_opt = cfg.StrOpt('reference', default=reference,
                               help=('Git reference to pull, commit sha, tag '
                                     'or branch name'))
    return [type_opt, location_opt, reference_opt]
def get_user_opts(uid, gid):
    """Build the oslo.config options that describe one container user."""
    uid_opt = cfg.IntOpt('uid', default=uid, help='The user id')
    gid_opt = cfg.IntOpt('gid', default=gid, help='The group id')
    return [uid_opt, gid_opt]
def gen_all_user_opts():
    """Yield a (user name, option list) pair for every entry in USERS."""
    for user_name, ids in USERS.items():
        yield user_name, get_user_opts(ids['uid'], ids['gid'])
def gen_all_source_opts():
    """Yield a (source name, option list) pair for every entry in SOURCES."""
    for source_name, info in SOURCES.items():
        # 'reference' is optional; the other two keys are always present.
        yield source_name, get_source_opts(info['type'],
                                           info['location'],
                                           info.get('reference'))
def list_opts():
    """Return every option group (static plus generated per-source/user)."""
    static_groups = [(None, _CLI_OPTS),
                     (None, _BASE_OPTS),
                     ('profiles', _PROFILE_OPTS)]
    return itertools.chain(static_groups,
                           gen_all_source_opts(),
                           gen_all_user_opts())
def parse(conf, args, usage=None, prog=None,
          default_config_files=None):
    """Register all kolla options on *conf* and parse *args*.

    Registers the CLI/base/profile option groups plus one option group per
    source and per user, invokes oslo.config parsing, then derives
    defaults (base tag, openstack release/branch, base image) from the
    parsed values.

    :param conf: oslo.config ConfigOpts instance to populate
    :param args: command line argument list to parse
    :param usage: optional usage string passed through to oslo.config
    :param prog: optional program name passed through to oslo.config
    :param default_config_files: optional list of config files to read
    """
    conf.register_cli_opts(_CLI_OPTS)
    conf.register_opts(_BASE_OPTS)
    conf.register_opts(_PROFILE_OPTS, group='profiles')
    # One option group per source archive and per container user, each
    # named after its SOURCES/USERS key.
    for name, opts in gen_all_source_opts():
        conf.register_opts(opts, name)
    for name, opts in gen_all_user_opts():
        conf.register_opts(opts, name)
    conf(args=args,
         project='kolla',
         usage=usage,
         prog=prog,
         version=version.cached_version_string(),
         default_config_files=default_config_files)
    # NOTE(jeffrey4l): set the default base tag based on the
    # base option
    conf.set_default('base_tag', DEFAULT_BASE_TAGS.get(conf.base))
    conf.set_default('openstack_release', OPENSTACK_RELEASE.get(conf.base))
    # 'master' keeps its name; anything else gets a 'stable-' prefix.
    prefix = '' if conf.openstack_release == 'master' else 'stable-'
    openstack_branch = '{}{}'.format(prefix, conf.openstack_release)
    conf.set_default('openstack_branch', openstack_branch)
    # Fall back to the base distro name when no explicit base image is set.
    if not conf.base_image:
        conf.base_image = conf.base
| true | true |
f713c2cb062950ac487056e50952bf6ad98944fa | 2,603 | py | Python | azure-mgmt-compute/azure/mgmt/compute/v2018_10_01/models/log_analytics_input_base_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-mgmt-compute/azure/mgmt/compute/v2018_10_01/models/log_analytics_input_base_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-mgmt-compute/azure/mgmt/compute/v2018_10_01/models/log_analytics_input_base_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2019-06-17T22:18:23.000Z | 2019-06-17T22:18:23.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class LogAnalyticsInputBase(Model):
    """Api input base class for LogAnalytics Api.
    All required parameters must be populated in order to send to Azure.
    :param blob_container_sas_uri: Required. SAS Uri of the logging blob
    container to which LogAnalytics Api writes output logs to.
    :type blob_container_sas_uri: str
    :param from_time: Required. From time of the query
    :type from_time: datetime
    :param to_time: Required. To time of the query
    :type to_time: datetime
    :param group_by_throttle_policy: Group query result by Throttle Policy
    applied.
    :type group_by_throttle_policy: bool
    :param group_by_operation_name: Group query result by Operation Name.
    :type group_by_operation_name: bool
    :param group_by_resource_name: Group query result by Resource Name.
    :type group_by_resource_name: bool
    """
    # msrest metadata: fields that must be populated before serialization.
    _validation = {
        'blob_container_sas_uri': {'required': True},
        'from_time': {'required': True},
        'to_time': {'required': True},
    }
    # msrest serialization map: python attribute -> wire key and wire type.
    _attribute_map = {
        'blob_container_sas_uri': {'key': 'blobContainerSasUri', 'type': 'str'},
        'from_time': {'key': 'fromTime', 'type': 'iso-8601'},
        'to_time': {'key': 'toTime', 'type': 'iso-8601'},
        'group_by_throttle_policy': {'key': 'groupByThrottlePolicy', 'type': 'bool'},
        'group_by_operation_name': {'key': 'groupByOperationName', 'type': 'bool'},
        'group_by_resource_name': {'key': 'groupByResourceName', 'type': 'bool'},
    }
    # All parameters are keyword-only ('*').  __init__ performs no
    # validation itself; the 'required' flags above are metadata for msrest.
    def __init__(self, *, blob_container_sas_uri: str, from_time, to_time, group_by_throttle_policy: bool=None, group_by_operation_name: bool=None, group_by_resource_name: bool=None, **kwargs) -> None:
        super(LogAnalyticsInputBase, self).__init__(**kwargs)
        self.blob_container_sas_uri = blob_container_sas_uri
        self.from_time = from_time
        self.to_time = to_time
        self.group_by_throttle_policy = group_by_throttle_policy
        self.group_by_operation_name = group_by_operation_name
        self.group_by_resource_name = group_by_resource_name
| 44.118644 | 201 | 0.668844 |
from msrest.serialization import Model
class LogAnalyticsInputBase(Model):
_validation = {
'blob_container_sas_uri': {'required': True},
'from_time': {'required': True},
'to_time': {'required': True},
}
_attribute_map = {
'blob_container_sas_uri': {'key': 'blobContainerSasUri', 'type': 'str'},
'from_time': {'key': 'fromTime', 'type': 'iso-8601'},
'to_time': {'key': 'toTime', 'type': 'iso-8601'},
'group_by_throttle_policy': {'key': 'groupByThrottlePolicy', 'type': 'bool'},
'group_by_operation_name': {'key': 'groupByOperationName', 'type': 'bool'},
'group_by_resource_name': {'key': 'groupByResourceName', 'type': 'bool'},
}
def __init__(self, *, blob_container_sas_uri: str, from_time, to_time, group_by_throttle_policy: bool=None, group_by_operation_name: bool=None, group_by_resource_name: bool=None, **kwargs) -> None:
super(LogAnalyticsInputBase, self).__init__(**kwargs)
self.blob_container_sas_uri = blob_container_sas_uri
self.from_time = from_time
self.to_time = to_time
self.group_by_throttle_policy = group_by_throttle_policy
self.group_by_operation_name = group_by_operation_name
self.group_by_resource_name = group_by_resource_name
| true | true |
f713c3f0a6eab75b579be5aed0e1103595220c9e | 7,372 | py | Python | mailchimp_marketing_asyncio/models/signup_form.py | john-parton/mailchimp-asyncio | 3865ca0867bec8f537dc1e3256aa3a160c00f8a2 | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing_asyncio/models/signup_form.py | john-parton/mailchimp-asyncio | 3865ca0867bec8f537dc1e3256aa3a160c00f8a2 | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing_asyncio/models/signup_form.py | john-parton/mailchimp-asyncio | 3865ca0867bec8f537dc1e3256aa3a160c00f8a2 | [
"Apache-2.0"
] | 1 | 2022-03-09T14:52:22.000Z | 2022-03-09T14:52:22.000Z | # coding: utf-8
"""
Mailchimp Marketing API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 3.0.74
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class SignupForm(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'header': 'SignupFormHeaderOptions',
        'contents': 'list[CollectionOfContentForListSignupForms]',
        'styles': 'list[CollectionOfElementStyleForListSignupForms]',
        'signup_form_url': 'str',
        'list_id': 'str',
        'links': 'list[ResourceLink]'
    }
    attribute_map = {
        'header': 'header',
        'contents': 'contents',
        'styles': 'styles',
        'signup_form_url': 'signup_form_url',
        'list_id': 'list_id',
        'links': '_links'
    }
    def __init__(self, header=None, contents=None, styles=None, signup_form_url=None, list_id=None, links=None):  # noqa: E501
        """SignupForm - a model defined in Swagger"""  # noqa: E501
        # Backing fields for the properties below; set through the setters
        # so that all writes go through a single code path.
        self._header = None
        self._contents = None
        self._styles = None
        self._signup_form_url = None
        self._list_id = None
        self._links = None
        self.discriminator = None  # no polymorphic subtypes for this model
        if header is not None:
            self.header = header
        if contents is not None:
            self.contents = contents
        if styles is not None:
            self.styles = styles
        if signup_form_url is not None:
            self.signup_form_url = signup_form_url
        if list_id is not None:
            self.list_id = list_id
        if links is not None:
            self.links = links
    @property
    def header(self):
        """Gets the header of this SignupForm.  # noqa: E501
        :return: The header of this SignupForm.  # noqa: E501
        :rtype: SignupFormHeaderOptions
        """
        return self._header
    @header.setter
    def header(self, header):
        """Sets the header of this SignupForm.
        :param header: The header of this SignupForm.  # noqa: E501
        :type: SignupFormHeaderOptions
        """
        self._header = header
    @property
    def contents(self):
        """Gets the contents of this SignupForm.  # noqa: E501
        The signup form body content.  # noqa: E501
        :return: The contents of this SignupForm.  # noqa: E501
        :rtype: list[CollectionOfContentForListSignupForms]
        """
        return self._contents
    @contents.setter
    def contents(self, contents):
        """Sets the contents of this SignupForm.
        The signup form body content.  # noqa: E501
        :param contents: The contents of this SignupForm.  # noqa: E501
        :type: list[CollectionOfContentForListSignupForms]
        """
        self._contents = contents
    @property
    def styles(self):
        """Gets the styles of this SignupForm.  # noqa: E501
        An array of objects, each representing an element style for the signup form.  # noqa: E501
        :return: The styles of this SignupForm.  # noqa: E501
        :rtype: list[CollectionOfElementStyleForListSignupForms]
        """
        return self._styles
    @styles.setter
    def styles(self, styles):
        """Sets the styles of this SignupForm.
        An array of objects, each representing an element style for the signup form.  # noqa: E501
        :param styles: The styles of this SignupForm.  # noqa: E501
        :type: list[CollectionOfElementStyleForListSignupForms]
        """
        self._styles = styles
    @property
    def signup_form_url(self):
        """Gets the signup_form_url of this SignupForm.  # noqa: E501
        Signup form URL.  # noqa: E501
        :return: The signup_form_url of this SignupForm.  # noqa: E501
        :rtype: str
        """
        return self._signup_form_url
    @signup_form_url.setter
    def signup_form_url(self, signup_form_url):
        """Sets the signup_form_url of this SignupForm.
        Signup form URL.  # noqa: E501
        :param signup_form_url: The signup_form_url of this SignupForm.  # noqa: E501
        :type: str
        """
        self._signup_form_url = signup_form_url
    @property
    def list_id(self):
        """Gets the list_id of this SignupForm.  # noqa: E501
        The signup form's list id.  # noqa: E501
        :return: The list_id of this SignupForm.  # noqa: E501
        :rtype: str
        """
        return self._list_id
    @list_id.setter
    def list_id(self, list_id):
        """Sets the list_id of this SignupForm.
        The signup form's list id.  # noqa: E501
        :param list_id: The list_id of this SignupForm.  # noqa: E501
        :type: str
        """
        self._list_id = list_id
    @property
    def links(self):
        """Gets the links of this SignupForm.  # noqa: E501
        A list of link types and descriptions for the API schema documents.  # noqa: E501
        :return: The links of this SignupForm.  # noqa: E501
        :rtype: list[ResourceLink]
        """
        return self._links
    @links.setter
    def links(self, links):
        """Sets the links of this SignupForm.
        A list of link types and descriptions for the API schema documents.  # noqa: E501
        :param links: The links of this SignupForm.  # noqa: E501
        :type: list[ResourceLink]
        """
        self._links = links
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialize nested models, lists of models, and dicts
        # whose values are models (anything exposing to_dict()).
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Dead branch for this class (SignupForm does not subclass dict);
        # kept by the code generator for dict-based models.
        if issubclass(SignupForm, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, SignupForm):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 28.796875 | 126 | 0.589257 |
import pprint
import re
import six
class SignupForm(object):
swagger_types = {
'header': 'SignupFormHeaderOptions',
'contents': 'list[CollectionOfContentForListSignupForms]',
'styles': 'list[CollectionOfElementStyleForListSignupForms]',
'signup_form_url': 'str',
'list_id': 'str',
'links': 'list[ResourceLink]'
}
attribute_map = {
'header': 'header',
'contents': 'contents',
'styles': 'styles',
'signup_form_url': 'signup_form_url',
'list_id': 'list_id',
'links': '_links'
}
def __init__(self, header=None, contents=None, styles=None, signup_form_url=None, list_id=None, links=None):
self._header = None
self._contents = None
self._styles = None
self._signup_form_url = None
self._list_id = None
self._links = None
self.discriminator = None
if header is not None:
self.header = header
if contents is not None:
self.contents = contents
if styles is not None:
self.styles = styles
if signup_form_url is not None:
self.signup_form_url = signup_form_url
if list_id is not None:
self.list_id = list_id
if links is not None:
self.links = links
@property
def header(self):
return self._header
@header.setter
def header(self, header):
self._header = header
@property
def contents(self):
return self._contents
@contents.setter
def contents(self, contents):
self._contents = contents
@property
def styles(self):
return self._styles
@styles.setter
def styles(self, styles):
self._styles = styles
@property
def signup_form_url(self):
return self._signup_form_url
@signup_form_url.setter
def signup_form_url(self, signup_form_url):
self._signup_form_url = signup_form_url
@property
def list_id(self):
return self._list_id
@list_id.setter
def list_id(self, list_id):
self._list_id = list_id
@property
def links(self):
return self._links
@links.setter
def links(self, links):
self._links = links
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(SignupForm, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, SignupForm):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f713c3fd343f05467d069a91846cfa29aed8d972 | 124 | py | Python | old/02/09.py | systemquant/book-pandas-for-finance | 90b7eb9be1de20a12ae72b9bb5d51424a979b174 | [
"MIT"
] | 10 | 2021-02-04T12:49:56.000Z | 2022-03-26T11:28:11.000Z | old/02/09.py | systemquant/book-pandas-for-finance | 90b7eb9be1de20a12ae72b9bb5d51424a979b174 | [
"MIT"
] | 1 | 2022-03-24T03:47:14.000Z | 2022-03-24T03:54:52.000Z | old/02/09.py | systemquant/book-pandas-for-finance | 90b7eb9be1de20a12ae72b9bb5d51424a979b174 | [
"MIT"
] | 4 | 2021-07-17T16:50:15.000Z | 2022-03-22T05:55:34.000Z | data = [("000060", 8.25), ("000020", 5.75), ("039490", 1.3)]
def 정렬규칙(x):
return x[1]
data.sort(key=정렬규칙)
print(data)
| 15.5 | 60 | 0.564516 | data = [("000060", 8.25), ("000020", 5.75), ("039490", 1.3)]
def 정렬규칙(x):
return x[1]
data.sort(key=정렬규칙)
print(data)
| true | true |
f713c43c0962dea49c922bae1e450935719dbbf8 | 3,500 | py | Python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/features/v2021_07_01/_configuration.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/features/v2021_07_01/_configuration.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/features/v2021_07_01/_configuration.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class FeatureClientConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
    """Configuration for FeatureClient.
    Note that all parameters used to create this instance are saved as instance
    attributes.
    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.TokenCredential
    :param subscription_id: The Azure subscription ID.
    :type subscription_id: str
    :keyword api_version: Api Version. Default value is "2021-07-01". Note that overriding this
    default value may result in unsupported behavior.
    :paramtype api_version: str
    """
    def __init__(
        self,
        credential: "TokenCredential",
        subscription_id: str,
        **kwargs: Any
    ) -> None:
        super(FeatureClientConfiguration, self).__init__(**kwargs)
        api_version = kwargs.pop('api_version', "2021-07-01")  # type: str
        # Fail fast on missing required parameters.
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = api_version
        # Default token scope is the ARM endpoint; overridable via kwargs.
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION))
        self._configure(**kwargs)
    def _configure(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Each pipeline policy may be overridden through kwargs; otherwise
        # the azure-core default implementation is constructed.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        # Build a challenge-aware bearer-token policy from the credential
        # unless the caller supplied an explicit authentication policy.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
| 47.297297 | 125 | 0.693714 |
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
from azure.core.credentials import TokenCredential
class FeatureClientConfiguration(Configuration):
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
super(FeatureClientConfiguration, self).__init__(**kwargs)
api_version = kwargs.pop('api_version', "2021-07-01")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
self.credential = credential
self.subscription_id = subscription_id
self.api_version = api_version
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs
):
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
| true | true |
f713c66f57be20c2bd1dddc1a8ba715d1887c1ef | 822 | py | Python | cnns/nnlib/robustness/pni/code/models/noise_layer_robust.py | anonymous-user-commits/perturb-net | 66fc7c4a1234fa34b92bcc85751f0a6e23d80a23 | [
"MIT"
] | null | null | null | cnns/nnlib/robustness/pni/code/models/noise_layer_robust.py | anonymous-user-commits/perturb-net | 66fc7c4a1234fa34b92bcc85751f0a6e23d80a23 | [
"MIT"
] | null | null | null | cnns/nnlib/robustness/pni/code/models/noise_layer_robust.py | anonymous-user-commits/perturb-net | 66fc7c4a1234fa34b92bcc85751f0a6e23d80a23 | [
"MIT"
] | null | null | null | import torch.nn as nn
import torch.nn.functional as F
class noise_Conv2d(nn.Conv2d):
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1,
groups=1, bias=True, noise_std=0.1):
super(noise_Conv2d, self).__init__(in_channels, out_channels,
kernel_size, stride,
padding, dilation, groups, bias)
self.noise_std = noise_std
def forward(self, input):
noise_i = input.clone().normal_(0, self.noise_std)
noise_input = input + noise_i
output = F.conv2d(noise_input, self.weight, self.bias, self.stride,
self.padding, self.dilation,
self.groups)
return output
| 34.25 | 75 | 0.559611 | import torch.nn as nn
import torch.nn.functional as F
class noise_Conv2d(nn.Conv2d):
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1,
groups=1, bias=True, noise_std=0.1):
super(noise_Conv2d, self).__init__(in_channels, out_channels,
kernel_size, stride,
padding, dilation, groups, bias)
self.noise_std = noise_std
def forward(self, input):
noise_i = input.clone().normal_(0, self.noise_std)
noise_input = input + noise_i
output = F.conv2d(noise_input, self.weight, self.bias, self.stride,
self.padding, self.dilation,
self.groups)
return output
| true | true |
f713c7525055ce6ba983ba32bc4b5e05efb1bc80 | 2,221 | py | Python | rally/plugins/openstack/context/nova/keypairs.py | aforalee/rallyALi | 8050ca08b0e253aeb19a1cec34f33c648f00136a | [
"Apache-2.0"
] | null | null | null | rally/plugins/openstack/context/nova/keypairs.py | aforalee/rallyALi | 8050ca08b0e253aeb19a1cec34f33c648f00136a | [
"Apache-2.0"
] | null | null | null | rally/plugins/openstack/context/nova/keypairs.py | aforalee/rallyALi | 8050ca08b0e253aeb19a1cec34f33c648f00136a | [
"Apache-2.0"
] | null | null | null | # Copyright 2014: Rackspace UK
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import novaclient.exceptions
from rally.common.i18n import _
from rally.common import log as logging
from rally.common import utils
from rally import osclients
from rally.plugins.openstack.context.cleanup import manager as resource_manager
from rally.task import context
LOG = logging.getLogger(__name__)
@context.configure(name="keypair", order=310)
class Keypair(context.Context):
KEYPAIR_NAME = "rally_ssh_key"
def _generate_keypair(self, endpoint):
keypair_name = "%s_%s" % (
self.KEYPAIR_NAME, self.context["task"]["uuid"])
nova_client = osclients.Clients(endpoint).nova()
# NOTE(hughsaunders): If keypair exists, it must be deleted as we can't
# retrieve the private key
try:
nova_client.keypairs.delete(keypair_name)
except novaclient.exceptions.NotFound:
pass
keypair = nova_client.keypairs.create(keypair_name)
return {"private": keypair.private_key,
"public": keypair.public_key,
"name": keypair_name,
"id": keypair.id}
@utils.log_task_wrapper(LOG.info, _("Enter context: `keypair`"))
def setup(self):
for user in self.context["users"]:
user["keypair"] = self._generate_keypair(user["endpoint"])
@utils.log_task_wrapper(LOG.info, _("Exit context: `keypair`"))
def cleanup(self):
# TODO(boris-42): Delete only resources created by this context
resource_manager.cleanup(names=["nova.keypairs"],
users=self.context.get("users", []))
| 35.822581 | 79 | 0.677172 |
import novaclient.exceptions
from rally.common.i18n import _
from rally.common import log as logging
from rally.common import utils
from rally import osclients
from rally.plugins.openstack.context.cleanup import manager as resource_manager
from rally.task import context
LOG = logging.getLogger(__name__)
@context.configure(name="keypair", order=310)
class Keypair(context.Context):
KEYPAIR_NAME = "rally_ssh_key"
def _generate_keypair(self, endpoint):
keypair_name = "%s_%s" % (
self.KEYPAIR_NAME, self.context["task"]["uuid"])
nova_client = osclients.Clients(endpoint).nova()
# retrieve the private key
try:
nova_client.keypairs.delete(keypair_name)
except novaclient.exceptions.NotFound:
pass
keypair = nova_client.keypairs.create(keypair_name)
return {"private": keypair.private_key,
"public": keypair.public_key,
"name": keypair_name,
"id": keypair.id}
@utils.log_task_wrapper(LOG.info, _("Enter context: `keypair`"))
def setup(self):
for user in self.context["users"]:
user["keypair"] = self._generate_keypair(user["endpoint"])
@utils.log_task_wrapper(LOG.info, _("Exit context: `keypair`"))
def cleanup(self):
# TODO(boris-42): Delete only resources created by this context
resource_manager.cleanup(names=["nova.keypairs"],
users=self.context.get("users", []))
| true | true |
f713c757918c68117662e36f5c6f0adb8d3638a6 | 142 | py | Python | blazingsql/__init__.py | paulhendricks/pyBlazing | 792d286054ec4ae8a0546d57ef758a60812d24c3 | [
"Apache-2.0"
] | 1 | 2020-05-28T14:06:15.000Z | 2020-05-28T14:06:15.000Z | blazingsql/__init__.py | paulhendricks/pyBlazing | 792d286054ec4ae8a0546d57ef758a60812d24c3 | [
"Apache-2.0"
] | null | null | null | blazingsql/__init__.py | paulhendricks/pyBlazing | 792d286054ec4ae8a0546d57ef758a60812d24c3 | [
"Apache-2.0"
] | null | null | null | from pyblazing.apiv2 import context
from pyblazing.apiv2 import make_context
BlazingContext = context.BlazingContext
import pyblazing.apiv2
| 20.285714 | 40 | 0.859155 | from pyblazing.apiv2 import context
from pyblazing.apiv2 import make_context
BlazingContext = context.BlazingContext
import pyblazing.apiv2
| true | true |
f713c763b71c7ae43f02b4463df5842ee7f1a936 | 5,690 | py | Python | test_scripts/test_element_and_compare_to_kratos.py | JoZimmer/ParOptBeam | 50d15d8d822a2718f2932807e06c4a7e02f866a3 | [
"BSD-3-Clause"
] | 1 | 2021-04-09T14:08:20.000Z | 2021-04-09T14:08:20.000Z | test_scripts/test_element_and_compare_to_kratos.py | JoZimmer/ParOptBeam | 50d15d8d822a2718f2932807e06c4a7e02f866a3 | [
"BSD-3-Clause"
] | 2 | 2021-04-28T15:05:01.000Z | 2021-11-10T15:12:56.000Z | test_scripts/test_element_and_compare_to_kratos.py | JoZimmer/ParOptBeam | 50d15d8d822a2718f2932807e06c4a7e02f866a3 | [
"BSD-3-Clause"
] | 2 | 2021-02-01T08:49:45.000Z | 2021-08-10T02:07:36.000Z | from source.element.cr_beam_element import CRBeamElement
import numpy as np
np.set_printoptions(suppress=False, precision=4, linewidth=100)
def test_crbeam_element_update_incremental():
material_params = {'rho': 7850, 'e': 2069000000.0, 'nu': 0.29, 'zeta': 0.05, 'lx_i': 1.2, 'is_nonlinear': True}
element_params = {'a': 0.0001, 'asy': 0.0, 'asz': 0.0, 'iy': 0.0001, 'iz': 0.0001, 'it': 0.0001}
coords = np.array([[1.2, 0.0, 0.0], [0.0, 0.0, 0.0]])
element = CRBeamElement(material_params, element_params, coords, 0, '3D')
Kd_kratos = np.array([
[66828.2, 0, 0, 0, 0, 0],
[0, 172417, 0, 0, 0, 0],
[0, 0, 172417, 0, 0, 0],
[0, 0, 0, 172417, 0, 0],
[0, 0, 0, 0, 517250, 0],
[0, 0, 0, 0, 0, 517250]
])
Kd = element.Kd_mat
try:
assert (abs(Kd_kratos - Kd) < 10).all()
except AssertionError:
msg = "##################################################################################\n"
msg += "Deformation Stiffness matrix\n"
msg += "Kd in Kratos:\n" + str(Kd_kratos)
msg += "\nIt is however:\n" + str(Kd)
print(msg)
Ke_mat_kratos = np.array([
[172417, 0, 0, 0, 0, 0, -172417, 0, 0, 0, 0, 0],
[0, 1.43681e+06, 0, 0, 0, 862083, 0, -1.43681e+06, 0, 0, 0, 862083],
[0, 0, 1.43681e+06, 0, -862083, 0, 0, 0, -1.43681e+06, 0, -862083, 0],
[0, 0, 0, 66828.2, 0, 0, 0, 0, 0, -66828.2, 0, 0],
[0, 0, -862083, 0, 689667, 0, 0, 0, 862083, 0, 344833, 0],
[0, 862083, 0, 0, 0, 689667, 0, -862083, 0, 0, 0, 344833],
[-172417, 0, 0, 0, 0, 0, 172417, 0, 0, 0, 0, 0],
[0, -1.43681e+06, 0, 0, 0, -862083, 0, 1.43681e+06, 0, 0, 0, -862083],
[0, 0, -1.43681e+06, 0, 862083, 0, 0, 0, 1.43681e+06, 0, 862083, 0],
[0, 0, 0, -66828.2, 0, 0, 0, 0, 0, 66828.2, 0, 0],
[0, 0, -862083, 0, 344833, 0, 0, 0, 862083, 0, 689667, 0],
[0, 862083, 0, 0, 0, 344833, 0, -862083, 0, 0, 0, 689667]
])
Ke_mat = element.Ke_mat
try:
assert (abs(Ke_mat_kratos - Ke_mat) < 10).all()
except AssertionError:
msg = "##################################################################################\n"
msg += "Material Stiffness matrix\n"
msg += "Ke_mat in Kratos:\n" + str(Ke_mat_kratos)
msg += "\nIt is however:\n" + str(Ke_mat)
print(msg)
Phiz = 0.0
Phiy = 0.0
CTy = (element.rho * element.A * element.L) / ((1 + Phiy) * (1 + Phiy))
CTz = (element.rho * element.A * element.L) / ((1 + Phiz) * (1 + Phiz))
CRy = (element.rho * element.Iy) / ((1 + Phiy) * (1 + Phiy) * element.L)
CRz = (element.rho * element.Iz) / ((1 + Phiz) * (1 + Phiz) * element.L)
bending_mass_matrix_z = element.build_single_mass_matrix(Phiz, CTz, CRz, element.L, +1)
bending_mass_matrix_kratos_z = np.array([
[1.13489, 0.137711, -0.663886, 0.0435114],
[0.137711, 0.138519, -0.0435114, -0.0410891],
[-0.663886, -0.0435114, 1.13489, -0.137711],
[0.0435114, -0.0410891, -0.137711, 0.138519]
])
try:
assert (abs(bending_mass_matrix_z - bending_mass_matrix_kratos_z) < 1e-4).all()
print("Bending mass_matrix z is correct")
except AssertionError:
msg = "##################################################################################\n"
msg += "Bending mass matrix z\n"
msg += "Me in Kratos:\n" + str(bending_mass_matrix_kratos_z)
msg += "\nIt is however:\n" + str(bending_mass_matrix_z)
print(msg)
bending_mass_matrix_y = element.build_single_mass_matrix(Phiz, CTy, CRy, element.L, -1)
bending_mass_matrix_kratos_y = np.array([
[1.13489, -0.137711, -0.663886, -0.0435114],
[-0.137711, 0.138519, 0.0435114, -0.0410891],
[-0.663886, 0.0435114, 1.13489, 0.137711],
[-0.0435114, -0.0410891, 0.137711, 0.138519]
])
try:
assert (abs(bending_mass_matrix_y - bending_mass_matrix_kratos_y) < 1e-4).all()
print("Bending mass_matrix y is correct")
except AssertionError:
msg = "##################################################################################\n"
msg += "Bending mass matrix y\n"
msg += "Me in Kratos:\n" + str(bending_mass_matrix_kratos_y)
msg += "\nIt is however:\n" + str(bending_mass_matrix_y)
print(msg)
Me = element._get_consistent_mass_matrix()
Me_kratos = np.array([
[0.314, 0, 0, 0, 0, 0, 0.157, 0, 0, 0, 0, 0],
[0, 1.13489, 0, 0, 0, 0.137711, 0, -0.663886, 0, 0, 0, 0.0435114],
[0, 0, 1.13489, 0, -0.137711, 0, 0, 0, -0.663886, 0, -0.0435114, 0],
[0, 0, 0, 0.628, 0, 0, 0, 0, 0, 0.314, 0, 0],
[0, 0, -0.137711, 0, 0.138519, 0, 0, 0, 0.0435114, 0, -0.0410891, 0],
[0, 0.137711, 0, 0, 0, 0.138519, 0, -0.0435114, 0, 0, 0, -0.0410891],
[0.157, 0, 0, 0, 0, 0, 0.314, 0, 0, 0, 0, 0],
[0, -0.663886, 0, 0, 0, -0.0435114, 0, 1.13489, 0, 0, 0, -0.137711],
[0, 0, -0.663886, 0, 0.0435114, 0, 0, 0, 1.13489, 0, 0.137711, 0],
[0, 0, 0, 0.314, 0, 0, 0, 0, 0, 0.628, 0, 0],
[0, 0, -0.0435114, 0, -0.0410891, 0, 0, 0, 0.137711, 0, 0.138519, 0],
[0, 0.0435114, 0, 0, 0, -0.0410891, 0, -0.137711, 0, 0, 0, 0.138519]
])
try:
assert (abs(Me - Me_kratos) < 1e-2).all()
print("Mass matrix is correct")
except AssertionError:
msg = "##################################################################################\n"
msg += "Consistent mass matrix\n"
msg += "Me in Kratos:\n" + str(Me_kratos)
msg += "\nIt is however:\n" + str(Me)
print(msg)
| 42.462687 | 115 | 0.496485 | from source.element.cr_beam_element import CRBeamElement
import numpy as np
np.set_printoptions(suppress=False, precision=4, linewidth=100)
def test_crbeam_element_update_incremental():
material_params = {'rho': 7850, 'e': 2069000000.0, 'nu': 0.29, 'zeta': 0.05, 'lx_i': 1.2, 'is_nonlinear': True}
element_params = {'a': 0.0001, 'asy': 0.0, 'asz': 0.0, 'iy': 0.0001, 'iz': 0.0001, 'it': 0.0001}
coords = np.array([[1.2, 0.0, 0.0], [0.0, 0.0, 0.0]])
element = CRBeamElement(material_params, element_params, coords, 0, '3D')
Kd_kratos = np.array([
[66828.2, 0, 0, 0, 0, 0],
[0, 172417, 0, 0, 0, 0],
[0, 0, 172417, 0, 0, 0],
[0, 0, 0, 172417, 0, 0],
[0, 0, 0, 0, 517250, 0],
[0, 0, 0, 0, 0, 517250]
])
Kd = element.Kd_mat
try:
assert (abs(Kd_kratos - Kd) < 10).all()
except AssertionError:
msg = "##################################################################################\n"
msg += "Deformation Stiffness matrix\n"
msg += "Kd in Kratos:\n" + str(Kd_kratos)
msg += "\nIt is however:\n" + str(Kd)
print(msg)
Ke_mat_kratos = np.array([
[172417, 0, 0, 0, 0, 0, -172417, 0, 0, 0, 0, 0],
[0, 1.43681e+06, 0, 0, 0, 862083, 0, -1.43681e+06, 0, 0, 0, 862083],
[0, 0, 1.43681e+06, 0, -862083, 0, 0, 0, -1.43681e+06, 0, -862083, 0],
[0, 0, 0, 66828.2, 0, 0, 0, 0, 0, -66828.2, 0, 0],
[0, 0, -862083, 0, 689667, 0, 0, 0, 862083, 0, 344833, 0],
[0, 862083, 0, 0, 0, 689667, 0, -862083, 0, 0, 0, 344833],
[-172417, 0, 0, 0, 0, 0, 172417, 0, 0, 0, 0, 0],
[0, -1.43681e+06, 0, 0, 0, -862083, 0, 1.43681e+06, 0, 0, 0, -862083],
[0, 0, -1.43681e+06, 0, 862083, 0, 0, 0, 1.43681e+06, 0, 862083, 0],
[0, 0, 0, -66828.2, 0, 0, 0, 0, 0, 66828.2, 0, 0],
[0, 0, -862083, 0, 344833, 0, 0, 0, 862083, 0, 689667, 0],
[0, 862083, 0, 0, 0, 344833, 0, -862083, 0, 0, 0, 689667]
])
Ke_mat = element.Ke_mat
try:
assert (abs(Ke_mat_kratos - Ke_mat) < 10).all()
except AssertionError:
msg = "##################################################################################\n"
msg += "Material Stiffness matrix\n"
msg += "Ke_mat in Kratos:\n" + str(Ke_mat_kratos)
msg += "\nIt is however:\n" + str(Ke_mat)
print(msg)
Phiz = 0.0
Phiy = 0.0
CTy = (element.rho * element.A * element.L) / ((1 + Phiy) * (1 + Phiy))
CTz = (element.rho * element.A * element.L) / ((1 + Phiz) * (1 + Phiz))
CRy = (element.rho * element.Iy) / ((1 + Phiy) * (1 + Phiy) * element.L)
CRz = (element.rho * element.Iz) / ((1 + Phiz) * (1 + Phiz) * element.L)
bending_mass_matrix_z = element.build_single_mass_matrix(Phiz, CTz, CRz, element.L, +1)
bending_mass_matrix_kratos_z = np.array([
[1.13489, 0.137711, -0.663886, 0.0435114],
[0.137711, 0.138519, -0.0435114, -0.0410891],
[-0.663886, -0.0435114, 1.13489, -0.137711],
[0.0435114, -0.0410891, -0.137711, 0.138519]
])
try:
assert (abs(bending_mass_matrix_z - bending_mass_matrix_kratos_z) < 1e-4).all()
print("Bending mass_matrix z is correct")
except AssertionError:
msg = "##################################################################################\n"
msg += "Bending mass matrix z\n"
msg += "Me in Kratos:\n" + str(bending_mass_matrix_kratos_z)
msg += "\nIt is however:\n" + str(bending_mass_matrix_z)
print(msg)
bending_mass_matrix_y = element.build_single_mass_matrix(Phiz, CTy, CRy, element.L, -1)
bending_mass_matrix_kratos_y = np.array([
[1.13489, -0.137711, -0.663886, -0.0435114],
[-0.137711, 0.138519, 0.0435114, -0.0410891],
[-0.663886, 0.0435114, 1.13489, 0.137711],
[-0.0435114, -0.0410891, 0.137711, 0.138519]
])
try:
assert (abs(bending_mass_matrix_y - bending_mass_matrix_kratos_y) < 1e-4).all()
print("Bending mass_matrix y is correct")
except AssertionError:
msg = "##################################################################################\n"
msg += "Bending mass matrix y\n"
msg += "Me in Kratos:\n" + str(bending_mass_matrix_kratos_y)
msg += "\nIt is however:\n" + str(bending_mass_matrix_y)
print(msg)
Me = element._get_consistent_mass_matrix()
Me_kratos = np.array([
[0.314, 0, 0, 0, 0, 0, 0.157, 0, 0, 0, 0, 0],
[0, 1.13489, 0, 0, 0, 0.137711, 0, -0.663886, 0, 0, 0, 0.0435114],
[0, 0, 1.13489, 0, -0.137711, 0, 0, 0, -0.663886, 0, -0.0435114, 0],
[0, 0, 0, 0.628, 0, 0, 0, 0, 0, 0.314, 0, 0],
[0, 0, -0.137711, 0, 0.138519, 0, 0, 0, 0.0435114, 0, -0.0410891, 0],
[0, 0.137711, 0, 0, 0, 0.138519, 0, -0.0435114, 0, 0, 0, -0.0410891],
[0.157, 0, 0, 0, 0, 0, 0.314, 0, 0, 0, 0, 0],
[0, -0.663886, 0, 0, 0, -0.0435114, 0, 1.13489, 0, 0, 0, -0.137711],
[0, 0, -0.663886, 0, 0.0435114, 0, 0, 0, 1.13489, 0, 0.137711, 0],
[0, 0, 0, 0.314, 0, 0, 0, 0, 0, 0.628, 0, 0],
[0, 0, -0.0435114, 0, -0.0410891, 0, 0, 0, 0.137711, 0, 0.138519, 0],
[0, 0.0435114, 0, 0, 0, -0.0410891, 0, -0.137711, 0, 0, 0, 0.138519]
])
try:
assert (abs(Me - Me_kratos) < 1e-2).all()
print("Mass matrix is correct")
except AssertionError:
msg = "##################################################################################\n"
msg += "Consistent mass matrix\n"
msg += "Me in Kratos:\n" + str(Me_kratos)
msg += "\nIt is however:\n" + str(Me)
print(msg)
| true | true |
f713c8014b3825c0746482bc5b3f6306b4660275 | 2,931 | py | Python | TangoChat/core.py | sierra-m/TangoBot | 2a7eafc391a60862b660efaee5c3b9b1c78fac8e | [
"MIT"
] | null | null | null | TangoChat/core.py | sierra-m/TangoBot | 2a7eafc391a60862b660efaee5c3b9b1c78fac8e | [
"MIT"
] | null | null | null | TangoChat/core.py | sierra-m/TangoBot | 2a7eafc391a60862b660efaee5c3b9b1c78fac8e | [
"MIT"
] | null | null | null | from typing import List
import random
import re
class Concept:
def __init__(self, name: str, options: List[str]):
self.name = name.lower()
self.options = options
def next(self):
return random.choice(self.options)
def render_to(self, template):
return [template.format(option) for option in self.options]
def __iter__(self):
i = 0
while i < len(self.options):
yield self.options[i]
i += 1
class ConceptLibrary:
def __init__(self, concepts : List[Concept] = []):
self.concepts = concepts
def add(self, name: str, options: List[str]):
self.concepts.append(Concept(name, options))
def get(self, term):
term = term.lower()
for concept in self.concepts:
if concept.name == term:
return concept
class Response:
def __init__(self, choices: List[str]):
self.choices = choices
self.iter = 0
def next(self, concepts: ConceptLibrary):
choice = self.choices[self.iter]
self.iter += 1
if self.iter >= len(self.choices):
self.iter = 0
matches = re.findall(r'~([a-z0-9]+)\b', choice, re.I)
if matches:
for match in matches:
valid_concept = concepts.get(match)
if valid_concept:
choice = re.sub('~{}'.format(match), valid_concept.next(), choice, flags=re.I)
break # out of match loop
return choice
class DialogNode:
def __init__(self, **kwargs):
self.triggers = kwargs.get('triggers', [])
self.response = kwargs.get('response', None)
self.proposal = kwargs.get('proposal', False)
self.level = kwargs.get('level', None)
self.scope = [] # type:List[DialogNode]
self.parent = None
self.triggers = [t.lower() for t in self.triggers]
def triggers_on(self, phrase: str, concepts: ConceptLibrary):
for node in self.scope: # type:DialogNode
if node.triggers and not node.proposal:
if phrase.lower() in node.triggers:
return node
def render_triggers(self, concepts: ConceptLibrary):
possible = []
for trigger in self.triggers:
matches = re.findall(r'~([a-z0-9]+)\b', trigger, re.I)
if matches:
for match in matches:
valid_concept = concepts.get(match)
if valid_concept:
template = trigger.replace('~{}'.format(match), '{}')
possible.extend(valid_concept.render_to(template))
else:
possible.append(trigger)
self.triggers = possible
def add_node(self, node):
self.scope.append(node)
def next_response(self, concept_lib: ConceptLibrary):
return self.response.next(concept_lib)
| 29.606061 | 98 | 0.569089 | from typing import List
import random
import re
class Concept:
def __init__(self, name: str, options: List[str]):
self.name = name.lower()
self.options = options
def next(self):
return random.choice(self.options)
def render_to(self, template):
return [template.format(option) for option in self.options]
def __iter__(self):
i = 0
while i < len(self.options):
yield self.options[i]
i += 1
class ConceptLibrary:
def __init__(self, concepts : List[Concept] = []):
self.concepts = concepts
def add(self, name: str, options: List[str]):
self.concepts.append(Concept(name, options))
def get(self, term):
term = term.lower()
for concept in self.concepts:
if concept.name == term:
return concept
class Response:
def __init__(self, choices: List[str]):
self.choices = choices
self.iter = 0
def next(self, concepts: ConceptLibrary):
choice = self.choices[self.iter]
self.iter += 1
if self.iter >= len(self.choices):
self.iter = 0
matches = re.findall(r'~([a-z0-9]+)\b', choice, re.I)
if matches:
for match in matches:
valid_concept = concepts.get(match)
if valid_concept:
choice = re.sub('~{}'.format(match), valid_concept.next(), choice, flags=re.I)
break
return choice
class DialogNode:
def __init__(self, **kwargs):
self.triggers = kwargs.get('triggers', [])
self.response = kwargs.get('response', None)
self.proposal = kwargs.get('proposal', False)
self.level = kwargs.get('level', None)
self.scope = []
self.parent = None
self.triggers = [t.lower() for t in self.triggers]
def triggers_on(self, phrase: str, concepts: ConceptLibrary):
for node in self.scope:
if node.triggers and not node.proposal:
if phrase.lower() in node.triggers:
return node
def render_triggers(self, concepts: ConceptLibrary):
possible = []
for trigger in self.triggers:
matches = re.findall(r'~([a-z0-9]+)\b', trigger, re.I)
if matches:
for match in matches:
valid_concept = concepts.get(match)
if valid_concept:
template = trigger.replace('~{}'.format(match), '{}')
possible.extend(valid_concept.render_to(template))
else:
possible.append(trigger)
self.triggers = possible
def add_node(self, node):
self.scope.append(node)
def next_response(self, concept_lib: ConceptLibrary):
return self.response.next(concept_lib)
| true | true |
f713c8c7f32ad47156ece2c400acb571c9609d70 | 5,669 | py | Python | tests/test_plantuml_markdown.py | getnikola/plugins | c9305572359263c719b19dc17bb6770521e08dee | [
"MIT"
] | 53 | 2015-08-14T20:28:05.000Z | 2021-06-02T00:38:23.000Z | tests/test_plantuml_markdown.py | matthew16550/nikola-plugins | fbd784ea737419dd2b5b7365549bf3cf57d66700 | [
"MIT"
] | 280 | 2015-01-10T15:57:44.000Z | 2022-03-27T20:47:08.000Z | tests/test_plantuml_markdown.py | matthew16550/nikola-plugins | fbd784ea737419dd2b5b7365549bf3cf57d66700 | [
"MIT"
] | 93 | 2015-01-26T19:39:02.000Z | 2022-03-24T17:12:42.000Z | import os
import sys
import pytest
from pytest import fixture
if sys.version_info < (3, 6):
raise pytest.skip("plantuml_markdown plugin requires Python >= 3.6", allow_module_level=True)
from tests import V8_PLUGIN_PATH
from tests.conftest import CompileResult
from v8.plantuml_markdown.plantuml_markdown import PlantUmlMarkdownProcessor, first_line_for_listing_block
def test_svg(do_fence_test):
with do_fence_test('plantuml') as compiled:
assert set(compiled.document.xpath('//svg//text/text()')) == {'Alice', 'Bob', 'hello1', 'hello2'}
assert '<?xml' not in compiled.raw_html
def test_listing(do_fence_test):
with do_fence_test('{ .plantuml listing }') as compiled:
assert compiled.document.xpath('//pre/text()') == [(
'Alice -> Bob : hello1\n'
'Bob -> Alice : hello2\n'
)]
def test_id(do_fence_test):
with do_fence_test('{ .plantuml svg+listing #foo }') as compiled:
assert compiled.document.xpath('/html/body/div/@id') == ['foo']
assert compiled.document.xpath('//pre/a/@name') == ['foo-1', 'foo-2']
assert compiled.raw_html.count('foo') == 5 # ensure the id is not anywhere unexpected
def test_line_numbering(do_fence_test):
with do_fence_test('{ .plantuml listing #foo linenos=y }') as compiled:
assert compiled.document.xpath('//table/tr//code/@data-line-number') == ['1', '2']
assert compiled.document.xpath('//table/tr//a/@href') == ['#foo-1', '#foo-2']
def test_line_highlighting(do_fence_test):
with do_fence_test('{ .plantuml listing hl_lines="1 2" }') as compiled:
assert len(compiled.document.xpath('//pre/span[@class="hll"]')) == 2
def test_svg_and_listing(do_fence_test):
with do_fence_test('{ .plantuml svg+listing }') as compiled:
assert [e.tag for e in compiled.document.xpath('/html/body/div/div/*')] == ['svg', 'pre']
def test_listing_and_svg(do_fence_test):
with do_fence_test('{ .plantuml listing+svg }') as compiled:
assert [e.tag for e in compiled.document.xpath('/html/body/div/div/*')] == ['pre', 'svg']
def test_prefix(do_compile_test):
with do_compile_test("""\
```plantuml-prefix
title Title 1
footer Footer 1
```
```plantuml
Participant foo
```
```plantuml
Participant bar
```
```plantuml-prefix
title Title 2
' no footer this time
```
```plantuml
Participant baz
```
""") as compiled:
text = compiled.document.xpath('//svg//text/text()')
assert text.count('Title 1') == 2
assert text.count('Footer 1') == 2
assert text.count('Title 2') == 1
def test_with_other_markdown(do_compile_test):
with do_compile_test("""\
# Heading
```plantuml
Participant foo
```
```python
# comment
```
""") as compiled:
assert compiled.document.xpath('//h1/text()') == ['Heading']
assert compiled.document.xpath('//svg//text/text()') == ['foo']
assert compiled.document.xpath('//pre//span[@class="c1"]/text()') == ['# comment']
def test_plantuml_syntax_error(do_compile_test):
with do_compile_test("""\
```plantuml
this line is bad
```
""", plantuml_continue_after_failure=True) as compiled:
text = compiled.document.xpath('//svg//text/text()')
assert '[From string (line 2) ]' in text
assert 'this line is bad' in text
assert 'Syntax Error?' in text
@pytest.mark.parametrize('line, expected', [
(
'```plantuml',
'```text',
),
(
'```.plantuml hl_lines="3 4"',
'```text hl_lines="3 4"',
),
(
'```{.plantuml}',
'```{.text}',
),
(
'```{ .plantuml #bar }',
'```{ .text anchor_ref=bar }',
),
(
'```{ .plantuml #bad<>&chars }',
'```{ .text anchor_ref=badchars }',
),
(
'```{ .plantuml #bar .foo linenos=y }',
'```{ .text anchor_ref=bar .foo linenos=y }',
),
])
def test_first_line_for_listing_block(line, expected):
match = PlantUmlMarkdownProcessor.FENCED_BLOCK_RE.search(line + '\n```')
assert match
assert first_line_for_listing_block(match) == expected
@fixture
def do_compile_test(basic_compile_test):
def f(data: str, plantuml_continue_after_failure=False) -> CompileResult:
return basic_compile_test(
'.md',
data,
extra_config={
'PLANTUML_DEBUG': True,
'PLANTUML_CONTINUE_AFTER_FAILURE': plantuml_continue_after_failure,
'PLANTUML_EXEC': os.environ.get('PLANTUML_EXEC', 'plantuml').split(),
'PLANTUML_MARKDOWN_ARGS': [
'-chide footbox',
'-nometadata',
'-Sshadowing=false',
],
},
extra_plugins_dirs=[
V8_PLUGIN_PATH / 'plantuml',
V8_PLUGIN_PATH / 'plantuml_markdown',
]
)
return f
@fixture
def do_fence_test(do_compile_test):
def f(fence: str) -> CompileResult:
return do_compile_test("""\
```{}
Alice -> Bob : hello1
Bob -> Alice : hello2
```
""".format(fence))
return f
| 30.809783 | 106 | 0.550362 | import os
import sys
import pytest
from pytest import fixture
if sys.version_info < (3, 6):
raise pytest.skip("plantuml_markdown plugin requires Python >= 3.6", allow_module_level=True)
from tests import V8_PLUGIN_PATH
from tests.conftest import CompileResult
from v8.plantuml_markdown.plantuml_markdown import PlantUmlMarkdownProcessor, first_line_for_listing_block
def test_svg(do_fence_test):
with do_fence_test('plantuml') as compiled:
assert set(compiled.document.xpath('//svg//text/text()')) == {'Alice', 'Bob', 'hello1', 'hello2'}
assert '<?xml' not in compiled.raw_html
def test_listing(do_fence_test):
with do_fence_test('{ .plantuml listing }') as compiled:
assert compiled.document.xpath('//pre/text()') == [(
'Alice -> Bob : hello1\n'
'Bob -> Alice : hello2\n'
)]
def test_id(do_fence_test):
with do_fence_test('{ .plantuml svg+listing #foo }') as compiled:
assert compiled.document.xpath('/html/body/div/@id') == ['foo']
assert compiled.document.xpath('//pre/a/@name') == ['foo-1', 'foo-2']
assert compiled.raw_html.count('foo') == 5
def test_line_numbering(do_fence_test):
with do_fence_test('{ .plantuml listing #foo linenos=y }') as compiled:
assert compiled.document.xpath('//table/tr//code/@data-line-number') == ['1', '2']
assert compiled.document.xpath('//table/tr//a/@href') == ['#foo-1', '#foo-2']
def test_line_highlighting(do_fence_test):
with do_fence_test('{ .plantuml listing hl_lines="1 2" }') as compiled:
assert len(compiled.document.xpath('//pre/span[@class="hll"]')) == 2
def test_svg_and_listing(do_fence_test):
with do_fence_test('{ .plantuml svg+listing }') as compiled:
assert [e.tag for e in compiled.document.xpath('/html/body/div/div/*')] == ['svg', 'pre']
def test_listing_and_svg(do_fence_test):
with do_fence_test('{ .plantuml listing+svg }') as compiled:
assert [e.tag for e in compiled.document.xpath('/html/body/div/div/*')] == ['pre', 'svg']
def test_prefix(do_compile_test):
with do_compile_test("""\
```plantuml-prefix
title Title 1
footer Footer 1
```
```plantuml
Participant foo
```
```plantuml
Participant bar
```
```plantuml-prefix
title Title 2
' no footer this time
```
```plantuml
Participant baz
```
""") as compiled:
text = compiled.document.xpath('//svg//text/text()')
assert text.count('Title 1') == 2
assert text.count('Footer 1') == 2
assert text.count('Title 2') == 1
def test_with_other_markdown(do_compile_test):
with do_compile_test("""\
# Heading
```plantuml
Participant foo
```
```python
# comment
```
""") as compiled:
assert compiled.document.xpath('//h1/text()') == ['Heading']
assert compiled.document.xpath('//svg//text/text()') == ['foo']
assert compiled.document.xpath('//pre//span[@class="c1"]/text()') == ['
def test_plantuml_syntax_error(do_compile_test):
with do_compile_test("""\
```plantuml
this line is bad
```
""", plantuml_continue_after_failure=True) as compiled:
text = compiled.document.xpath('//svg//text/text()')
assert '[From string (line 2) ]' in text
assert 'this line is bad' in text
assert 'Syntax Error?' in text
@pytest.mark.parametrize('line, expected', [
(
'```plantuml',
'```text',
),
(
'```.plantuml hl_lines="3 4"',
'```text hl_lines="3 4"',
),
(
'```{.plantuml}',
'```{.text}',
),
(
'```{ .plantuml
'```{ .text anchor_ref=bar }',
),
(
'```{ .plantuml
'```{ .text anchor_ref=badchars }',
),
(
'```{ .plantuml
'```{ .text anchor_ref=bar .foo linenos=y }',
),
])
def test_first_line_for_listing_block(line, expected):
match = PlantUmlMarkdownProcessor.FENCED_BLOCK_RE.search(line + '\n```')
assert match
assert first_line_for_listing_block(match) == expected
@fixture
def do_compile_test(basic_compile_test):
def f(data: str, plantuml_continue_after_failure=False) -> CompileResult:
return basic_compile_test(
'.md',
data,
extra_config={
'PLANTUML_DEBUG': True,
'PLANTUML_CONTINUE_AFTER_FAILURE': plantuml_continue_after_failure,
'PLANTUML_EXEC': os.environ.get('PLANTUML_EXEC', 'plantuml').split(),
'PLANTUML_MARKDOWN_ARGS': [
'-chide footbox',
'-nometadata',
'-Sshadowing=false',
],
},
extra_plugins_dirs=[
V8_PLUGIN_PATH / 'plantuml',
V8_PLUGIN_PATH / 'plantuml_markdown',
]
)
return f
@fixture
def do_fence_test(do_compile_test):
def f(fence: str) -> CompileResult:
return do_compile_test("""\
```{}
Alice -> Bob : hello1
Bob -> Alice : hello2
```
""".format(fence))
return f
| true | true |
f713c9b41c726431974465bcc7603f9e84650f97 | 15,254 | py | Python | tests/exceptions.py | skulegirl/pyxero | 700d331392fada72ec0a9490e9c4896048a603b2 | [
"BSD-3-Clause"
] | null | null | null | tests/exceptions.py | skulegirl/pyxero | 700d331392fada72ec0a9490e9c4896048a603b2 | [
"BSD-3-Clause"
] | null | null | null | tests/exceptions.py | skulegirl/pyxero | 700d331392fada72ec0a9490e9c4896048a603b2 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import unicode_literals
import unittest
from datetime import date
from mock import Mock, patch
from xero import Xero
from xero.exceptions import (
XeroBadRequest,
XeroExceptionUnknown,
XeroForbidden,
XeroInternalError,
XeroNotAvailable,
XeroNotFound,
XeroNotImplemented,
XeroRateLimitExceeded,
XeroUnauthorized,
)
from . import mock_data
class ExceptionsTest(unittest.TestCase):
    """Map Xero API error responses onto the library's exception hierarchy.

    Every test stubs the underlying ``requests`` call with a canned error
    response and checks that the client raises the matching
    ``xero.exceptions`` subclass, that the human-readable message and the
    parsed ``errors`` list are extracted, and that the raw HTTP response
    object is preserved on the exception for callers to inspect.
    """

    @patch("requests.put")
    def test_bad_request(self, r_put):
        "Data with validation errors raises a bad request exception"
        # Verified response from the live API
        head = dict()
        head["content-type"] = "text/xml; charset=utf-8"
        r_put.return_value = Mock(
            status_code=400,
            encoding="utf-8",
            text=mock_data.bad_request_text,
            headers=head,
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.invoices.put(
                {
                    "Type": "ACCREC",
                    "LineAmountTypes": "Exclusive",
                    "Date": date(2013, 4, 29),
                    "DueDate": date(2013, 4, 29),
                    "Reference": "Order # 123456",
                    "Status": "PAID",
                    "AmountPaid": "19.05",
                    "TotalTax": "1.05",
                    "AmountDue": "0.00",
                    "Total": "19.05",
                    "SubTotal": "18.00",
                }
            )
            self.fail("Should raise a XeroBadRequest.")
        except XeroBadRequest as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "A validation exception occurred")
            self.assertEqual(
                e.errors,
                [
                    "One or more line items must be specified",
                    "Invoice not of valid status for creation",
                    "A Contact must be specified for this type of transaction",
                ],
            )
            # The response has also been stored
            self.assertEqual(e.response.status_code, 400)
            self.assertTrue(e.response.text.startswith("<ApiException"))
        except Exception as e:
            self.fail("Should raise a XeroBadRequest, not %s" % e)

    @patch("requests.put")
    def test_bad_request_invalid_response(self, r_put):
        "If the error response from the backend is malformed (or truncated), raise a XeroExceptionUnknown"
        head = {"content-type": "text/xml; charset=utf-8"}
        # Same error as before, but the response got cut off prematurely
        bad_response = mock_data.bad_request_text[:1000]
        r_put.return_value = Mock(
            status_code=400, encoding="utf-8", text=bad_response, headers=head
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        with self.assertRaises(
            XeroExceptionUnknown, msg="Should raise a XeroExceptionUnknown"
        ):
            xero.invoices.put(
                {
                    "Type": "ACCREC",
                    "LineAmountTypes": "Exclusive",
                    "Date": date(2013, 4, 29),
                    "DueDate": date(2013, 4, 29),
                    "Reference": "Order # 123456",
                    "Status": "PAID",
                    "AmountPaid": "19.05",
                    "TotalTax": "1.05",
                    "AmountDue": "0.00",
                    "Total": "19.05",
                    "SubTotal": "18.00",
                }
            )

    @patch("requests.get")
    def test_unregistered_app(self, r_get):
        "An app without a signature raises a BadRequest exception, but with HTML payload"
        # Verified response from the live API
        head = dict()
        head["content-type"] = "text/html; charset=utf-8"
        r_get.return_value = Mock(
            status_code=400,
            text="oauth_problem=signature_method_rejected&oauth_problem_advice=No%20certificates%20have%20been%20registered%20for%20the%20consumer",
            headers=head,
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            # BUGFIX: this branch expects XeroBadRequest, not XeroUnauthorized.
            self.fail("Should raise a XeroBadRequest.")
        except XeroBadRequest as e:
            # Error messages have been extracted
            self.assertEqual(
                str(e), "No certificates have been registered for the consumer"
            )
            self.assertEqual(e.errors[0], "signature_method_rejected")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 400)
            self.assertEqual(
                e.response.text,
                "oauth_problem=signature_method_rejected&oauth_problem_advice=No%20certificates%20have%20been%20registered%20for%20the%20consumer",
            )
        except Exception as e:
            self.fail("Should raise a XeroBadRequest, not %s" % e)

    @patch("requests.get")
    def test_unauthorized_invalid(self, r_get):
        "A session with an invalid token raises an unauthorized exception"
        # Verified response from the live API
        r_get.return_value = Mock(
            status_code=401,
            text="oauth_problem=signature_invalid&oauth_problem_advice=Failed%20to%20validate%20signature",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroUnauthorized.")
        except XeroUnauthorized as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "Failed to validate signature")
            self.assertEqual(e.errors[0], "signature_invalid")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 401)
            self.assertEqual(
                e.response.text,
                "oauth_problem=signature_invalid&oauth_problem_advice=Failed%20to%20validate%20signature",
            )
        except Exception as e:
            self.fail("Should raise a XeroUnauthorized, not %s" % e)

    @patch("requests.get")
    def test_unauthorized_expired(self, r_get):
        "A session with an expired token raises an unauthorized exception"
        # Verified response from the live API
        r_get.return_value = Mock(
            status_code=401,
            text="oauth_problem=token_expired&oauth_problem_advice=The%20access%20token%20has%20expired",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroUnauthorized.")
        except XeroUnauthorized as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The access token has expired")
            self.assertEqual(e.errors[0], "token_expired")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 401)
            self.assertEqual(
                e.response.text,
                "oauth_problem=token_expired&oauth_problem_advice=The%20access%20token%20has%20expired",
            )
        except Exception as e:
            self.fail("Should raise a XeroUnauthorized, not %s" % e)

    @patch("requests.get")
    def test_forbidden(self, r_get):
        "In case of an SSL failure, a Forbidden exception is raised"
        # This is unconfirmed; haven't been able to verify this response from API.
        r_get.return_value = Mock(
            status_code=403, text="The client SSL certificate was not valid."
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroForbidden.")
        except XeroForbidden as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The client SSL certificate was not valid.")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 403)
            self.assertEqual(
                e.response.text, "The client SSL certificate was not valid."
            )
        except Exception as e:
            self.fail("Should raise a XeroForbidden, not %s" % e)

    @patch("requests.get")
    def test_not_found(self, r_get):
        "If you request an object that doesn't exist, a Not Found exception is raised"
        # Verified response from the live API
        r_get.return_value = Mock(
            status_code=404, text="The resource you're looking for cannot be found"
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.get(id="deadbeef")
            self.fail("Should raise a XeroNotFound.")
        except XeroNotFound as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The resource you're looking for cannot be found")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 404)
            self.assertEqual(
                e.response.text, "The resource you're looking for cannot be found"
            )
        except Exception as e:
            self.fail("Should raise a XeroNotFound, not %s" % e)

    @patch("requests.get")
    def test_rate_limit_exceeded_429(self, r_get):
        "If you exceed the rate limit, an exception is raised."
        # Response based off Xero documentation; not confirmed by reality.
        r_get.return_value = Mock(
            status_code=429,
            headers={"X-Rate-Limit-Problem": "day"},
            text="oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroRateLimitExceeded.")
        except XeroRateLimitExceeded as e:
            # Error messages have been extracted, including the exceeded limit
            # reported through the X-Rate-Limit-Problem header.
            self.assertEqual(str(e), "please wait before retrying the xero api, the limit exceeded is: day")
            self.assertIn("rate limit exceeded", e.errors[0])
            # The response has also been stored
            self.assertEqual(e.response.status_code, 429)
            self.assertEqual(
                e.response.text,
                "oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
            )
        except Exception as e:
            self.fail("Should raise a XeroRateLimitExceeded, not %s" % e)

    @patch("requests.get")
    def test_internal_error(self, r_get):
        "A 500 response raises an internal error exception"
        # This is unconfirmed; haven't been able to verify this response from API.
        r_get.return_value = Mock(
            status_code=500,
            text="An unhandled error with the Xero API occurred. Contact the Xero API team if problems persist.",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroInternalError.")
        except XeroInternalError as e:
            # Error messages have been extracted
            self.assertEqual(
                str(e),
                "An unhandled error with the Xero API occurred. Contact the Xero API team if problems persist.",
            )
            # The response has also been stored
            self.assertEqual(e.response.status_code, 500)
            self.assertEqual(
                e.response.text,
                "An unhandled error with the Xero API occurred. Contact the Xero API team if problems persist.",
            )
        except Exception as e:
            self.fail("Should raise a XeroInternalError, not %s" % e)

    @patch("requests.post")
    def test_not_implemented(self, r_post):
        "Calling an unimplemented API method raises a NotImplemented exception"
        # Verified response from the live API
        r_post.return_value = Mock(
            status_code=501, encoding="utf-8", text=mock_data.not_implemented_text
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.organisations.save({})
            self.fail("Should raise a XeroNotImplemented.")
        except XeroNotImplemented as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The Api Method called is not implemented")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 501)
            # BUGFIX: the method must be *called*; passing the bound method as
            # the assertion value made this check vacuously true.
            self.assertTrue(e.response.text.startswith("<ApiException"))
        except Exception as e:
            self.fail("Should raise a XeroNotImplemented, not %s" % e)

    @patch("requests.get")
    def test_rate_limit_exceeded(self, r_get):
        "If you exceed the rate limit, an exception is raised."
        # Response based off Xero documentation; not confirmed by reality.
        r_get.return_value = Mock(
            status_code=503,
            text="oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroRateLimitExceeded.")
        except XeroRateLimitExceeded as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "please wait before retrying the xero api")
            self.assertEqual(e.errors[0], "rate limit exceeded")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 503)
            self.assertEqual(
                e.response.text,
                "oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
            )
        except Exception as e:
            self.fail("Should raise a XeroRateLimitExceeded, not %s" % e)

    @patch("requests.get")
    def test_not_available(self, r_get):
        "If Xero goes down for maintenance, an exception is raised"
        # Response based off Xero documentation; not confirmed by reality.
        r_get.return_value = Mock(
            status_code=503, text="The Xero API is currently offline for maintenance"
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroNotAvailable.")
        except XeroNotAvailable as e:
            # Error messages have been extracted
            self.assertEqual(
                str(e), "The Xero API is currently offline for maintenance"
            )
            # The response has also been stored
            self.assertEqual(e.response.status_code, 503)
            self.assertEqual(
                e.response.text, "The Xero API is currently offline for maintenance"
            )
        except Exception as e:
            self.fail("Should raise a XeroNotAvailable, not %s" % e)
| 37.571429 | 148 | 0.592631 | from __future__ import unicode_literals
import unittest
from datetime import date
from mock import Mock, patch
from xero import Xero
from xero.exceptions import (
XeroBadRequest,
XeroExceptionUnknown,
XeroForbidden,
XeroInternalError,
XeroNotAvailable,
XeroNotFound,
XeroNotImplemented,
XeroRateLimitExceeded,
XeroUnauthorized,
)
from . import mock_data
class ExceptionsTest(unittest.TestCase):
    """Map Xero API error responses onto the library's exception hierarchy.

    Every test stubs the underlying ``requests`` call with a canned error
    response and checks that the client raises the matching
    ``xero.exceptions`` subclass, that the human-readable message and the
    parsed ``errors`` list are extracted, and that the raw HTTP response
    object is preserved on the exception for callers to inspect.
    """

    @patch("requests.put")
    def test_bad_request(self, r_put):
        "Data with validation errors raises a bad request exception"
        # Verified response from the live API
        head = dict()
        head["content-type"] = "text/xml; charset=utf-8"
        r_put.return_value = Mock(
            status_code=400,
            encoding="utf-8",
            text=mock_data.bad_request_text,
            headers=head,
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.invoices.put(
                {
                    "Type": "ACCREC",
                    "LineAmountTypes": "Exclusive",
                    "Date": date(2013, 4, 29),
                    "DueDate": date(2013, 4, 29),
                    "Reference": "Order # 123456",
                    "Status": "PAID",
                    "AmountPaid": "19.05",
                    "TotalTax": "1.05",
                    "AmountDue": "0.00",
                    "Total": "19.05",
                    "SubTotal": "18.00",
                }
            )
            self.fail("Should raise a XeroBadRequest.")
        except XeroBadRequest as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "A validation exception occurred")
            self.assertEqual(
                e.errors,
                [
                    "One or more line items must be specified",
                    "Invoice not of valid status for creation",
                    "A Contact must be specified for this type of transaction",
                ],
            )
            # The response has also been stored
            self.assertEqual(e.response.status_code, 400)
            self.assertTrue(e.response.text.startswith("<ApiException"))
        except Exception as e:
            self.fail("Should raise a XeroBadRequest, not %s" % e)

    @patch("requests.put")
    def test_bad_request_invalid_response(self, r_put):
        "If the error response from the backend is malformed (or truncated), raise a XeroExceptionUnknown"
        head = {"content-type": "text/xml; charset=utf-8"}
        # Same error as before, but the response got cut off prematurely
        bad_response = mock_data.bad_request_text[:1000]
        r_put.return_value = Mock(
            status_code=400, encoding="utf-8", text=bad_response, headers=head
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        with self.assertRaises(
            XeroExceptionUnknown, msg="Should raise a XeroExceptionUnknown"
        ):
            xero.invoices.put(
                {
                    "Type": "ACCREC",
                    "LineAmountTypes": "Exclusive",
                    "Date": date(2013, 4, 29),
                    "DueDate": date(2013, 4, 29),
                    "Reference": "Order # 123456",
                    "Status": "PAID",
                    "AmountPaid": "19.05",
                    "TotalTax": "1.05",
                    "AmountDue": "0.00",
                    "Total": "19.05",
                    "SubTotal": "18.00",
                }
            )

    @patch("requests.get")
    def test_unregistered_app(self, r_get):
        "An app without a signature raises a BadRequest exception, but with HTML payload"
        # Verified response from the live API
        head = dict()
        head["content-type"] = "text/html; charset=utf-8"
        r_get.return_value = Mock(
            status_code=400,
            text="oauth_problem=signature_method_rejected&oauth_problem_advice=No%20certificates%20have%20been%20registered%20for%20the%20consumer",
            headers=head,
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            # BUGFIX: this branch expects XeroBadRequest, not XeroUnauthorized.
            self.fail("Should raise a XeroBadRequest.")
        except XeroBadRequest as e:
            # Error messages have been extracted
            self.assertEqual(
                str(e), "No certificates have been registered for the consumer"
            )
            self.assertEqual(e.errors[0], "signature_method_rejected")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 400)
            self.assertEqual(
                e.response.text,
                "oauth_problem=signature_method_rejected&oauth_problem_advice=No%20certificates%20have%20been%20registered%20for%20the%20consumer",
            )
        except Exception as e:
            self.fail("Should raise a XeroBadRequest, not %s" % e)

    @patch("requests.get")
    def test_unauthorized_invalid(self, r_get):
        "A session with an invalid token raises an unauthorized exception"
        r_get.return_value = Mock(
            status_code=401,
            text="oauth_problem=signature_invalid&oauth_problem_advice=Failed%20to%20validate%20signature",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroUnauthorized.")
        except XeroUnauthorized as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "Failed to validate signature")
            self.assertEqual(e.errors[0], "signature_invalid")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 401)
            self.assertEqual(
                e.response.text,
                "oauth_problem=signature_invalid&oauth_problem_advice=Failed%20to%20validate%20signature",
            )
        except Exception as e:
            self.fail("Should raise a XeroUnauthorized, not %s" % e)

    @patch("requests.get")
    def test_unauthorized_expired(self, r_get):
        "A session with an expired token raises an unauthorized exception"
        r_get.return_value = Mock(
            status_code=401,
            text="oauth_problem=token_expired&oauth_problem_advice=The%20access%20token%20has%20expired",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroUnauthorized.")
        except XeroUnauthorized as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The access token has expired")
            self.assertEqual(e.errors[0], "token_expired")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 401)
            self.assertEqual(
                e.response.text,
                "oauth_problem=token_expired&oauth_problem_advice=The%20access%20token%20has%20expired",
            )
        except Exception as e:
            self.fail("Should raise a XeroUnauthorized, not %s" % e)

    @patch("requests.get")
    def test_forbidden(self, r_get):
        "In case of an SSL failure, a Forbidden exception is raised"
        # This is unconfirmed; haven't been able to verify this response from API.
        r_get.return_value = Mock(
            status_code=403, text="The client SSL certificate was not valid."
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroForbidden.")
        except XeroForbidden as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The client SSL certificate was not valid.")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 403)
            self.assertEqual(
                e.response.text, "The client SSL certificate was not valid."
            )
        except Exception as e:
            self.fail("Should raise a XeroForbidden, not %s" % e)

    @patch("requests.get")
    def test_not_found(self, r_get):
        "If you request an object that doesn't exist, a Not Found exception is raised"
        # Verified response from the live API
        r_get.return_value = Mock(
            status_code=404, text="The resource you're looking for cannot be found"
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.get(id="deadbeef")
            self.fail("Should raise a XeroNotFound.")
        except XeroNotFound as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The resource you're looking for cannot be found")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 404)
            self.assertEqual(
                e.response.text, "The resource you're looking for cannot be found"
            )
        except Exception as e:
            self.fail("Should raise a XeroNotFound, not %s" % e)

    @patch("requests.get")
    def test_rate_limit_exceeded_429(self, r_get):
        "If you exceed the rate limit, an exception is raised."
        r_get.return_value = Mock(
            status_code=429,
            headers={"X-Rate-Limit-Problem": "day"},
            text="oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroRateLimitExceeded.")
        except XeroRateLimitExceeded as e:
            # Error messages have been extracted, including the exceeded limit
            # reported through the X-Rate-Limit-Problem header.
            self.assertEqual(str(e), "please wait before retrying the xero api, the limit exceeded is: day")
            self.assertIn("rate limit exceeded", e.errors[0])
            # The response has also been stored
            self.assertEqual(e.response.status_code, 429)
            self.assertEqual(
                e.response.text,
                "oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
            )
        except Exception as e:
            self.fail("Should raise a XeroRateLimitExceeded, not %s" % e)

    @patch("requests.get")
    def test_internal_error(self, r_get):
        "A 500 response raises an internal error exception"
        r_get.return_value = Mock(
            status_code=500,
            text="An unhandled error with the Xero API occurred. Contact the Xero API team if problems persist.",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroInternalError.")
        except XeroInternalError as e:
            # Error messages have been extracted
            self.assertEqual(
                str(e),
                "An unhandled error with the Xero API occurred. Contact the Xero API team if problems persist.",
            )
            # The response has also been stored
            self.assertEqual(e.response.status_code, 500)
            self.assertEqual(
                e.response.text,
                "An unhandled error with the Xero API occurred. Contact the Xero API team if problems persist.",
            )
        except Exception as e:
            self.fail("Should raise a XeroInternalError, not %s" % e)

    @patch("requests.post")
    def test_not_implemented(self, r_post):
        "Calling an unimplemented API method raises a NotImplemented exception"
        # Verified response from the live API
        r_post.return_value = Mock(
            status_code=501, encoding="utf-8", text=mock_data.not_implemented_text
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.organisations.save({})
            self.fail("Should raise a XeroNotImplemented.")
        except XeroNotImplemented as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "The Api Method called is not implemented")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 501)
            # BUGFIX: the method must be *called*; passing the bound method as
            # the assertion value made this check vacuously true.
            self.assertTrue(e.response.text.startswith("<ApiException"))
        except Exception as e:
            self.fail("Should raise a XeroNotImplemented, not %s" % e)

    @patch("requests.get")
    def test_rate_limit_exceeded(self, r_get):
        "If you exceed the rate limit, an exception is raised."
        # Response based off Xero documentation; not confirmed by reality.
        r_get.return_value = Mock(
            status_code=503,
            text="oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroRateLimitExceeded.")
        except XeroRateLimitExceeded as e:
            # Error messages have been extracted
            self.assertEqual(str(e), "please wait before retrying the xero api")
            self.assertEqual(e.errors[0], "rate limit exceeded")
            # The response has also been stored
            self.assertEqual(e.response.status_code, 503)
            self.assertEqual(
                e.response.text,
                "oauth_problem=rate%20limit%20exceeded&oauth_problem_advice=please%20wait%20before%20retrying%20the%20xero%20api",
            )
        except Exception as e:
            self.fail("Should raise a XeroRateLimitExceeded, not %s" % e)

    @patch("requests.get")
    def test_not_available(self, r_get):
        "If Xero goes down for maintenance, an exception is raised"
        # Response based off Xero documentation; not confirmed by reality.
        r_get.return_value = Mock(
            status_code=503, text="The Xero API is currently offline for maintenance"
        )

        credentials = Mock(base_url="")
        xero = Xero(credentials)

        try:
            xero.contacts.all()
            self.fail("Should raise a XeroNotAvailable.")
        except XeroNotAvailable as e:
            # Error messages have been extracted
            self.assertEqual(
                str(e), "The Xero API is currently offline for maintenance"
            )
            # The response has also been stored
            self.assertEqual(e.response.status_code, 503)
            self.assertEqual(
                e.response.text, "The Xero API is currently offline for maintenance"
            )
        except Exception as e:
            self.fail("Should raise a XeroNotAvailable, not %s" % e)
| true | true |
f713ca429669bcdd8b420dc01264b77dfd212b9e | 5,496 | py | Python | audio_controller/api_handler/views.py | Maxning1/MusicBox | 19b35569da31efa2d9e62aae83ddbb028a4032f7 | [
"MIT"
] | 1 | 2021-02-07T23:04:21.000Z | 2021-02-07T23:04:21.000Z | audio_controller/api_handler/views.py | Maxning1/MusicBox | 19b35569da31efa2d9e62aae83ddbb028a4032f7 | [
"MIT"
] | null | null | null | audio_controller/api_handler/views.py | Maxning1/MusicBox | 19b35569da31efa2d9e62aae83ddbb028a4032f7 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from rest_framework import generics, status
from .serializers import RoomSerializer, CreateRoomSerializer, UpdateRoomSerializer
from .models import Room
from rest_framework.views import APIView
from rest_framework.response import Response
from django.http import JsonResponse
# Create your views here.
class RoomView(generics.CreateAPIView):
    """Generic DRF endpoint that creates Room records via RoomSerializer."""
    # DRF builds the create view from these two declarations alone.
    queryset = Room.objects.all()
    serializer_class = RoomSerializer
class GetRoom(APIView):
    """Return details of a single room looked up by its ``code`` query parameter."""

    serializer_class = RoomSerializer
    lookup_url_kwarg = 'code'

    def get(self, request, format=None):
        """Fetch a room by code, annotating whether the caller is its host.

        Returns 400 when the ``code`` parameter is missing, 404 when no room
        matches, otherwise 200 with the serialized room plus ``is_host``.
        """
        code = request.GET.get(self.lookup_url_kwarg)
        # Guard clause: PEP 8 comparison with None uses ``is``/``is not``.
        if code is None:
            return Response({'Bad Request': 'Code paramater not found in request'}, status=status.HTTP_400_BAD_REQUEST)

        # .first() issues one query instead of len() + double indexing.
        room = Room.objects.filter(code=code).first()
        if room is None:
            return Response({'Room Not Found': 'Invalid Room Code.'}, status=status.HTTP_404_NOT_FOUND)

        data = RoomSerializer(room).data
        # The host is identified by the session key stored at creation time.
        data['is_host'] = self.request.session.session_key == room.host
        return Response(data, status=status.HTTP_200_OK)
class JoinRoom(APIView):
    """Attach the current session to an existing room identified by code."""

    lookup_url_kwarg = 'code'

    def post(self, request, format=None):
        """Validate the posted room code and remember it on the session.

        Returns 404 when no ``code`` key was posted, 400 for an unknown code,
        otherwise 200 after storing the code in the session.
        """
        # Make sure the visitor has a session we can store state on.
        if not self.request.session.exists(self.request.session.session_key):
            self.request.session.create()

        code = request.data.get(self.lookup_url_kwarg)
        # PEP 8: compare against None with ``is``/``is not``.
        if code is None:
            return Response({'Bad Request': 'Invalid post data, did not find a code key'}, status=status.HTTP_404_NOT_FOUND)

        # The matched room object itself was never used, only its existence.
        if Room.objects.filter(code=code).exists():
            self.request.session['room_code'] = code
            return Response({'message': 'Room Joined!'}, status=status.HTTP_200_OK)
        return Response({'Bad Request': 'Invalid Room Code'}, status=status.HTTP_400_BAD_REQUEST)
class CreateRoomView(APIView):
    """Create a room owned by the current session, or refresh the one it hosts."""

    serializer_class = CreateRoomSerializer

    def post(self, request, format=None):
        """Create (201) a new room or update (200) the session host's room.

        Invalid payloads are rejected with 400. The room code is stored on
        the session either way.
        """
        session = self.request.session
        if not session.exists(session.session_key):
            session.create()

        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            return Response({'Bad Request': 'Invalid data...'}, status=status.HTTP_400_BAD_REQUEST)

        guest_can_pause = serializer.data.get('guest_can_pause')
        votes_to_skip = serializer.data.get('votes_to_skip')
        host = session.session_key

        existing = Room.objects.filter(host=host).first()
        if existing is not None:
            # The host already owns a room: update its settings in place.
            existing.guest_can_pause = guest_can_pause
            existing.votes_to_skip = votes_to_skip
            existing.save(update_fields=['guest_can_pause', 'votes_to_skip'])
            session['room_code'] = existing.code
            return Response(RoomSerializer(existing).data, status=status.HTTP_200_OK)

        # No room for this host yet: create a fresh one.
        room = Room(host=host, guest_can_pause=guest_can_pause,
                    votes_to_skip=votes_to_skip)
        room.save()
        session['room_code'] = room.code
        return Response(RoomSerializer(room).data, status=status.HTTP_201_CREATED)
class IsUserInRoom(APIView):
    """Report which room code, if any, is stored on the current session."""

    def get(self, request, format=None):
        """Return ``{'code': <room code or None>}`` for this session."""
        session = self.request.session
        if not session.exists(session.session_key):
            session.create()
        return JsonResponse({'code': session.get('room_code')}, status=status.HTTP_200_OK)
class LeaveRoom(APIView):
    """Detach the session from its room; hosts also delete the room."""

    def post(self, request, format=None):
        """Forget the session's room code and, if hosting, delete the room."""
        if 'room_code' in self.request.session:
            self.request.session.pop('room_code')
            # If this session hosts a room, tear the whole room down too.
            hosted = Room.objects.filter(host=self.request.session.session_key).first()
            if hosted is not None:
                hosted.delete()
        return Response({'Message': 'Success'}, status=status.HTTP_200_OK)
class UpdateRoom(APIView):
    """Let a room's host change its settings after creation."""

    serializer_class = UpdateRoomSerializer

    def patch(self, request, format=None):
        """Apply new guest/vote settings to the room identified by ``code``.

        Returns 400 for invalid data, 404 for an unknown code, 403 when the
        caller is not the host, otherwise 200 with the updated room.
        """
        session = self.request.session
        if not session.exists(session.session_key):
            session.create()

        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            return Response({'Bad Request': "Invalid Data..."}, status=status.HTTP_400_BAD_REQUEST)

        room = Room.objects.filter(code=serializer.data.get('code')).first()
        if room is None:
            return Response({'msg': 'Room not found.'}, status=status.HTTP_404_NOT_FOUND)
        if room.host != session.session_key:
            # Only the creator of the room may edit it.
            return Response({'msg': 'You are not the host of this room.'}, status=status.HTTP_403_FORBIDDEN)

        room.guest_can_pause = serializer.data.get('guest_can_pause')
        room.votes_to_skip = serializer.data.get('votes_to_skip')
        room.save(update_fields=['guest_can_pause', 'votes_to_skip'])
        return Response(RoomSerializer(room).data, status=status.HTTP_200_OK)
from rest_framework import generics, status
from .serializers import RoomSerializer, CreateRoomSerializer, UpdateRoomSerializer
from .models import Room
from rest_framework.views import APIView
from rest_framework.response import Response
from django.http import JsonResponse
class RoomView(generics.CreateAPIView):
    """Generic DRF endpoint that creates Room records via RoomSerializer."""
    # DRF builds the create view from these two declarations alone.
    queryset = Room.objects.all()
    serializer_class = RoomSerializer
class GetRoom(APIView):
    """Return details of a single room looked up by its ``code`` query parameter."""

    serializer_class = RoomSerializer
    lookup_url_kwarg = 'code'

    def get(self, request, format=None):
        """Fetch a room by code, annotating whether the caller is its host.

        Returns 400 when the ``code`` parameter is missing, 404 when no room
        matches, otherwise 200 with the serialized room plus ``is_host``.
        """
        code = request.GET.get(self.lookup_url_kwarg)
        # Guard clause: PEP 8 comparison with None uses ``is``/``is not``.
        if code is None:
            return Response({'Bad Request': 'Code paramater not found in request'}, status=status.HTTP_400_BAD_REQUEST)

        # .first() issues one query instead of len() + double indexing.
        room = Room.objects.filter(code=code).first()
        if room is None:
            return Response({'Room Not Found': 'Invalid Room Code.'}, status=status.HTTP_404_NOT_FOUND)

        data = RoomSerializer(room).data
        # The host is identified by the session key stored at creation time.
        data['is_host'] = self.request.session.session_key == room.host
        return Response(data, status=status.HTTP_200_OK)
class JoinRoom(APIView):
    """Attach the current session to an existing room identified by code."""

    lookup_url_kwarg = 'code'

    def post(self, request, format=None):
        """Validate the posted room code and remember it on the session.

        Returns 404 when no ``code`` key was posted, 400 for an unknown code,
        otherwise 200 after storing the code in the session.
        """
        # Make sure the visitor has a session we can store state on.
        if not self.request.session.exists(self.request.session.session_key):
            self.request.session.create()

        code = request.data.get(self.lookup_url_kwarg)
        # PEP 8: compare against None with ``is``/``is not``.
        if code is None:
            return Response({'Bad Request': 'Invalid post data, did not find a code key'}, status=status.HTTP_404_NOT_FOUND)

        # The matched room object itself was never used, only its existence.
        if Room.objects.filter(code=code).exists():
            self.request.session['room_code'] = code
            return Response({'message': 'Room Joined!'}, status=status.HTTP_200_OK)
        return Response({'Bad Request': 'Invalid Room Code'}, status=status.HTTP_400_BAD_REQUEST)
class CreateRoomView(APIView):
    """Create a room owned by the current session, or refresh the one it hosts."""

    serializer_class = CreateRoomSerializer

    def post(self, request, format=None):
        """Create (201) a new room or update (200) the session host's room.

        Invalid payloads are rejected with 400. The room code is stored on
        the session either way.
        """
        session = self.request.session
        if not session.exists(session.session_key):
            session.create()

        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            return Response({'Bad Request': 'Invalid data...'}, status=status.HTTP_400_BAD_REQUEST)

        guest_can_pause = serializer.data.get('guest_can_pause')
        votes_to_skip = serializer.data.get('votes_to_skip')
        host = session.session_key

        existing = Room.objects.filter(host=host).first()
        if existing is not None:
            # The host already owns a room: update its settings in place.
            existing.guest_can_pause = guest_can_pause
            existing.votes_to_skip = votes_to_skip
            existing.save(update_fields=['guest_can_pause', 'votes_to_skip'])
            session['room_code'] = existing.code
            return Response(RoomSerializer(existing).data, status=status.HTTP_200_OK)

        # No room for this host yet: create a fresh one.
        room = Room(host=host, guest_can_pause=guest_can_pause,
                    votes_to_skip=votes_to_skip)
        room.save()
        session['room_code'] = room.code
        return Response(RoomSerializer(room).data, status=status.HTTP_201_CREATED)
class IsUserInRoom(APIView):
    """Report which room code, if any, is stored on the current session."""

    def get(self, request, format=None):
        """Return ``{'code': <room code or None>}`` for this session."""
        session = self.request.session
        if not session.exists(session.session_key):
            session.create()
        return JsonResponse({'code': session.get('room_code')}, status=status.HTTP_200_OK)
class LeaveRoom(APIView):
    """Detach the session from its room; hosts also delete the room."""

    def post(self, request, format=None):
        """Forget the session's room code and, if hosting, delete the room."""
        if 'room_code' in self.request.session:
            self.request.session.pop('room_code')
            # If this session hosts a room, tear the whole room down too.
            hosted = Room.objects.filter(host=self.request.session.session_key).first()
            if hosted is not None:
                hosted.delete()
        return Response({'Message': 'Success'}, status=status.HTTP_200_OK)
class UpdateRoom(APIView):
    """Let a room's host change its settings after creation."""
    serializer_class = UpdateRoomSerializer
    def patch(self, request, format=None):
        # PATCH /update-room: 400 invalid payload, 404 unknown code,
        # 403 caller is not the host, 200 with the updated room on success.
        # Ensure the caller has a session so session_key comparisons work.
        if not self.request.session.exists(self.request.session.session_key):
            self.request.session.create()
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            guest_can_pause = serializer.data.get('guest_can_pause')
            votes_to_skip = serializer.data.get('votes_to_skip')
            code = serializer.data.get('code')
            queryset = Room.objects.filter(code=code)
            if not queryset.exists():
                return Response({'msg': 'Room not found.'}, status=status.HTTP_404_NOT_FOUND)
            room = queryset[0]
            user_id = self.request.session.session_key
            # Only the session that created (hosts) the room may edit it.
            if room.host != user_id:
                return Response({'msg': 'You are not the host of this room.'}, status=status.HTTP_403_FORBIDDEN)
            room.guest_can_pause = guest_can_pause
            room.votes_to_skip = votes_to_skip
            room.save(update_fields=['guest_can_pause', 'votes_to_skip'])
            return Response(RoomSerializer(room).data, status=status.HTTP_200_OK)
return Response({'Bad Request': "Invalid Data..."}, status=status.HTTP_400_BAD_REQUEST) | true | true |
f713caaf57b9575f6fccea9f21007d77f54ac6a8 | 1,709 | py | Python | app/core/migrations/0001_initial.py | TecateWine/receipe-app-api | b181aef2abe0b9acdf2ada3f3fd3cfe674345f5f | [
"MIT"
] | null | null | null | app/core/migrations/0001_initial.py | TecateWine/receipe-app-api | b181aef2abe0b9acdf2ada3f3fd3cfe674345f5f | [
"MIT"
] | null | null | null | app/core/migrations/0001_initial.py | TecateWine/receipe-app-api | b181aef2abe0b9acdf2ada3f3fd3cfe674345f5f | [
"MIT"
] | null | null | null | # Generated by Django 2.1.15 on 2019-12-31 00:14
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration: creates the project's custom ``User`` model."""
    # First migration of this app.
    initial = True
    dependencies = [
        # The Group/Permission M2M fields below require django.contrib.auth.
        ('auth', '0009_alter_user_last_name_max_length'),
    ]
    operations = [
        # Custom user model keyed by unique email instead of a username.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('email', models.EmailField(max_length=255, unique=True)),
                ('name', models.CharField(max_length=255)),
                ('is_active', models.BooleanField(default=True)),
                ('is_staff', models.BooleanField(default=False)),
                ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
                ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| 50.264706 | 266 | 0.63897 |
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
| true | true |
f713caea696d748be4a67cfee124977a76d0d29a | 6,072 | py | Python | idrac_powerunit/idrac_powerunit.py | anita-mithran/plugins | 3c34f4b92b9c0ae985dc3b6abfb8c952aba00cd7 | [
"BSD-2-Clause"
] | null | null | null | idrac_powerunit/idrac_powerunit.py | anita-mithran/plugins | 3c34f4b92b9c0ae985dc3b6abfb8c952aba00cd7 | [
"BSD-2-Clause"
] | null | null | null | idrac_powerunit/idrac_powerunit.py | anita-mithran/plugins | 3c34f4b92b9c0ae985dc3b6abfb8c952aba00cd7 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
import json
import SNMPUtil
import argparse
### Monitoring iDRAC Servers - Powerunit Performance
### It uses snmpwalk command to get the hadrware data from the iDRAC Servers.
### SNMPUtil.py is used to get the snmp raw data and parsed to get the output json
### Download and install the latest version of Site24x7 Linux Agent. The agent will execute the plugin and push the data to the Site24x7 server
###
### Author: Anita, Zoho Corp
### Language : Python
### Tested in Ubuntu
### Tested for snmp version 2c
### OIDS for Getting Power unit Details
OIDS = {'powerunit' : ['powerUnitTable','amperageProbeLocationName','amperageProbeReading']}
### OID Attributes
hardware = {'powerunit' : ['powerUnitStateSettings','powerUnitRedundancyStatus','powerUnitStatus','amperageProbeReading']}
### Output Keys and their units
names = {'powerunit' : ['state','redundancystatus','status', {'powerconsumption':'W'}]}
class HardwareParser:
def __init__(self, hostname, snmp_version, snmp_community_str, mib_location):
self.hostname = hostname
self.snmp_version = snmp_version
self.snmp_community_str = snmp_community_str
self.mib_location = mib_location
self.hardware = ''
self.oids = ''
self.pattern = ''
def getData(self):
output_data = {}
output_data['data'] = {}
output_data['units'] = {}
for _ in OIDS:
self.hardware = _
self.oids = OIDS[self.hardware]
self.keys = set()
for _ in self.oids:
try:
### SNMPUtil module is used to get the snmp output for the input OIDS
snmpdata = SNMPUtil.SNMPPARSER('snmpwalk',self.hostname, self.snmp_version, self.snmp_community_str,_, self.mib_location, hardware[self.hardware])
### get Raw SNMP Output as a dict
self.snmp_data = snmpdata.getRawData()
### Method to parse the SNMP command output data
output_data = self.parseSNMPData(output_data)
except Exception as e:
raise Exception(e)
return output_data
### Method to parse the SNMP command output data
def parseSNMPData(self,output_data):
jsondata = output_data['data']
unitdata = output_data['units']
appendkeys = False;
if not jsondata: appendkeys = True
for _ in self.snmp_data:
for index, __ in enumerate(hardware[self.hardware]) :
if __ in _:
name = ''.join(_.split("::")[1:]).replace('"','').split(' ')[0].split('.')
elementname = name[len(name)-1] # Name
value = ''.join(_.split()[1:]).replace('"','') # Value
if appendkeys : self.keys.add(elementname);
if ':' in value:
val = value.split(':')[1:]
value = val[len(val)-1]
if __ == 'powerSupplyOutputWatts' : value = int(value)/float(10)
if __ == 'powerSupplyRatedInputWattage' : value = int(value)/float(10)
if __ == 'amperageProbeReading' : value = int(value)/float(10)
if __ == 'voltageProbeReading' : value = int(value)/float(1000)
elem = names[self.hardware][index]
attribute = '' # Attribute Name
unit = '' # Attribute Value
if type(elem) is str: # Attributes with no units specified
attribute = elem
elif type(elem) is dict: # Attributes with units
attribute = list(elem.keys())[0]
unit = elem[list(elem.keys())[0]]
key = (attribute +'_'+elementname).replace(' ','')
if appendkeys :
jsondata[key] = value
if unit!='': unitdata[key] = unit
elif elementname in self.keys :
jsondata[key] = value
if unit!='': unitdata[key] = unit
elif self.hardware== 'powerunit':
if 'System Board Pwr Consumption' in _: self.keys.add(elementname)
if (elementname in self.keys and 'amperageProbeReading' in _) : jsondata[key] = value
output_data['data'] = jsondata
output_data['units'] = unitdata
return output_data
if __name__ == '__main__':
result = {}
parser = argparse.ArgumentParser()
parser.add_argument('--hostname', help='hostname', nargs='?', default='localhost')
parser.add_argument('--snmp_version', help='snmp version', type=str, nargs='?', default="2c")
parser.add_argument('--snmp_community_str', help='snmp community version', nargs='?', default='public')
parser.add_argument('--idrac_mib_file_locn', help='idrac mib file location', nargs='?', default='')
parser.add_argument('--plugin_version', help='plugin template version', nargs='?', default='1')
parser.add_argument('--heartbeat_required', help='Enable heartbeat for monitoring', nargs='?', default="true")
args = parser.parse_args()
try:
parser = HardwareParser(args.hostname, args.snmp_version, args.snmp_community_str, args.idrac_mib_file_locn)
output = parser.getData()
result = output['data']
result['units'] = output['units']
except Exception as e:
result['msg'] = str(e)
result['plugin_version'] = args.plugin_version
result['heartbeat_required'] = args.heartbeat_required
print(json.dumps(result, indent=2, sort_keys=True))
| 42.461538 | 166 | 0.550889 |
import json
import SNMPUtil
import argparse
eption as e:
raise Exception(e)
return output_data
tdata = output_data['units']
appendkeys = False;
if not jsondata: appendkeys = True
for _ in self.snmp_data:
for index, __ in enumerate(hardware[self.hardware]) :
if __ in _:
name = ''.join(_.split("::")[1:]).replace('"','').split(' ')[0].split('.')
elementname = name[len(name)-1] # Name
value = ''.join(_.split()[1:]).replace('"','')
if appendkeys : self.keys.add(elementname);
if ':' in value:
val = value.split(':')[1:]
value = val[len(val)-1]
if __ == 'powerSupplyOutputWatts' : value = int(value)/float(10)
if __ == 'powerSupplyRatedInputWattage' : value = int(value)/float(10)
if __ == 'amperageProbeReading' : value = int(value)/float(10)
if __ == 'voltageProbeReading' : value = int(value)/float(1000)
elem = names[self.hardware][index]
attribute = ''
unit = ''
if type(elem) is str:
attribute = elem
elif type(elem) is dict:
attribute = list(elem.keys())[0]
unit = elem[list(elem.keys())[0]]
key = (attribute +'_'+elementname).replace(' ','')
if appendkeys :
jsondata[key] = value
if unit!='': unitdata[key] = unit
elif elementname in self.keys :
jsondata[key] = value
if unit!='': unitdata[key] = unit
elif self.hardware== 'powerunit':
if 'System Board Pwr Consumption' in _: self.keys.add(elementname)
if (elementname in self.keys and 'amperageProbeReading' in _) : jsondata[key] = value
output_data['data'] = jsondata
output_data['units'] = unitdata
return output_data
if __name__ == '__main__':
result = {}
parser = argparse.ArgumentParser()
parser.add_argument('--hostname', help='hostname', nargs='?', default='localhost')
parser.add_argument('--snmp_version', help='snmp version', type=str, nargs='?', default="2c")
parser.add_argument('--snmp_community_str', help='snmp community version', nargs='?', default='public')
parser.add_argument('--idrac_mib_file_locn', help='idrac mib file location', nargs='?', default='')
parser.add_argument('--plugin_version', help='plugin template version', nargs='?', default='1')
parser.add_argument('--heartbeat_required', help='Enable heartbeat for monitoring', nargs='?', default="true")
args = parser.parse_args()
try:
parser = HardwareParser(args.hostname, args.snmp_version, args.snmp_community_str, args.idrac_mib_file_locn)
output = parser.getData()
result = output['data']
result['units'] = output['units']
except Exception as e:
result['msg'] = str(e)
result['plugin_version'] = args.plugin_version
result['heartbeat_required'] = args.heartbeat_required
print(json.dumps(result, indent=2, sort_keys=True))
| true | true |
f713cb45e38bb0e36bf56e9f6b5359fe16568126 | 342 | py | Python | examples/blogengine/email_client.py | perkinslr/nevow-py3 | 69c7a64832a176955af5534febf5a405c511b5eb | [
"MIT"
] | 1 | 2019-11-11T13:58:23.000Z | 2019-11-11T13:58:23.000Z | examples/blogengine/email_client.py | perkinslr/nevow-py3 | 69c7a64832a176955af5534febf5a405c511b5eb | [
"MIT"
] | null | null | null | examples/blogengine/email_client.py | perkinslr/nevow-py3 | 69c7a64832a176955af5534febf5a405c511b5eb | [
"MIT"
] | 1 | 2019-02-28T13:46:26.000Z | 2019-02-28T13:46:26.000Z | import sys, smtplib
fromaddr = input("From: ")
toaddrs = input("To: ").split(',')
print("Enter message, end with ^D:")
msg = ''
while 1:
line = sys.stdin.readline()
if not line:
break
msg = msg + line
# The actual mail send
server = smtplib.SMTP('localhost', 2500)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
| 20.117647 | 40 | 0.640351 | import sys, smtplib
fromaddr = input("From: ")
toaddrs = input("To: ").split(',')
print("Enter message, end with ^D:")
msg = ''
while 1:
line = sys.stdin.readline()
if not line:
break
msg = msg + line
server = smtplib.SMTP('localhost', 2500)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
| true | true |
f713cb5fa957f4d1f054a5bfa70d75e73f16384a | 51,194 | py | Python | discord/client.py | PanKlipcio/discord.py | 9bb4bd8003dcabcfbedfbfc8fe0cee91543c13a5 | [
"MIT"
] | 1 | 2021-07-23T05:56:09.000Z | 2021-07-23T05:56:09.000Z | discord/client.py | PanKlipcio/discord.py | 9bb4bd8003dcabcfbedfbfc8fe0cee91543c13a5 | [
"MIT"
] | null | null | null | discord/client.py | PanKlipcio/discord.py | 9bb4bd8003dcabcfbedfbfc8fe0cee91543c13a5 | [
"MIT"
] | null | null | null | """
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import asyncio
import logging
import signal
import sys
import traceback
from typing import Any, Callable, Coroutine, Dict, Generator, Iterable, List, Optional, Sequence, TYPE_CHECKING, Tuple, TypeVar, Union
import aiohttp
from .user import User
from .invite import Invite
from .template import Template
from .widget import Widget
from .guild import Guild
from .emoji import Emoji
from .channel import _threaded_channel_factory
from .enums import ChannelType
from .mentions import AllowedMentions
from .errors import *
from .enums import Status, VoiceRegion
from .flags import ApplicationFlags, Intents
from .gateway import *
from .activity import ActivityTypes, BaseActivity, create_activity
from .voice_client import VoiceClient
from .http import HTTPClient
from .state import ConnectionState
from . import utils
from .utils import MISSING
from .object import Object
from .backoff import ExponentialBackoff
from .webhook import Webhook
from .iterators import GuildIterator
from .appinfo import AppInfo
from .ui.view import View
from .stage_instance import StageInstance
from .threads import Thread
if TYPE_CHECKING:
from .abc import SnowflakeTime, PrivateChannel, GuildChannel, Snowflake
from .channel import DMChannel
from .user import ClientUser
from .message import Message
from .member import Member
from .voice_client import VoiceProtocol
__all__ = (
'Client',
)
Coro = TypeVar('Coro', bound=Callable[..., Coroutine[Any, Any, Any]])
log: logging.Logger = logging.getLogger(__name__)
def _cancel_tasks(loop: asyncio.AbstractEventLoop) -> None:
tasks = {t for t in asyncio.all_tasks(loop=loop) if not t.done()}
if not tasks:
return
log.info('Cleaning up after %d tasks.', len(tasks))
for task in tasks:
task.cancel()
loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
log.info('All tasks finished cancelling.')
for task in tasks:
if task.cancelled():
continue
if task.exception() is not None:
loop.call_exception_handler({
'message': 'Unhandled exception during Client.run shutdown.',
'exception': task.exception(),
'task': task
})
def _cleanup_loop(loop: asyncio.AbstractEventLoop) -> None:
try:
_cancel_tasks(loop)
loop.run_until_complete(loop.shutdown_asyncgens())
finally:
log.info('Closing the event loop.')
loop.close()
class Client:
r"""Represents a client connection that connects to Discord.
This class is used to interact with the Discord WebSocket and API.
A number of options can be passed to the :class:`Client`.
Parameters
-----------
max_messages: Optional[:class:`int`]
The maximum number of messages to store in the internal message cache.
This defaults to ``1000``. Passing in ``None`` disables the message cache.
.. versionchanged:: 1.3
Allow disabling the message cache and change the default size to ``1000``.
loop: Optional[:class:`asyncio.AbstractEventLoop`]
The :class:`asyncio.AbstractEventLoop` to use for asynchronous operations.
Defaults to ``None``, in which case the default event loop is used via
:func:`asyncio.get_event_loop()`.
connector: Optional[:class:`aiohttp.BaseConnector`]
The connector to use for connection pooling.
proxy: Optional[:class:`str`]
Proxy URL.
proxy_auth: Optional[:class:`aiohttp.BasicAuth`]
An object that represents proxy HTTP Basic Authorization.
shard_id: Optional[:class:`int`]
Integer starting at ``0`` and less than :attr:`.shard_count`.
shard_count: Optional[:class:`int`]
The total number of shards.
application_id: :class:`int`
The client's application ID.
intents: :class:`Intents`
The intents that you want to enable for the session. This is a way of
disabling and enabling certain gateway events from triggering and being sent.
If not given, defaults to a regularly constructed :class:`Intents` class.
.. versionadded:: 1.5
member_cache_flags: :class:`MemberCacheFlags`
Allows for finer control over how the library caches members.
If not given, defaults to cache as much as possible with the
currently selected intents.
.. versionadded:: 1.5
chunk_guilds_at_startup: :class:`bool`
Indicates if :func:`.on_ready` should be delayed to chunk all guilds
at start-up if necessary. This operation is incredibly slow for large
amounts of guilds. The default is ``True`` if :attr:`Intents.members`
is ``True``.
.. versionadded:: 1.5
status: Optional[:class:`.Status`]
A status to start your presence with upon logging on to Discord.
activity: Optional[:class:`.BaseActivity`]
An activity to start your presence with upon logging on to Discord.
allowed_mentions: Optional[:class:`AllowedMentions`]
Control how the client handles mentions by default on every message sent.
.. versionadded:: 1.4
heartbeat_timeout: :class:`float`
The maximum numbers of seconds before timing out and restarting the
WebSocket in the case of not receiving a HEARTBEAT_ACK. Useful if
processing the initial packets take too long to the point of disconnecting
you. The default timeout is 60 seconds.
guild_ready_timeout: :class:`float`
The maximum number of seconds to wait for the GUILD_CREATE stream to end before
preparing the member cache and firing READY. The default timeout is 2 seconds.
.. versionadded:: 1.4
assume_unsync_clock: :class:`bool`
Whether to assume the system clock is unsynced. This applies to the ratelimit handling
code. If this is set to ``True``, the default, then the library uses the time to reset
a rate limit bucket given by Discord. If this is ``False`` then your system clock is
used to calculate how long to sleep for. If this is set to ``False`` it is recommended to
sync your system clock to Google's NTP server.
.. versionadded:: 1.3
Attributes
-----------
ws
The websocket gateway the client is currently connected to. Could be ``None``.
loop: :class:`asyncio.AbstractEventLoop`
The event loop that the client uses for asynchronous operations.
"""
def __init__(
self,
*,
loop: Optional[asyncio.AbstractEventLoop] = None,
**options: Any,
):
self.ws: DiscordWebSocket = None # type: ignore
self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() if loop is None else loop
self._listeners: Dict[str, List[Tuple[asyncio.Future, Callable[..., bool]]]] = {}
self.shard_id: Optional[int] = options.get('shard_id')
self.shard_count: Optional[int] = options.get('shard_count')
connector: Optional[aiohttp.BaseConnector] = options.pop('connector', None)
proxy: Optional[str] = options.pop('proxy', None)
proxy_auth: Optional[aiohttp.BasicAuth] = options.pop('proxy_auth', None)
unsync_clock: bool = options.pop('assume_unsync_clock', True)
self.http: HTTPClient = HTTPClient(connector, proxy=proxy, proxy_auth=proxy_auth, unsync_clock=unsync_clock, loop=self.loop)
self._handlers: Dict[str, Callable] = {
'ready': self._handle_ready
}
self._hooks: Dict[str, Callable] = {
'before_identify': self._call_before_identify_hook
}
self._connection: ConnectionState = self._get_state(**options)
self._connection.shard_count = self.shard_count
self._closed: bool = False
self._ready: asyncio.Event = asyncio.Event()
self._connection._get_websocket = self._get_websocket
self._connection._get_client = lambda: self
if VoiceClient.warn_nacl:
VoiceClient.warn_nacl = False
log.warning("PyNaCl is not installed, voice will NOT be supported")
# internals
def _get_websocket(self, guild_id: Optional[int] = None, *, shard_id: Optional[int] = None) -> DiscordWebSocket:
return self.ws
def _get_state(self, **options: Any) -> ConnectionState:
return ConnectionState(dispatch=self.dispatch, handlers=self._handlers,
hooks=self._hooks, http=self.http, loop=self.loop, **options)
def _handle_ready(self) -> None:
self._ready.set()
@property
def latency(self) -> float:
""":class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.
This could be referred to as the Discord WebSocket protocol latency.
"""
ws = self.ws
return float('nan') if not ws else ws.latency
def is_ws_ratelimited(self) -> bool:
""":class:`bool`: Whether the websocket is currently rate limited.
This can be useful to know when deciding whether you should query members
using HTTP or via the gateway.
.. versionadded:: 1.6
"""
if self.ws:
return self.ws.is_ratelimited()
return False
@property
def user(self) -> Optional[ClientUser]:
"""Optional[:class:`.ClientUser`]: Represents the connected client. ``None`` if not logged in."""
return self._connection.user
@property
def guilds(self) -> List[Guild]:
"""List[:class:`.Guild`]: The guilds that the connected client is a member of."""
return self._connection.guilds
@property
def emojis(self) -> List[Emoji]:
"""List[:class:`.Emoji`]: The emojis that the connected client has."""
return self._connection.emojis
@property
def cached_messages(self) -> Sequence[Message]:
"""Sequence[:class:`.Message`]: Read-only list of messages the connected client has cached.
.. versionadded:: 1.1
"""
return utils.SequenceProxy(self._connection._messages or [])
@property
def private_channels(self) -> List[PrivateChannel]:
"""List[:class:`.abc.PrivateChannel`]: The private channels that the connected client is participating on.
.. note::
This returns only up to 128 most recent private channels due to an internal working
on how Discord deals with private channels.
"""
return self._connection.private_channels
@property
def voice_clients(self) -> List[VoiceProtocol]:
"""List[:class:`.VoiceProtocol`]: Represents a list of voice connections.
These are usually :class:`.VoiceClient` instances.
"""
return self._connection.voice_clients
@property
def application_id(self) -> Optional[int]:
"""Optional[:class:`int`]: The client's application ID.
If this is not passed via ``__init__`` then this is retrieved
through the gateway when an event contains the data. Usually
after :func:`~discord.on_connect` is called.
"""
return self._connection.application_id
@property
def application_flags(self) -> ApplicationFlags:
""":class:`~discord.ApplicationFlags`: The client's application flags.
.. versionadded: 2.0
"""
return self._connection.application_flags # type: ignore
def is_ready(self) -> bool:
""":class:`bool`: Specifies if the client's internal cache is ready for use."""
return self._ready.is_set()
async def _run_event(self, coro: Callable[..., Coroutine[Any, Any, Any]], event_name: str, *args: Any, **kwargs: Any) -> None:
try:
await coro(*args, **kwargs)
except asyncio.CancelledError:
pass
except Exception:
try:
await self.on_error(event_name, *args, **kwargs)
except asyncio.CancelledError:
pass
def _schedule_event(self, coro: Callable[..., Coroutine[Any, Any, Any]], event_name: str, *args: Any, **kwargs: Any) -> asyncio.Task:
wrapped = self._run_event(coro, event_name, *args, **kwargs)
# Schedules the task
task = self.loop.create_task(wrapped)
task.set_name(f'discord.py: {event_name}')
return task
def dispatch(self, event: str, *args: Any, **kwargs: Any) -> None:
log.debug('Dispatching event %s', event)
method = 'on_' + event
listeners = self._listeners.get(event)
if listeners:
removed = []
for i, (future, condition) in enumerate(listeners):
if future.cancelled():
removed.append(i)
continue
try:
result = condition(*args)
except Exception as exc:
future.set_exception(exc)
removed.append(i)
else:
if result:
if len(args) == 0:
future.set_result(None)
elif len(args) == 1:
future.set_result(args[0])
else:
future.set_result(args)
removed.append(i)
if len(removed) == len(listeners):
self._listeners.pop(event)
else:
for idx in reversed(removed):
del listeners[idx]
try:
coro = getattr(self, method)
except AttributeError:
pass
else:
self._schedule_event(coro, method, *args, **kwargs)
async def on_error(self, event_method: str, *args: Any, **kwargs: Any) -> None:
"""|coro|
The default error handler provided by the client.
By default this prints to :data:`sys.stderr` however it could be
overridden to have a different implementation.
Check :func:`~discord.on_error` for more details.
"""
print(f'Ignoring exception in {event_method}', file=sys.stderr)
traceback.print_exc()
# hooks
async def _call_before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:
# This hook is an internal hook that actually calls the public one.
# It allows the library to have its own hook without stepping on the
# toes of those who need to override their own hook.
await self.before_identify_hook(shard_id, initial=initial)
async def before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:
"""|coro|
A hook that is called before IDENTIFYing a session. This is useful
if you wish to have more control over the synchronization of multiple
IDENTIFYing clients.
The default implementation sleeps for 5 seconds.
.. versionadded:: 1.4
Parameters
------------
shard_id: :class:`int`
The shard ID that requested being IDENTIFY'd
initial: :class:`bool`
Whether this IDENTIFY is the first initial IDENTIFY.
"""
if not initial:
await asyncio.sleep(5.0)
# login state management
async def login(self, token: str) -> None:
"""|coro|
Logs in the client with the specified credentials.
Parameters
-----------
token: :class:`str`
The authentication token. Do not prefix this token with
anything as the library will do it for you.
Raises
------
:exc:`.LoginFailure`
The wrong credentials are passed.
:exc:`.HTTPException`
An unknown HTTP related error occurred,
usually when it isn't 200 or the known incorrect credentials
passing status code.
"""
log.info('logging in using static token')
await self.http.static_login(token.strip())
async def connect(self, *, reconnect: bool = True) -> None:
"""|coro|
Creates a websocket connection and lets the websocket listen
to messages from Discord. This is a loop that runs the entire
event system and miscellaneous aspects of the library. Control
is not resumed until the WebSocket connection is terminated.
Parameters
-----------
reconnect: :class:`bool`
If we should attempt reconnecting, either due to internet
failure or a specific failure on Discord's part. Certain
disconnects that lead to bad state will not be handled (such as
invalid sharding payloads or bad tokens).
Raises
-------
:exc:`.GatewayNotFound`
If the gateway to connect to Discord is not found. Usually if this
is thrown then there is a Discord API outage.
:exc:`.ConnectionClosed`
The websocket connection has been terminated.
"""
backoff = ExponentialBackoff()
ws_params = {
'initial': True,
'shard_id': self.shard_id,
}
while not self.is_closed():
try:
coro = DiscordWebSocket.from_client(self, **ws_params)
self.ws = await asyncio.wait_for(coro, timeout=60.0)
ws_params['initial'] = False
while True:
await self.ws.poll_event()
except ReconnectWebSocket as e:
log.info('Got a request to %s the websocket.', e.op)
self.dispatch('disconnect')
ws_params.update(sequence=self.ws.sequence, resume=e.resume, session=self.ws.session_id)
continue
except (OSError,
HTTPException,
GatewayNotFound,
ConnectionClosed,
aiohttp.ClientError,
asyncio.TimeoutError) as exc:
self.dispatch('disconnect')
if not reconnect:
await self.close()
if isinstance(exc, ConnectionClosed) and exc.code == 1000:
# clean close, don't re-raise this
return
raise
if self.is_closed():
return
# If we get connection reset by peer then try to RESUME
if isinstance(exc, OSError) and exc.errno in (54, 10054):
ws_params.update(sequence=self.ws.sequence, initial=False, resume=True, session=self.ws.session_id)
continue
# We should only get this when an unhandled close code happens,
# such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc)
# sometimes, discord sends us 1000 for unknown reasons so we should reconnect
# regardless and rely on is_closed instead
if isinstance(exc, ConnectionClosed):
if exc.code == 4014:
raise PrivilegedIntentsRequired(exc.shard_id) from None
if exc.code != 1000:
await self.close()
raise
retry = backoff.delay()
log.exception("Attempting a reconnect in %.2fs", retry)
await asyncio.sleep(retry)
# Always try to RESUME the connection
# If the connection is not RESUME-able then the gateway will invalidate the session.
# This is apparently what the official Discord client does.
ws_params.update(sequence=self.ws.sequence, resume=True, session=self.ws.session_id)
async def close(self) -> None:
"""|coro|
Closes the connection to Discord.
"""
if self._closed:
return
self._closed = True
for voice in self.voice_clients:
try:
await voice.disconnect(force=True)
except Exception:
# if an error happens during disconnects, disregard it.
pass
if self.ws is not None and self.ws.open:
await self.ws.close(code=1000)
await self.http.close()
self._ready.clear()
def clear(self) -> None:
"""Clears the internal state of the bot.
After this, the bot can be considered "re-opened", i.e. :meth:`is_closed`
and :meth:`is_ready` both return ``False`` along with the bot's internal
cache cleared.
"""
self._closed = False
self._ready.clear()
self._connection.clear()
self.http.recreate()
async def start(self, token: str, *, reconnect: bool = True) -> None:
"""|coro|
A shorthand coroutine for :meth:`login` + :meth:`connect`.
Raises
-------
TypeError
An unexpected keyword argument was received.
"""
await self.login(token)
await self.connect(reconnect=reconnect)
def run(self, *args: Any, **kwargs: Any) -> None:
"""A blocking call that abstracts away the event loop
initialisation from you.
If you want more control over the event loop then this
function should not be used. Use :meth:`start` coroutine
or :meth:`connect` + :meth:`login`.
Roughly Equivalent to: ::
try:
loop.run_until_complete(start(*args, **kwargs))
except KeyboardInterrupt:
loop.run_until_complete(close())
# cancel all tasks lingering
finally:
loop.close()
.. warning::
This function must be the last function to call due to the fact that it
is blocking. That means that registration of events or anything being
called after this function call will not execute until it returns.
"""
loop = self.loop
try:
loop.add_signal_handler(signal.SIGINT, lambda: loop.stop())
loop.add_signal_handler(signal.SIGTERM, lambda: loop.stop())
except NotImplementedError:
pass
async def runner():
try:
await self.start(*args, **kwargs)
finally:
if not self.is_closed():
await self.close()
def stop_loop_on_completion(f):
loop.stop()
future = asyncio.ensure_future(runner(), loop=loop)
future.add_done_callback(stop_loop_on_completion)
try:
loop.run_forever()
except KeyboardInterrupt:
log.info('Received signal to terminate bot and event loop.')
finally:
future.remove_done_callback(stop_loop_on_completion)
log.info('Cleaning up tasks.')
_cleanup_loop(loop)
if not future.cancelled():
try:
return future.result()
except KeyboardInterrupt:
# I am unsure why this gets raised here but suppress it anyway
return None
# properties
def is_closed(self) -> bool:
""":class:`bool`: Indicates if the websocket connection is closed."""
return self._closed
@property
def activity(self) -> Optional[ActivityTypes]:
"""Optional[:class:`.BaseActivity`]: The activity being used upon
logging in.
"""
return create_activity(self._connection._activity)
@activity.setter
def activity(self, value: Optional[ActivityTypes]) -> None:
if value is None:
self._connection._activity = None
elif isinstance(value, BaseActivity):
self._connection._activity = value.to_dict()
else:
raise TypeError('activity must derive from BaseActivity.')
@property
def allowed_mentions(self) -> Optional[AllowedMentions]:
"""Optional[:class:`~discord.AllowedMentions`]: The allowed mention configuration.
.. versionadded:: 1.4
"""
return self._connection.allowed_mentions
@allowed_mentions.setter
def allowed_mentions(self, value: Optional[AllowedMentions]) -> None:
if value is None or isinstance(value, AllowedMentions):
self._connection.allowed_mentions = value
else:
raise TypeError(f'allowed_mentions must be AllowedMentions not {value.__class__!r}')
@property
def intents(self) -> Intents:
""":class:`~discord.Intents`: The intents configured for this connection.
.. versionadded:: 1.5
"""
return self._connection.intents
# helpers/getters
@property
def users(self) -> List[User]:
"""List[:class:`~discord.User`]: Returns a list of all the users the bot can see."""
return list(self._connection._users.values())
def get_channel(self, id: int) -> Optional[Union[GuildChannel, PrivateChannel]]:
"""Returns a channel with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`]]
The returned channel or ``None`` if not found.
"""
return self._connection.get_channel(id)
def get_stage_instance(self, id) -> Optional[StageInstance]:
"""Returns a stage instance with the given stage channel ID.
.. versionadded:: 2.0
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`.StageInstance`]
The returns stage instance of ``None`` if not found.
"""
from .channel import StageChannel
channel = self._connection.get_channel(id)
if isinstance(channel, StageChannel):
return channel.instance
def get_guild(self, id) -> Optional[Guild]:
"""Returns a guild with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`.Guild`]
The guild or ``None`` if not found.
"""
return self._connection._get_guild(id)
def get_user(self, id) -> Optional[User]:
"""Returns a user with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`~discord.User`]
The user or ``None`` if not found.
"""
return self._connection.get_user(id)
def get_emoji(self, id) -> Optional[Emoji]:
"""Returns an emoji with the given ID.
Parameters
-----------
id: :class:`int`
The ID to search for.
Returns
--------
Optional[:class:`.Emoji`]
The custom emoji or ``None`` if not found.
"""
return self._connection.get_emoji(id)
def get_all_channels(self) -> Generator[GuildChannel, None, None]:
"""A generator that retrieves every :class:`.abc.GuildChannel` the client can 'access'.
This is equivalent to: ::
for guild in client.guilds:
for channel in guild.channels:
yield channel
.. note::
Just because you receive a :class:`.abc.GuildChannel` does not mean that
you can communicate in said channel. :meth:`.abc.GuildChannel.permissions_for` should
be used for that.
Yields
------
:class:`.abc.GuildChannel`
A channel the client can 'access'.
"""
for guild in self.guilds:
yield from guild.channels
def get_all_members(self) -> Generator[Member, None, None]:
"""Returns a generator with every :class:`.Member` the client can see.
This is equivalent to: ::
for guild in client.guilds:
for member in guild.members:
yield member
Yields
------
:class:`.Member`
A member the client can see.
"""
for guild in self.guilds:
yield from guild.members
# listeners/waiters
async def wait_until_ready(self) -> None:
"""|coro|
Waits until the client's internal cache is all ready.
"""
await self._ready.wait()
def wait_for(
self,
event: str,
*,
check: Optional[Callable[..., bool]] = None,
timeout: Optional[float] = None,
) -> Any:
"""|coro|
Waits for a WebSocket event to be dispatched.
This could be used to wait for a user to reply to a message,
or to react to a message, or to edit a message in a self-contained
way.
The ``timeout`` parameter is passed onto :func:`asyncio.wait_for`. By default,
it does not timeout. Note that this does propagate the
:exc:`asyncio.TimeoutError` for you in case of timeout and is provided for
ease of use.
In case the event returns multiple arguments, a :class:`tuple` containing those
arguments is returned instead. Please check the
:ref:`documentation <discord-api-events>` for a list of events and their
parameters.
This function returns the **first event that meets the requirements**.
Examples
---------
Waiting for a user reply: ::
@client.event
async def on_message(message):
if message.content.startswith('$greet'):
channel = message.channel
await channel.send('Say hello!')
def check(m):
return m.content == 'hello' and m.channel == channel
msg = await client.wait_for('message', check=check)
await channel.send(f'Hello {msg.author}!')
Waiting for a thumbs up reaction from the message author: ::
@client.event
async def on_message(message):
if message.content.startswith('$thumb'):
channel = message.channel
await channel.send('Send me that \N{THUMBS UP SIGN} reaction, mate')
def check(reaction, user):
return user == message.author and str(reaction.emoji) == '\N{THUMBS UP SIGN}'
try:
reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check)
except asyncio.TimeoutError:
await channel.send('\N{THUMBS DOWN SIGN}')
else:
await channel.send('\N{THUMBS UP SIGN}')
Parameters
------------
event: :class:`str`
The event name, similar to the :ref:`event reference <discord-api-events>`,
but without the ``on_`` prefix, to wait for.
check: Optional[Callable[..., :class:`bool`]]
A predicate to check what to wait for. The arguments must meet the
parameters of the event being waited for.
timeout: Optional[:class:`float`]
The number of seconds to wait before timing out and raising
:exc:`asyncio.TimeoutError`.
Raises
-------
asyncio.TimeoutError
If a timeout is provided and it was reached.
Returns
--------
Any
Returns no arguments, a single argument, or a :class:`tuple` of multiple
arguments that mirrors the parameters passed in the
:ref:`event reference <discord-api-events>`.
"""
future = self.loop.create_future()
if check is None:
def _check(*args):
return True
check = _check
ev = event.lower()
try:
listeners = self._listeners[ev]
except KeyError:
listeners = []
self._listeners[ev] = listeners
listeners.append((future, check))
return asyncio.wait_for(future, timeout)
# event registration
def event(self, coro: Coro) -> Coro:
"""A decorator that registers an event to listen to.
You can find more info about the events on the :ref:`documentation below <discord-api-events>`.
The events must be a :ref:`coroutine <coroutine>`, if not, :exc:`TypeError` is raised.
Example
---------
.. code-block:: python3
@client.event
async def on_ready():
print('Ready!')
Raises
--------
TypeError
The coroutine passed is not actually a coroutine.
"""
if not asyncio.iscoroutinefunction(coro):
raise TypeError('event registered must be a coroutine function')
setattr(self, coro.__name__, coro)
log.debug('%s has successfully been registered as an event', coro.__name__)
return coro
async def change_presence(
self,
*,
activity: Optional[BaseActivity] = None,
status: Optional[Status] = None,
):
"""|coro|
Changes the client's presence.
Example
---------
.. code-block:: python3
game = discord.Game("with the API")
await client.change_presence(status=discord.Status.idle, activity=game)
.. versionchanged:: 2.0
Removed the ``afk`` keyword-only parameter.
Parameters
----------
activity: Optional[:class:`.BaseActivity`]
The activity being done. ``None`` if no currently active activity is done.
status: Optional[:class:`.Status`]
Indicates what status to change to. If ``None``, then
:attr:`.Status.online` is used.
Raises
------
:exc:`.InvalidArgument`
If the ``activity`` parameter is not the proper type.
"""
if status is None:
status_str = 'online'
status = Status.online
elif status is Status.offline:
status_str = 'invisible'
status = Status.offline
else:
status_str = str(status)
await self.ws.change_presence(activity=activity, status=status_str)
for guild in self._connection.guilds:
me = guild.me
if me is None:
continue
if activity is not None:
me.activities = (activity,)
else:
me.activities = ()
me.status = status
# Guild stuff
def fetch_guilds(
self,
*,
limit: Optional[int] = 100,
before: SnowflakeTime = None,
after: SnowflakeTime = None
) -> GuildIterator:
"""Retrieves an :class:`.AsyncIterator` that enables receiving your guilds.
.. note::
Using this, you will only receive :attr:`.Guild.owner`, :attr:`.Guild.icon`,
:attr:`.Guild.id`, and :attr:`.Guild.name` per :class:`.Guild`.
.. note::
This method is an API call. For general usage, consider :attr:`guilds` instead.
Examples
---------
Usage ::
async for guild in client.fetch_guilds(limit=150):
print(guild.name)
Flattening into a list ::
guilds = await client.fetch_guilds(limit=150).flatten()
# guilds is now a list of Guild...
All parameters are optional.
Parameters
-----------
limit: Optional[:class:`int`]
The number of guilds to retrieve.
If ``None``, it retrieves every guild you have access to. Note, however,
that this would make it a slow operation.
Defaults to ``100``.
before: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]
Retrieves guilds before this date or object.
If a datetime is provided, it is recommended to use a UTC aware datetime.
If the datetime is naive, it is assumed to be local time.
after: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]
Retrieve guilds after this date or object.
If a datetime is provided, it is recommended to use a UTC aware datetime.
If the datetime is naive, it is assumed to be local time.
Raises
------
:exc:`.HTTPException`
Getting the guilds failed.
Yields
--------
:class:`.Guild`
The guild with the guild data parsed.
"""
return GuildIterator(self, limit=limit, before=before, after=after)
async def fetch_template(self, code: Union[Template, str]) -> Template:
"""|coro|
Gets a :class:`.Template` from a discord.new URL or code.
Parameters
-----------
code: Union[:class:`.Template`, :class:`str`]
The Discord Template Code or URL (must be a discord.new URL).
Raises
-------
:exc:`.NotFound`
The template is invalid.
:exc:`.HTTPException`
Getting the template failed.
Returns
--------
:class:`.Template`
The template from the URL/code.
"""
code = utils.resolve_template(code)
data = await self.http.get_template(code)
return Template(data=data, state=self._connection) # type: ignore
async def fetch_guild(self, guild_id: int) -> Guild:
"""|coro|
Retrieves a :class:`.Guild` from an ID.
.. note::
Using this, you will **not** receive :attr:`.Guild.channels`, :attr:`.Guild.members`,
:attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.
.. note::
This method is an API call. For general usage, consider :meth:`get_guild` instead.
Parameters
-----------
guild_id: :class:`int`
The guild's ID to fetch from.
Raises
------
:exc:`.Forbidden`
You do not have access to the guild.
:exc:`.HTTPException`
Getting the guild failed.
Returns
--------
:class:`.Guild`
The guild from the ID.
"""
data = await self.http.get_guild(guild_id)
return Guild(data=data, state=self._connection)
async def create_guild(
self,
*,
name: str,
region: Union[VoiceRegion, str] = VoiceRegion.us_west,
icon: bytes = MISSING,
code: str = MISSING,
) -> Guild:
"""|coro|
Creates a :class:`.Guild`.
Bot accounts in more than 10 guilds are not allowed to create guilds.
Parameters
----------
name: :class:`str`
The name of the guild.
region: :class:`.VoiceRegion`
The region for the voice communication server.
Defaults to :attr:`.VoiceRegion.us_west`.
icon: Optional[:class:`bytes`]
The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit`
for more details on what is expected.
code: :class:`str`
The code for a template to create the guild with.
.. versionadded:: 1.4
Raises
------
:exc:`.HTTPException`
Guild creation failed.
:exc:`.InvalidArgument`
Invalid icon image format given. Must be PNG or JPG.
Returns
-------
:class:`.Guild`
The guild created. This is not the same guild that is
added to cache.
"""
if icon is not MISSING:
icon_base64 = utils._bytes_to_base64_data(icon)
else:
icon_base64 = None
region_value = str(region)
if code:
data = await self.http.create_from_template(code, name, region_value, icon_base64)
else:
data = await self.http.create_guild(name, region_value, icon_base64)
return Guild(data=data, state=self._connection)
async def fetch_stage_instance(self, channel_id: int) -> StageInstance:
"""|coro|
Gets a :class:`.StageInstance` for a stage channel id.
.. versionadded:: 2.0
Parameters
-----------
channel_id: :class:`int`
The stage channel ID.
Raises
-------
:exc:`.NotFound`
The stage instance or channel could not be found.
:exc:`.HTTPException`
Getting the stage instance failed.
Returns
--------
:class:`.StageInstance`
The stage instance from the stage channel ID.
"""
data = await self.http.get_stage_instance(channel_id)
guild = self.get_guild(int(data['guild_id']))
return StageInstance(guild=guild, state=self._connection, data=data) # type: ignore
# Invite management
async def fetch_invite(self, url: Union[Invite, str], *, with_counts: bool = True, with_expiration: bool = True) -> Invite:
"""|coro|
Gets an :class:`.Invite` from a discord.gg URL or ID.
.. note::
If the invite is for a guild you have not joined, the guild and channel
attributes of the returned :class:`.Invite` will be :class:`.PartialInviteGuild` and
:class:`.PartialInviteChannel` respectively.
Parameters
-----------
url: Union[:class:`.Invite`, :class:`str`]
The Discord invite ID or URL (must be a discord.gg URL).
with_counts: :class:`bool`
Whether to include count information in the invite. This fills the
:attr:`.Invite.approximate_member_count` and :attr:`.Invite.approximate_presence_count`
fields.
with_expiration: :class:`bool`
Whether to include the expiration date of the invite. This fills the
:attr:`.Invite.expires_at` field.
.. versionadded:: 2.0
Raises
-------
:exc:`.NotFound`
The invite has expired or is invalid.
:exc:`.HTTPException`
Getting the invite failed.
Returns
--------
:class:`.Invite`
The invite from the URL/ID.
"""
invite_id = utils.resolve_invite(url)
data = await self.http.get_invite(invite_id, with_counts=with_counts, with_expiration=with_expiration)
return Invite.from_incomplete(state=self._connection, data=data)
async def delete_invite(self, invite: Union[Invite, str]) -> None:
"""|coro|
Revokes an :class:`.Invite`, URL, or ID to an invite.
You must have the :attr:`~.Permissions.manage_channels` permission in
the associated guild to do this.
Parameters
----------
invite: Union[:class:`.Invite`, :class:`str`]
The invite to revoke.
Raises
-------
:exc:`.Forbidden`
You do not have permissions to revoke invites.
:exc:`.NotFound`
The invite is invalid or expired.
:exc:`.HTTPException`
Revoking the invite failed.
"""
invite_id = utils.resolve_invite(invite)
await self.http.delete_invite(invite_id)
# Miscellaneous stuff
async def fetch_widget(self, guild_id: int) -> Widget:
"""|coro|
Gets a :class:`.Widget` from a guild ID.
.. note::
The guild must have the widget enabled to get this information.
Parameters
-----------
guild_id: :class:`int`
The ID of the guild.
Raises
-------
:exc:`.Forbidden`
The widget for this guild is disabled.
:exc:`.HTTPException`
Retrieving the widget failed.
Returns
--------
:class:`.Widget`
The guild's widget.
"""
data = await self.http.get_widget(guild_id)
return Widget(state=self._connection, data=data)
async def application_info(self) -> AppInfo:
"""|coro|
Retrieves the bot's application information.
Raises
-------
:exc:`.HTTPException`
Retrieving the information failed somehow.
Returns
--------
:class:`.AppInfo`
The bot's application information.
"""
data = await self.http.application_info()
if 'rpc_origins' not in data:
data['rpc_origins'] = None
return AppInfo(self._connection, data)
async def fetch_user(self, user_id: int) -> User:
"""|coro|
Retrieves a :class:`~discord.User` based on their ID.
You do not have to share any guilds with the user to get this information,
however many operations do require that you do.
.. note::
This method is an API call. If you have :attr:`discord.Intents.members` and member cache enabled, consider :meth:`get_user` instead.
Parameters
-----------
user_id: :class:`int`
The user's ID to fetch from.
Raises
-------
:exc:`.NotFound`
A user with this ID does not exist.
:exc:`.HTTPException`
Fetching the user failed.
Returns
--------
:class:`~discord.User`
The user you requested.
"""
data = await self.http.get_user(user_id)
return User(state=self._connection, data=data)
async def fetch_channel(self, channel_id: int) -> Union[GuildChannel, PrivateChannel, Thread]:
"""|coro|
Retrieves a :class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, or :class:`.Thread` with the specified ID.
.. note::
This method is an API call. For general usage, consider :meth:`get_channel` instead.
.. versionadded:: 1.2
Raises
-------
:exc:`.InvalidData`
An unknown channel type was received from Discord.
:exc:`.HTTPException`
Retrieving the channel failed.
:exc:`.NotFound`
Invalid Channel ID.
:exc:`.Forbidden`
You do not have permission to fetch this channel.
Returns
--------
Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, :class:`.Thread`]
The channel from the ID.
"""
data = await self.http.get_channel(channel_id)
factory, ch_type = _threaded_channel_factory(data['type'])
if factory is None:
raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data))
if ch_type in (ChannelType.group, ChannelType.private):
channel = factory(me=self.user, data=data, state=self._connection)
else:
guild_id = int(data['guild_id'])
guild = self.get_guild(guild_id) or Object(id=guild_id)
channel = factory(guild=guild, state=self._connection, data=data)
return channel
async def fetch_webhook(self, webhook_id: int) -> Webhook:
"""|coro|
Retrieves a :class:`.Webhook` with the specified ID.
Raises
--------
:exc:`.HTTPException`
Retrieving the webhook failed.
:exc:`.NotFound`
Invalid webhook ID.
:exc:`.Forbidden`
You do not have permission to fetch this webhook.
Returns
---------
:class:`.Webhook`
The webhook you requested.
"""
data = await self.http.get_webhook(webhook_id)
return Webhook.from_state(data, state=self._connection)
async def create_dm(self, user: Snowflake) -> DMChannel:
"""|coro|
Creates a :class:`.DMChannel` with this user.
This should be rarely called, as this is done transparently for most
people.
.. versionadded:: 2.0
Parameters
-----------
user: :class:`~discord.abc.Snowflake`
The user to create a DM with.
Returns
-------
:class:`.DMChannel`
The channel that was created.
"""
state = self._connection
found = state._get_private_channel_by_user(user.id)
if found:
return found
data = await state.http.start_private_message(user.id)
return state.add_dm_channel(data)
def add_view(self, view: View, *, message_id: Optional[int] = None) -> None:
"""Registers a :class:`~discord.ui.View` for persistent listening.
This method should be used for when a view is comprised of components
that last longer than the lifecycle of the program.
Parameters
------------
view: :class:`discord.ui.View`
The view to register for dispatching.
message_id: Optional[:class:`int`]
The message ID that the view is attached to. This is currently used to
refresh the view's state during message update events. If not given
then message update events are not propagated for the view.
Raises
-------
TypeError
A view was not passed.
ValueError
The view is not persistent. A persistent view has no timeout
and all their components have an explicitly provided custom_id.
"""
if not isinstance(view, View):
raise TypeError(f'expected an instance of View not {view.__class__!r}')
if not view.is_persistent():
raise ValueError('View is not persistent. Items need to have a custom_id set and View must have no timeout')
self._connection.store_view(view, message_id)
@property
def persistent_views(self) -> Sequence[View]:
"""Sequence[:class:`.View`]: A sequence of persistent views added to the client."""
return self._connection.persistent_views
| 33.858466 | 144 | 0.591476 |
from __future__ import annotations
import asyncio
import logging
import signal
import sys
import traceback
from typing import Any, Callable, Coroutine, Dict, Generator, Iterable, List, Optional, Sequence, TYPE_CHECKING, Tuple, TypeVar, Union
import aiohttp
from .user import User
from .invite import Invite
from .template import Template
from .widget import Widget
from .guild import Guild
from .emoji import Emoji
from .channel import _threaded_channel_factory
from .enums import ChannelType
from .mentions import AllowedMentions
from .errors import *
from .enums import Status, VoiceRegion
from .flags import ApplicationFlags, Intents
from .gateway import *
from .activity import ActivityTypes, BaseActivity, create_activity
from .voice_client import VoiceClient
from .http import HTTPClient
from .state import ConnectionState
from . import utils
from .utils import MISSING
from .object import Object
from .backoff import ExponentialBackoff
from .webhook import Webhook
from .iterators import GuildIterator
from .appinfo import AppInfo
from .ui.view import View
from .stage_instance import StageInstance
from .threads import Thread
if TYPE_CHECKING:
from .abc import SnowflakeTime, PrivateChannel, GuildChannel, Snowflake
from .channel import DMChannel
from .user import ClientUser
from .message import Message
from .member import Member
from .voice_client import VoiceProtocol
__all__ = (
'Client',
)
Coro = TypeVar('Coro', bound=Callable[..., Coroutine[Any, Any, Any]])
log: logging.Logger = logging.getLogger(__name__)
def _cancel_tasks(loop: asyncio.AbstractEventLoop) -> None:
    """Cancel every pending task on *loop* and report unhandled exceptions.

    Used during ``Client.run`` shutdown to drain the loop before closing it.
    """
    tasks = {t for t in asyncio.all_tasks(loop=loop) if not t.done()}
    if not tasks:
        return
    log.info('Cleaning up after %d tasks.', len(tasks))
    for task in tasks:
        task.cancel()
    # Wait for the cancellations to finish; exceptions are collected, not raised.
    loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
    log.info('All tasks finished cancelling.')
    for task in tasks:
        if task.cancelled():
            continue
        if task.exception() is not None:
            # Surface failures (as opposed to cancellations) through the
            # loop's exception handler so they are not silently dropped.
            loop.call_exception_handler({
                'message': 'Unhandled exception during Client.run shutdown.',
                'exception': task.exception(),
                'task': task
            })
def _cleanup_loop(loop: asyncio.AbstractEventLoop) -> None:
    """Cancel outstanding tasks, shut down async generators, then close *loop*."""
    try:
        _cancel_tasks(loop)
        loop.run_until_complete(loop.shutdown_asyncgens())
    finally:
        # The loop is closed even if cancellation above raised.
        log.info('Closing the event loop.')
        loop.close()
class Client:
    def __init__(
        self,
        *,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **options: Any,
    ):
        """Initialise the client.

        ``loop`` defaults to :func:`asyncio.get_event_loop`; the remaining
        keyword ``options`` configure the HTTP client (``connector``,
        ``proxy``, ``proxy_auth``, ``assume_unsync_clock``), sharding
        (``shard_id``, ``shard_count``), and the connection state.
        """
        # Set once the gateway websocket is established in connect().
        self.ws: DiscordWebSocket = None
        self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop() if loop is None else loop
        # Futures registered by wait_for(), keyed by lower-cased event name.
        self._listeners: Dict[str, List[Tuple[asyncio.Future, Callable[..., bool]]]] = {}
        self.shard_id: Optional[int] = options.get('shard_id')
        self.shard_count: Optional[int] = options.get('shard_count')
        connector: Optional[aiohttp.BaseConnector] = options.pop('connector', None)
        proxy: Optional[str] = options.pop('proxy', None)
        proxy_auth: Optional[aiohttp.BasicAuth] = options.pop('proxy_auth', None)
        unsync_clock: bool = options.pop('assume_unsync_clock', True)
        self.http: HTTPClient = HTTPClient(connector, proxy=proxy, proxy_auth=proxy_auth, unsync_clock=unsync_clock, loop=self.loop)
        self._handlers: Dict[str, Callable] = {
            'ready': self._handle_ready
        }
        self._hooks: Dict[str, Callable] = {
            'before_identify': self._call_before_identify_hook
        }
        self._connection: ConnectionState = self._get_state(**options)
        self._connection.shard_count = self.shard_count
        self._closed: bool = False
        # Signalled by _handle_ready(); awaited by wait_until_ready().
        self._ready: asyncio.Event = asyncio.Event()
        self._connection._get_websocket = self._get_websocket
        self._connection._get_client = lambda: self
        if VoiceClient.warn_nacl:
            # Warn only once per process about missing voice support.
            VoiceClient.warn_nacl = False
            log.warning("PyNaCl is not installed, voice will NOT be supported")
    def _get_websocket(self, guild_id: Optional[int] = None, *, shard_id: Optional[int] = None) -> DiscordWebSocket:
        # Shard-aware lookup hook used by the connection state; an unsharded
        # client always has exactly one websocket, so the arguments are unused.
        return self.ws
    def _get_state(self, **options: Any) -> ConnectionState:
        # Build the cache/state object, wiring in dispatch and lifecycle hooks.
        return ConnectionState(dispatch=self.dispatch, handlers=self._handlers,
                               hooks=self._hooks, http=self.http, loop=self.loop, **options)
    def _handle_ready(self) -> None:
        # 'ready' gateway handler: unblocks wait_until_ready() waiters.
        self._ready.set()
    @property
    def latency(self) -> float:
        """:class:`float`: The websocket latency in seconds.

        Returns ``nan`` before the websocket is connected.
        """
        ws = self.ws
        return float('nan') if not ws else ws.latency
    def is_ws_ratelimited(self) -> bool:
        """:class:`bool`: Whether the websocket is currently rate limited.

        Returns ``False`` when no websocket connection exists yet.
        """
        if self.ws:
            return self.ws.is_ratelimited()
        return False
    @property
    def user(self) -> Optional[ClientUser]:
        """Optional[:class:`.ClientUser`]: The connected client's own user, or ``None`` before login."""
        return self._connection.user
    @property
    def guilds(self) -> List[Guild]:
        """List[:class:`.Guild`]: The guilds the connected client is a member of."""
        return self._connection.guilds
    @property
    def emojis(self) -> List[Emoji]:
        """List[:class:`.Emoji`]: The emojis the connected client has access to."""
        return self._connection.emojis
    @property
    def cached_messages(self) -> Sequence[Message]:
        """Sequence[:class:`.Message`]: Read-only proxy over the messages currently cached."""
        return utils.SequenceProxy(self._connection._messages or [])
    @property
    def private_channels(self) -> List[PrivateChannel]:
        """List[:class:`.abc.PrivateChannel`]: The private channels the client is participating in."""
        return self._connection.private_channels
    @property
    def voice_clients(self) -> List[VoiceProtocol]:
        """List[:class:`.VoiceProtocol`]: The voice connections the client is currently using."""
        return self._connection.voice_clients
    @property
    def application_id(self) -> Optional[int]:
        """Optional[:class:`int`]: The client's application ID, if known."""
        return self._connection.application_id
    @property
    def application_flags(self) -> ApplicationFlags:
        """:class:`.ApplicationFlags`: The client's application flags."""
        return self._connection.application_flags
    def is_ready(self) -> bool:
        """:class:`bool`: Whether the client's internal cache is ready."""
        return self._ready.is_set()
    async def _run_event(self, coro: Callable[..., Coroutine[Any, Any, Any]], event_name: str, *args: Any, **kwargs: Any) -> None:
        # Await a single event handler, routing exceptions to on_error and
        # swallowing cancellation so shutdown stays quiet.
        try:
            await coro(*args, **kwargs)
        except asyncio.CancelledError:
            pass
        except Exception:
            try:
                await self.on_error(event_name, *args, **kwargs)
            except asyncio.CancelledError:
                pass
    def _schedule_event(self, coro: Callable[..., Coroutine[Any, Any, Any]], event_name: str, *args: Any, **kwargs: Any) -> asyncio.Task:
        # Fire-and-forget: wrap the handler and schedule it on the event loop.
        wrapped = self._run_event(coro, event_name, *args, **kwargs)
        task = self.loop.create_task(wrapped)
        # Name the task after the event for easier debugging of pending tasks.
        task.set_name(f'discord.py: {event_name}')
        return task
    def dispatch(self, event: str, *args: Any, **kwargs: Any) -> None:
        """Resolve ``wait_for`` futures for *event* and schedule ``on_<event>``.

        Futures registered via :meth:`wait_for` are completed when their check
        passes (or failed with the check's exception); the ``on_<event>``
        handler, if defined on this client, is scheduled as a task.
        """
        log.debug('Dispatching event %s', event)
        method = 'on_' + event
        listeners = self._listeners.get(event)
        if listeners:
            removed = []
            for i, (future, condition) in enumerate(listeners):
                if future.cancelled():
                    removed.append(i)
                    continue
                try:
                    result = condition(*args)
                except Exception as exc:
                    # A faulty check propagates its error to the waiter.
                    future.set_exception(exc)
                    removed.append(i)
                else:
                    if result:
                        # Mirror the event arity: None, the single arg, or a tuple.
                        if len(args) == 0:
                            future.set_result(None)
                        elif len(args) == 1:
                            future.set_result(args[0])
                        else:
                            future.set_result(args)
                        removed.append(i)
            # Prune satisfied/cancelled waiters; drop the key when empty.
            if len(removed) == len(listeners):
                self._listeners.pop(event)
            else:
                # Delete from the end so earlier indices stay valid.
                for idx in reversed(removed):
                    del listeners[idx]
        try:
            coro = getattr(self, method)
        except AttributeError:
            pass
        else:
            self._schedule_event(coro, method, *args, **kwargs)
    async def on_error(self, event_method: str, *args: Any, **kwargs: Any) -> None:
        """|coro|

        The default event error handler: prints the traceback to stderr.

        Override this coroutine to change exception handling for events.
        """
        print(f'Ignoring exception in {event_method}', file=sys.stderr)
        traceback.print_exc()
    async def _call_before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:
        # Internal indirection so subclasses (e.g. sharded clients) can override
        # the public hook without touching the gateway plumbing.
        await self.before_identify_hook(shard_id, initial=initial)
    async def before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:
        """|coro|

        Called before IDENTIFY is sent.  Sleeps five seconds for non-initial
        identifies to respect the gateway's identify rate limit.
        """
        if not initial:
            await asyncio.sleep(5.0)
    async def login(self, token: str) -> None:
        """|coro|

        Logs in the client with the specified credentials (a bot token).
        The token is stripped of surrounding whitespace before use.
        """
        log.info('logging in using static token')
        await self.http.static_login(token.strip())
    async def connect(self, *, reconnect: bool = True) -> None:
        """|coro|

        Creates a websocket connection and polls gateway events, reconnecting
        with exponential backoff when ``reconnect`` is ``True``.  Blocks until
        the websocket connection is terminated.
        """
        backoff = ExponentialBackoff()
        ws_params = {
            'initial': True,
            'shard_id': self.shard_id,
        }
        while not self.is_closed():
            try:
                coro = DiscordWebSocket.from_client(self, **ws_params)
                self.ws = await asyncio.wait_for(coro, timeout=60.0)
                ws_params['initial'] = False
                # Poll gateway events until something interrupts the connection.
                while True:
                    await self.ws.poll_event()
            except ReconnectWebSocket as e:
                log.info('Got a request to %s the websocket.', e.op)
                self.dispatch('disconnect')
                ws_params.update(sequence=self.ws.sequence, resume=e.resume, session=self.ws.session_id)
                continue
            except (OSError,
                    HTTPException,
                    GatewayNotFound,
                    ConnectionClosed,
                    aiohttp.ClientError,
                    asyncio.TimeoutError) as exc:
                self.dispatch('disconnect')
                if not reconnect:
                    await self.close()
                    if isinstance(exc, ConnectionClosed) and exc.code == 1000:
                        # Clean close; nothing to re-raise.
                        return
                    raise
                if self.is_closed():
                    return
                # If we get connection reset by peer then try to RESUME
                if isinstance(exc, OSError) and exc.errno in (54, 10054):
                    ws_params.update(sequence=self.ws.sequence, initial=False, resume=True, session=self.ws.session_id)
                    continue
                # We should only get this when an unhandled close code happens,
                # such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc)
                # sometimes, discord sends us 1000 for unknown reasons so we should reconnect
                # regardless and rely on is_closed instead
                if isinstance(exc, ConnectionClosed):
                    if exc.code == 4014:
                        raise PrivilegedIntentsRequired(exc.shard_id) from None
                    if exc.code != 1000:
                        await self.close()
                        raise
                retry = backoff.delay()
                log.exception("Attempting a reconnect in %.2fs", retry)
                await asyncio.sleep(retry)
                # Always try to RESUME the connection
                # If the connection is not RESUME-able then the gateway will invalidate the session.
                # This is apparently what the official Discord client does.
                ws_params.update(sequence=self.ws.sequence, resume=True, session=self.ws.session_id)
    async def close(self) -> None:
        """|coro|

        Closes the connection to Discord: voice clients, the websocket, and
        the HTTP session.  Safe to call more than once.
        """
        if self._closed:
            return
        self._closed = True
        for voice in self.voice_clients:
            try:
                await voice.disconnect(force=True)
            except Exception:
                # if an error happens during disconnects, disregard it.
                pass
        if self.ws is not None and self.ws.open:
            # 1000 is the standard "normal closure" websocket close code.
            await self.ws.close(code=1000)
        await self.http.close()
        self._ready.clear()
    def clear(self) -> None:
        """Clears the internal state of the bot so it can be used again.

        Resets the closed flag, the readiness event, the cache, and recreates
        the HTTP session.
        """
        self._closed = False
        self._ready.clear()
        self._connection.clear()
        self.http.recreate()
    async def start(self, token: str, *, reconnect: bool = True) -> None:
        """|coro|

        A shorthand coroutine for :meth:`login` followed by :meth:`connect`.
        """
        await self.login(token)
        await self.connect(reconnect=reconnect)
    def run(self, *args: Any, **kwargs: Any) -> None:
        """A blocking call that starts the client and manages the event loop.

        Arguments are forwarded to :meth:`start`.  On exit (including
        KeyboardInterrupt) the loop is drained and closed, so nothing after
        this call runs until the bot stops.
        """
        loop = self.loop
        try:
            # Graceful shutdown on SIGINT/SIGTERM where the platform allows it.
            loop.add_signal_handler(signal.SIGINT, lambda: loop.stop())
            loop.add_signal_handler(signal.SIGTERM, lambda: loop.stop())
        except NotImplementedError:
            pass
        async def runner():
            try:
                await self.start(*args, **kwargs)
            finally:
                if not self.is_closed():
                    await self.close()
        def stop_loop_on_completion(f):
            loop.stop()
        future = asyncio.ensure_future(runner(), loop=loop)
        future.add_done_callback(stop_loop_on_completion)
        try:
            loop.run_forever()
        except KeyboardInterrupt:
            log.info('Received signal to terminate bot and event loop.')
        finally:
            future.remove_done_callback(stop_loop_on_completion)
            log.info('Cleaning up tasks.')
            _cleanup_loop(loop)
        if not future.cancelled():
            try:
                return future.result()
            except KeyboardInterrupt:
                # I am unsure why this gets raised here but suppress it anyway
                return None
# properties
def is_closed(self) -> bool:
return self._closed
@property
def activity(self) -> Optional[ActivityTypes]:
return create_activity(self._connection._activity)
@activity.setter
def activity(self, value: Optional[ActivityTypes]) -> None:
if value is None:
self._connection._activity = None
elif isinstance(value, BaseActivity):
self._connection._activity = value.to_dict()
else:
raise TypeError('activity must derive from BaseActivity.')
@property
def allowed_mentions(self) -> Optional[AllowedMentions]:
return self._connection.allowed_mentions
@allowed_mentions.setter
def allowed_mentions(self, value: Optional[AllowedMentions]) -> None:
if value is None or isinstance(value, AllowedMentions):
self._connection.allowed_mentions = value
else:
raise TypeError(f'allowed_mentions must be AllowedMentions not {value.__class__!r}')
@property
def intents(self) -> Intents:
return self._connection.intents
# helpers/getters
@property
def users(self) -> List[User]:
return list(self._connection._users.values())
def get_channel(self, id: int) -> Optional[Union[GuildChannel, PrivateChannel]]:
return self._connection.get_channel(id)
def get_stage_instance(self, id) -> Optional[StageInstance]:
from .channel import StageChannel
channel = self._connection.get_channel(id)
if isinstance(channel, StageChannel):
return channel.instance
def get_guild(self, id) -> Optional[Guild]:
return self._connection._get_guild(id)
def get_user(self, id) -> Optional[User]:
return self._connection.get_user(id)
def get_emoji(self, id) -> Optional[Emoji]:
return self._connection.get_emoji(id)
def get_all_channels(self) -> Generator[GuildChannel, None, None]:
for guild in self.guilds:
yield from guild.channels
def get_all_members(self) -> Generator[Member, None, None]:
for guild in self.guilds:
yield from guild.members
# listeners/waiters
async def wait_until_ready(self) -> None:
await self._ready.wait()
def wait_for(
self,
event: str,
*,
check: Optional[Callable[..., bool]] = None,
timeout: Optional[float] = None,
) -> Any:
future = self.loop.create_future()
if check is None:
def _check(*args):
return True
check = _check
ev = event.lower()
try:
listeners = self._listeners[ev]
except KeyError:
listeners = []
self._listeners[ev] = listeners
listeners.append((future, check))
return asyncio.wait_for(future, timeout)
# event registration
def event(self, coro: Coro) -> Coro:
if not asyncio.iscoroutinefunction(coro):
raise TypeError('event registered must be a coroutine function')
setattr(self, coro.__name__, coro)
log.debug('%s has successfully been registered as an event', coro.__name__)
return coro
async def change_presence(
    self,
    *,
    activity: Optional[BaseActivity] = None,
    status: Optional[Status] = None,
):
    """Update the bot's presence on the gateway and in the member cache."""
    # Translate the Status enum into the string the gateway expects.
    if status is None:
        status = Status.online
        status_str = 'online'
    elif status is Status.offline:
        status_str = 'invisible'
    else:
        status_str = str(status)
    await self.ws.change_presence(activity=activity, status=status_str)
    # Keep our cached Member objects in sync with what we just sent.
    new_activities = (activity,) if activity is not None else ()
    for guild in self._connection.guilds:
        member = guild.me
        if member is None:
            continue
        member.activities = new_activities
        member.status = status
# Guild stuff
def fetch_guilds(
    self,
    *,
    limit: Optional[int] = 100,
    before: SnowflakeTime = None,
    after: SnowflakeTime = None
) -> GuildIterator:
    """Return an async iterator over the user's guilds (API-backed)."""
    return GuildIterator(self, limit=limit, before=before, after=after)
async def fetch_template(self, code: Union[Template, str]) -> Template:
    """Fetch a guild template from the API by code or Template object."""
    code = utils.resolve_template(code)
    data = await self.http.get_template(code)
    return Template(data=data, state=self._connection) # type: ignore
async def fetch_guild(self, guild_id: int) -> Guild:
    """Fetch a guild directly from the API (bypasses the cache)."""
    data = await self.http.get_guild(guild_id)
    return Guild(data=data, state=self._connection)
async def create_guild(
    self,
    *,
    name: str,
    region: Union[VoiceRegion, str] = VoiceRegion.us_west,
    icon: bytes = MISSING,
    code: str = MISSING,
) -> Guild:
    """Create a guild, optionally from a template code and with an icon."""
    # The API expects a base64 string for the icon, or null when absent.
    icon_base64 = utils._bytes_to_base64_data(icon) if icon is not MISSING else None
    region_value = str(region)
    if code:
        data = await self.http.create_from_template(code, name, region_value, icon_base64)
    else:
        data = await self.http.create_guild(name, region_value, icon_base64)
    return Guild(data=data, state=self._connection)
async def fetch_stage_instance(self, channel_id: int) -> StageInstance:
    """Fetch the stage instance for a stage channel ID from the API."""
    data = await self.http.get_stage_instance(channel_id)
    guild = self.get_guild(int(data['guild_id']))
    return StageInstance(guild=guild, state=self._connection, data=data) # type: ignore
# Invite management
async def fetch_invite(self, url: Union[Invite, str], *, with_counts: bool = True, with_expiration: bool = True) -> Invite:
    """Resolve *url* to an invite ID and fetch the invite from the API."""
    invite_id = utils.resolve_invite(url)
    data = await self.http.get_invite(invite_id, with_counts=with_counts, with_expiration=with_expiration)
    return Invite.from_incomplete(state=self._connection, data=data)
async def delete_invite(self, invite: Union[Invite, str]) -> None:
    """Revoke the given invite (requires appropriate permissions)."""
    invite_id = utils.resolve_invite(invite)
    await self.http.delete_invite(invite_id)
# Miscellaneous stuff
async def fetch_widget(self, guild_id: int) -> Widget:
    """Fetch a guild's widget; the widget must be enabled on that guild."""
    data = await self.http.get_widget(guild_id)
    return Widget(state=self._connection, data=data)
async def application_info(self) -> AppInfo:
    """Fetch the bot's application information from the API."""
    data = await self.http.application_info()
    if 'rpc_origins' not in data:
        # normalise the payload so AppInfo can rely on the key existing
        data['rpc_origins'] = None
    return AppInfo(self._connection, data)
async def fetch_user(self, user_id: int) -> User:
    """Fetch a user by ID from the API (bypasses the cache)."""
    data = await self.http.get_user(user_id)
    return User(state=self._connection, data=data)
async def fetch_channel(self, channel_id: int) -> Union[GuildChannel, PrivateChannel, Thread]:
    """Fetch a channel by ID from the API and build the matching type.

    Raises:
        InvalidData: if the API reports a channel type we do not know.
    """
    data = await self.http.get_channel(channel_id)
    factory, ch_type = _threaded_channel_factory(data['type'])
    if factory is None:
        raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data))
    if ch_type in (ChannelType.group, ChannelType.private):
        # private/group DMs carry no guild information
        channel = factory(me=self.user, data=data, state=self._connection)
    else:
        # fall back to a bare Object when the guild is not cached
        guild_id = int(data['guild_id'])
        guild = self.get_guild(guild_id) or Object(id=guild_id)
        channel = factory(guild=guild, state=self._connection, data=data)
    return channel
async def fetch_webhook(self, webhook_id: int) -> Webhook:
    """Fetch a webhook by ID from the API."""
    data = await self.http.get_webhook(webhook_id)
    return Webhook.from_state(data, state=self._connection)
async def create_dm(self, user: Snowflake) -> DMChannel:
    """Return (creating if necessary) the DM channel with *user*."""
    state = self._connection
    found = state._get_private_channel_by_user(user.id)
    if found:
        # reuse the cached DM channel instead of hitting the API
        return found
    data = await state.http.start_private_message(user.id)
    return state.add_dm_channel(data)
def add_view(self, view: View, *, message_id: Optional[int] = None) -> None:
    """Register a persistent View for dispatch, optionally bound to a message.

    Raises:
        TypeError: if *view* is not a :class:`View` instance.
        ValueError: if the view is not persistent (needs custom_ids, no timeout).
    """
    if not isinstance(view, View):
        raise TypeError(f'expected an instance of View not {view.__class__!r}')
    if not view.is_persistent():
        raise ValueError('View is not persistent. Items need to have a custom_id set and View must have no timeout')
    self._connection.store_view(view, message_id)
@property
def persistent_views(self) -> Sequence[View]:
    """All persistent views currently registered with the client."""
    return self._connection.persistent_views
| true | true |
f713cb7a449c438cfcce5616f6b52263398ce8bd | 1,121 | py | Python | wowp/tests/test_special_actors.py | coobas/wowp | 9d029b1c26ef65f83a5cd0279a45fe0cb4933ad9 | [
"MIT"
] | 2 | 2017-12-14T08:10:28.000Z | 2017-12-29T13:31:33.000Z | wowp/tests/test_special_actors.py | coobas/wowp | 9d029b1c26ef65f83a5cd0279a45fe0cb4933ad9 | [
"MIT"
] | 4 | 2019-10-19T08:55:04.000Z | 2019-10-22T06:50:18.000Z | wowp/tests/test_special_actors.py | coobas/wowp | 9d029b1c26ef65f83a5cd0279a45fe0cb4933ad9 | [
"MIT"
] | 1 | 2017-12-15T08:22:30.000Z | 2017-12-15T08:22:30.000Z | from __future__ import absolute_import, division, print_function, unicode_literals
from wowp.actors.special import Splitter, Chain
from wowp.schedulers import NaiveScheduler
from wowp.actors import FuncActor
from wowp.util import ConstructorWrapper
def test_splitter():
    """A Splitter with multiplicity 2 must round-robin inputs to two outports."""
    splitter = Splitter(multiplicity=2, inport_name="x")
    assert len(splitter.outports) == 2
    scheduler = NaiveScheduler()
    for i in range(0, 10):
        scheduler.put_value(splitter.inports.x, i)
    scheduler.execute()
    x1_all = list(splitter.outports["x_1"].pop_all())
    x2_all = list(splitter.outports["x_2"].pop_all())
    print("x1:", x1_all)
    print("x2:", x2_all)
    # even-indexed values go to x_1, odd-indexed to x_2
    assert [0, 2, 4, 6, 8] == x1_all
    assert [1, 3, 5, 7, 9] == x2_all
def double_me(x):
    """Return the input multiplied by two (sequences are repeated)."""
    doubled = x * 2
    return doubled
def test_chain():
    """Chaining two doubling FuncActors must quadruple the workflow input."""
    func_generator = ConstructorWrapper(FuncActor, double_me)
    chain = Chain("func_chain", [func_generator, func_generator])
    wf = chain.get_workflow()
    res = wf(inp=4)
    assert res["out"].pop() == 16
    res = wf(inp=2)
    assert res["out"].pop() == 8
    # strings double twice as well: 'a' -> 'aa' -> 'aaaa'
    res = wf(inp="a")
    assert res["out"].pop() == "aaaa"
| 27.341463 | 82 | 0.668153 | from __future__ import absolute_import, division, print_function, unicode_literals
from wowp.actors.special import Splitter, Chain
from wowp.schedulers import NaiveScheduler
from wowp.actors import FuncActor
from wowp.util import ConstructorWrapper
def test_splitter():
    """A Splitter with multiplicity 2 must round-robin inputs to two outports."""
    splitter = Splitter(multiplicity=2, inport_name="x")
    assert len(splitter.outports) == 2
    scheduler = NaiveScheduler()
    for i in range(0, 10):
        scheduler.put_value(splitter.inports.x, i)
    scheduler.execute()
    x1_all = list(splitter.outports["x_1"].pop_all())
    x2_all = list(splitter.outports["x_2"].pop_all())
    print("x1:", x1_all)
    print("x2:", x2_all)
    # even-indexed values go to x_1, odd-indexed to x_2
    assert [0, 2, 4, 6, 8] == x1_all
    assert [1, 3, 5, 7, 9] == x2_all
def double_me(x):
    """Return the input multiplied by two."""
    return x * 2
def test_chain():
    """Chaining two doubling FuncActors must quadruple the workflow input."""
    func_generator = ConstructorWrapper(FuncActor, double_me)
    chain = Chain("func_chain", [func_generator, func_generator])
    wf = chain.get_workflow()
    res = wf(inp=4)
    assert res["out"].pop() == 16
    res = wf(inp=2)
    assert res["out"].pop() == 8
    # strings double twice as well: 'a' -> 'aa' -> 'aaaa'
    res = wf(inp="a")
    assert res["out"].pop() == "aaaa"
| true | true |
f713cb97fe8f101eb0e99515797b7446859d5de2 | 25,409 | py | Python | project_generator/tools/uvision.py | knowledgejunkie/project_generator | f43c4a3e20d938a0738a4be19a4179ac22168945 | [
"Apache-2.0"
] | 12 | 2015-01-02T06:59:55.000Z | 2019-12-15T17:08:12.000Z | project_generator/tools/uvision.py | knowledgejunkie/project_generator | f43c4a3e20d938a0738a4be19a4179ac22168945 | [
"Apache-2.0"
] | 65 | 2015-01-16T07:38:08.000Z | 2015-07-06T18:16:48.000Z | project_generator/tools/uvision.py | knowledgejunkie/project_generator | f43c4a3e20d938a0738a4be19a4179ac22168945 | [
"Apache-2.0"
] | 7 | 2015-01-17T09:55:54.000Z | 2019-04-11T06:27:54.000Z | # Copyright 2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import shutil
import logging
import xmltodict
import copy
import re
from codecs import open
from os import getcwd
from os.path import basename, join, normpath
from collections import OrderedDict
from project_generator_definitions.definitions import ProGenDef
from .tool import Tool, Builder, Exporter
from ..util import SOURCE_KEYS
logger = logging.getLogger('progen.tools.uvision')
class uVisionDefinitions():
    """Static uVision configuration data.

    ``debuggers`` maps a progen debugger name to the driver/flash DLL
    settings written into the generated .uvproj(x) and .uvoptx files.
    ``debuggers_default`` names the entry used when a project does not
    choose a debugger explicitly.
    """
    def _debugger_entry(driver_dll, flash_dll, ntsel, pmon_dll, reg_key):
        # Class-body helper; deleted below so it never becomes an attribute.
        return {
            'uvproj': {
                'TargetDlls': {'Driver': driver_dll},
                'Utilities': {'Flash2': flash_dll},
            },
            'uvoptx': {
                'DebugOpt': {'nTsel': ntsel, 'pMon': pmon_dll},
                'SetRegEntry': {'Key': reg_key},
            },
        }
    debuggers = {
        'ulink2-me': _debugger_entry('BIN\\UL2CM3.dll', 'BIN\\UL2CM3.DLL', '1', 'BIN\\UL2CM3.DLL', 'UL2CM3'),
        'cmsis-dap': _debugger_entry('BIN\\CMSIS_AGDI.dll', 'BIN\\CMSIS_AGDI.dll', '12', 'BIN\\CMSIS_AGDI.dll', 'CMSIS_AGDI'),
        'j-link': _debugger_entry('Segger\\JL2CM3.dll', 'Segger\\JL2CM3.dll', '6', 'Segger\\JL2CM3.dll', 'JL2CM3'),
        'ulink-pro': _debugger_entry('BIN\\ULP2CM3.dll', 'BIN\\ULP2CM3.dll', '7', 'BIN\\ULP2CM3.DLL', 'ULP2CM3'),
        'st-link': _debugger_entry('STLink\\ST-LINKIII-KEIL_SWO.dll', 'STLink\\ST-LINKIII-KEIL_SWO.dll', '11', 'STLink\\ST-LINKIII-KEIL_SWO.dll', 'ST-LINKIII-KEIL_SWO'),
        'nu-link': _debugger_entry('BIN\\Nu_Link.dll', 'BIN\\Nu_Link.dll', '9', 'NULink\\Nu_Link.dll', 'Nu_Link'),
    }
    del _debugger_entry
    # use cmsis-dap debugger as default
    debuggers_default = 'cmsis-dap'
class Uvision(Tool, Builder, Exporter):
    """Exporter/builder for Keil uVision 4 projects.

    Translates progen project data into a .uvproj/.uvopt XML pair (via
    xmltodict and the bundled templates) and can drive a build through
    the UV4 command line.
    """
    optimization_options = ['O0', 'O1', 'O2', 'O3']
    # progen source extension -> uVision numeric <FileType> code
    file_types = {'cpp': 8, 'c': 1, 's': 2, 'obj': 3,'o':3, 'lib': 4, 'ar': 4, 'h': 5}
    # flags mapping to uvision uvproj dics
    # for available flags, check armcc/armasm/armlink command line guide
    # this does not provide all options within a project, most usable options are
    # exposed via command line, the rest is covered via template project files
    FLAGS_TO_UVISION = {
        'asm_flags': 'Aads',
        'c_flags': 'Cads',
        'cxx_flags': 'Cads',
        'ld_flags': 'LDads',
    }
    # UV4 exit codes mapped to human-readable build results
    ERRORLEVEL = {
        0: 'success (0 warnings, 0 errors)',
        1: 'warnings',
        2: 'errors',
        3: 'fatal errors',
        11: 'cant write to project file',
        12: 'device error',
        13: 'error writing',
        15: 'error reading xml file',
    }
    SUCCESSVALUE = 0
    WARNVALUE = 1
    # structure returned (deep-copied) by export_project()
    generated_project = {
        'path': '',
        'files': {
            'uvproj': '',
        }
    }
    def __init__(self, workspace, env_settings):
        """Keep workspace/project data and resolve bundled template paths."""
        self.definitions = uVisionDefinitions()
        # workspace or project
        self.workspace = workspace
        self.env_settings = env_settings
        self.uvproj_file = join(self.TEMPLATE_DIR, "uvision.uvproj")
        self.uvmpw_file = join(self.TEMPLATE_DIR, "uvision.uvmpw")
        self.uvoptx_file = join(self.TEMPLATE_DIR, "uvision.uvoptx")
    @staticmethod
    def get_toolnames():
        """Return the progen tool names this exporter handles."""
        return ['uvision']
    @staticmethod
    def get_toolchain():
        """Return the toolchain identifier used for flag lookup."""
        return 'uvision'
    def _expand_one_file(self, source, new_data, extension):
        """Build the ordered uVision <File> record for one source file."""
        # Order matters in the generated XML: FileType before FileName/FilePath.
        ordered = OrderedDict()
        ordered["FileType"] = self.file_types[extension]
        ordered["FileName"] = basename(source)
        ordered["FilePath"] = source
        return ordered
    def _normalize_mcu_def(self, mcu_def):
        """Collapse single-element definition lists to their first value."""
        for k, v in mcu_def['TargetOption'].items():
            mcu_def['TargetOption'][k] = v[0]
    def _uvproj_clean_xmldict(self, uvproj_dic):
        """Replace None values (empty XML elements) with empty strings."""
        for k, v in uvproj_dic.items():
            if v is None:
                uvproj_dic[k] = ''
    def _uvproj_set_CommonProperty(self, uvproj_dic, project_dic):
        """Sanitise the CommonProperty section of the template."""
        self._uvproj_clean_xmldict(uvproj_dic)
    def _uvproj_set_DebugOption(self, uvproj_dic, project_dic):
        """Sanitise the DebugOption section and its nested dictionaries."""
        self._uvproj_clean_xmldict(uvproj_dic)
        self._uvproj_clean_xmldict(uvproj_dic['SimDlls'])
        self._uvproj_clean_xmldict(uvproj_dic['Simulator'])
        self._uvproj_clean_xmldict(uvproj_dic['Target'])
        self._uvproj_clean_xmldict(uvproj_dic['TargetDlls'])
    def _uvproj_set_DllOption(self, uvproj_dic, project_dic):
        """Sanitise the DllOption section of the template."""
        self._uvproj_clean_xmldict(uvproj_dic)
    def _uvproj_set_TargetArmAds(self, uvproj_dic, project_dic):
        """Fill compiler/assembler/linker settings (includes, macros, flags)."""
        self._uvproj_clean_xmldict(uvproj_dic['Aads'])
        self._uvproj_clean_xmldict(uvproj_dic['Aads']['VariousControls'])
        self._uvproj_clean_xmldict(uvproj_dic['ArmAdsMisc'])
        self._uvproj_clean_xmldict(uvproj_dic['Cads'])
        self._uvproj_clean_xmldict(uvproj_dic['Cads']['VariousControls'])
        self._uvproj_clean_xmldict(uvproj_dic['LDads'])
        uvproj_dic['LDads']['ScatterFile'] = project_dic['linker_file']
        uvproj_dic['Cads']['VariousControls']['IncludePath'] = '; '.join(project_dic['include_paths'])
        uvproj_dic['Cads']['VariousControls']['Define'] = ', '.join(project_dic['macros'])
        if project_dic['macros']:
            # armasm only sees macros when they go through the C preprocessor
            uvproj_dic['Aads']['VariousControls']['MiscControls'] = '--cpreproc --cpreproc_opts=-D' + ',-D'.join(project_dic['macros'])
        for misc_keys in project_dic['misc'].keys():
            # ld-flags dont follow the same as asm/c flags, why?!? Please KEIL fix this
            if misc_keys == 'ld_flags':
                for item in project_dic['misc'][misc_keys]:
                    uvproj_dic[self.FLAGS_TO_UVISION[misc_keys]]['Misc'] += ' ' + item
            else:
                for item in project_dic['misc'][misc_keys]:
                    uvproj_dic[self.FLAGS_TO_UVISION[misc_keys]]['VariousControls']['MiscControls'] += ' ' + item
    def _uvproj_set_TargetCommonOption(self, uvproj_dic, project_dic):
        """Set output directory/name and executable-vs-library selection."""
        self._uvproj_clean_xmldict(uvproj_dic)
        self._uvproj_clean_xmldict(uvproj_dic['AfterMake'])
        self._uvproj_clean_xmldict(uvproj_dic['BeforeCompile'])
        self._uvproj_clean_xmldict(uvproj_dic['BeforeMake'])
        self._uvproj_clean_xmldict(uvproj_dic['TargetStatus'])
        uvproj_dic['OutputDirectory'] = project_dic['build_dir']
        uvproj_dic['OutputName'] = project_dic['name']
        uvproj_dic['CreateExecutable'] = 1 if project_dic['output_type'] == 'exe' else 0
        uvproj_dic['CreateLib'] = 1 if project_dic['output_type'] == 'lib' else 0
    def _uvproj_set_Utilities(self, uvproj_dic, project_dic):
        """Sanitise the Utilities section of the template."""
        self._uvproj_clean_xmldict(uvproj_dic)
    def _uvproj_files_set(self, uvproj_dic, project_dic):
        """Populate the uVision <Groups> section from progen file groups."""
        uvproj_dic['Project']['Targets']['Target']['Groups'] = OrderedDict()
        uvproj_dic['Project']['Targets']['Target']['Groups']['Group'] = []
        for i, (group_name, files) in enumerate(project_dic['groups'].items()):
            # Why OrderedDict() - uvision project requires an order. GroupName must be before Files,
            # otherwise it does not sense any file. Same applies for other attributes, like VariousControl.
            # Therefore be aware that order matters in this exporter
            group = OrderedDict()
            group['GroupName'] = group_name
            group['Files'] = {'File': []}
            uvproj_dic['Project']['Targets']['Target']['Groups']['Group'].append(group)
            for source_file in files:
                uvproj_dic['Project']['Targets']['Target']['Groups']['Group'][i]['Files']['File'].append(source_file)
            # keep each group's file list in case-insensitive alphabetical order
            group_files = uvproj_dic['Project']['Targets']['Target']['Groups']['Group'][i]['Files']['File']
            uvproj_dic['Project']['Targets']['Target']['Groups']['Group'][i]['Files']['File'] = sorted(group_files, key=lambda x: x['FileName'].lower())
    def _generate_uvmpw_file(self):
        """Render the multi-project .uvmpw workspace file and write it out."""
        uvmpw_dic = xmltodict.parse(open(self.uvmpw_file, "rb"))
        uvmpw_dic['ProjectWorkspace']['project'] = []
        for project in self.workspace['projects']:
            # We check how far is project from root and workspace. IF they dont match,
            # get relpath for project and inject it into workspace
            path_project = os.path.dirname(project['files']['uvproj'])
            path_workspace = os.path.dirname(self.workspace['settings']['path'] + '\\')
            destination = os.path.join(os.path.relpath(self.env_settings.root, path_project), project['files']['uvproj'])
            if path_project != path_workspace:
                destination = os.path.join(os.path.relpath(self.env_settings.root, path_workspace), project['files']['uvproj'])
            uvmpw_dic['ProjectWorkspace']['project'].append({'PathAndName': destination})
        # generate the file
        uvmpw_xml = xmltodict.unparse(uvmpw_dic, pretty=True)
        project_path, uvmpw = self.gen_file_raw(uvmpw_xml, '%s.uvmpw' % self.workspace['settings']['name'], self.workspace['settings']['path'])
        return project_path, uvmpw
    def _set_target(self, expanded_dic, uvproj_dic, tool_name):
        """Inject MCU/target definitions from progen definitions into uvproj.

        Raises:
            RuntimeError: if the target is unknown or has no definitions.
        """
        pro_def = ProGenDef(tool_name)
        if not pro_def.is_supported(expanded_dic['target'].lower()):
            raise RuntimeError("Target %s is not supported. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        mcu_def_dic = pro_def.get_tool_definition(expanded_dic['target'].lower())
        if not mcu_def_dic:
            raise RuntimeError(
                "Target definitions were not found for %s. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        logger.debug("Mcu definitions: %s" % mcu_def_dic)
        uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['Device'] = mcu_def_dic['TargetOption']['Device'][0]
        uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['DeviceId'] = mcu_def_dic['TargetOption']['DeviceId'][0]
        # the remaining fields are optional in the definitions
        try:
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['Vendor'] = mcu_def_dic['TargetOption']['Vendor'][0]
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['Cpu'] = mcu_def_dic['TargetOption']['Cpu'][0]
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['FlashDriverDll'] = str(mcu_def_dic['TargetOption']['FlashDriverDll'][0])
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['SFDFile'] = mcu_def_dic['TargetOption']['SFDFile'][0]
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['RegisterFile'] = mcu_def_dic['TargetOption']['RegisterFile'][0]
        except KeyError:
            pass
        # overwrite the template if target has defined debugger
        # later progen can overwrite this if debugger is set in project data
        try:
            debugger_name = pro_def.get_debugger(expanded_dic['target'])['name']
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['DebugOption']['TargetDlls']['Driver'] = self.definitions.debuggers[debugger_name]['uvproj']['TargetDlls']['Driver']
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['Utilities']['Flash2'] = self.definitions.debuggers[debugger_name]['uvproj']['Utilities']['Flash2']
        except (TypeError, KeyError):
            pass
        # Support new device packs
        if 'PackID' in mcu_def_dic['TargetOption']:
            if tool_name != 'uvision5':
                # using software packs require v5
                logger.info("The target might not be supported in %s, requires uvision5" % tool_name)
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['PackID'] = mcu_def_dic['TargetOption']['PackID'][0]
    def _uvoptx_set_debugger(self, expanded_dic, uvoptx_dic, tool_name):
        """Configure the debugger entries of the .uvoptx options file.

        Raises:
            RuntimeError: if the target is unknown or the debugger name is
                not present in ``uVisionDefinitions.debuggers``.
        """
        pro_def = ProGenDef(tool_name)
        if not pro_def.is_supported(expanded_dic['target'].lower()):
            raise RuntimeError("Target %s is not supported. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        mcu_def_dic = pro_def.get_tool_definition(expanded_dic['target'].lower())
        if not mcu_def_dic:
            raise RuntimeError(
                "Target definitions were not found for %s. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        logger.debug("Mcu definitions: %s" % mcu_def_dic)
        # set the same target name FlashDriverDll config as in uvprojx file
        try:
            uvoptx_dic['ProjectOpt']['Target']['TargetName'] = expanded_dic['name']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['TargetDriverDllRegistry']['SetRegEntry']['Name'] = str(mcu_def_dic['TargetOption']['FlashDriverDll'][0])
        except KeyError:
            return
        # load debugger from target dictionary or use default debugger
        try:
            debugger_dic = pro_def.get_debugger(expanded_dic['target'])
            if debugger_dic is None:
                debugger_name = self.definitions.debuggers_default
            else:
                debugger_name = debugger_dic['name']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['DebugOpt']['nTsel'] = self.definitions.debuggers[debugger_name]['uvoptx']['DebugOpt']['nTsel']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['DebugOpt']['pMon'] = self.definitions.debuggers[debugger_name]['uvoptx']['DebugOpt']['pMon']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['TargetDriverDllRegistry']['SetRegEntry']['Key'] = self.definitions.debuggers[debugger_name]['uvoptx']['SetRegEntry']['Key']
        except KeyError:
            raise RuntimeError("Debugger %s is not supported" % expanded_dic['debugger'])
    def _export_single_project(self, tool_name):
        """Render one project into .uvproj(x) and .uvopt(x) files.

        Returns:
            (project_path, [uvproj_name, uvoptx_name]) on success, or
            (None, None) when a requested template file is missing.
        """
        expanded_dic = self.workspace.copy()
        groups = self._get_groups(self.workspace)
        expanded_dic['groups'] = {}
        for group in groups:
            expanded_dic['groups'][group] = []
        # get relative path and fix all paths within a project
        self._iterate(self.workspace, expanded_dic)
        expanded_dic['build_dir'] = '.\\' + expanded_dic['build_dir'] + '\\'
        # generic tool template specified or project
        if expanded_dic['template']:
            for template in expanded_dic['template']:
                template = join(getcwd(), template)
                # raw strings: the original '\.' escapes triggered DeprecationWarning
                if os.path.splitext(template)[1] == '.uvproj' or os.path.splitext(template)[1] == '.uvprojx' or \
                    re.match(r'.*\.uvproj.tmpl$', template) or re.match(r'.*\.uvprojx.tmpl$', template):
                    try:
                        uvproj_dic = xmltodict.parse(open(template, encoding="utf8").read())
                    except IOError:
                        logger.info("Template file %s not found" % template)
                        return None, None
                else:
                    logger.info("Template file %s contains unknown template extension (.uvproj/x are valid). Using default one" % template)
                    uvproj_dic = xmltodict.parse(open(self.uvproj_file, "rb"))
        elif 'uvision' in self.env_settings.templates.keys():
            # template overrides what is set in the yaml files
            for template in self.env_settings.templates['uvision']:
                template = join(getcwd(), template)
                if os.path.splitext(template)[1] == '.uvproj' or os.path.splitext(template)[1] == '.uvprojx' or \
                    re.match(r'.*\.uvproj.tmpl$', template) or re.match(r'.*\.uvprojx.tmpl$', template):
                    try:
                        uvproj_dic = xmltodict.parse(open(template, encoding="utf8").read())
                    except IOError:
                        # fall back to the bundled template and keep going
                        logger.info("Template file %s not found. Using default template" % template)
                        uvproj_dic = xmltodict.parse(open(self.uvproj_file, "rb"))
                else:
                    logger.info("Template file %s contains unknown template extension (.uvproj/x are valid). Using default one" % template)
                    uvproj_dic = xmltodict.parse(open(self.uvproj_file))
        else:
            uvproj_dic = xmltodict.parse(open(self.uvproj_file, "rb"))
        try:
            uvproj_dic['Project']['Targets']['Target']['TargetName'] = expanded_dic['name']
        except KeyError:
            raise RuntimeError("The uvision template is not valid .uvproj file")
        self._uvproj_files_set(uvproj_dic, expanded_dic)
        self._uvproj_set_CommonProperty(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['CommonProperty'], expanded_dic)
        self._uvproj_set_DebugOption(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['DebugOption'], expanded_dic)
        self._uvproj_set_DllOption(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['DllOption'], expanded_dic)
        self._uvproj_set_TargetArmAds(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetArmAds'], expanded_dic)
        self._uvproj_set_TargetCommonOption(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption'], expanded_dic)
        self._uvproj_set_Utilities(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['Utilities'], expanded_dic)
        # uvision5 emits .uvprojx with a newer schema; uvision4 keeps .uvproj
        if tool_name == 'uvision5':
            extension = 'uvprojx'
            uvproj_dic['Project']['SchemaVersion'] = '2.1'
        else:
            extension = 'uvproj'
            uvproj_dic['Project']['SchemaVersion'] = '1.1'
        # set target only if defined, otherwise use from template/default one
        if expanded_dic['target']:
            self._set_target(expanded_dic, uvproj_dic, tool_name)
        # load debugger
        if expanded_dic['debugger']:
            try:
                uvproj_dic['Project']['Targets']['Target']['TargetOption']['DebugOption']['TargetDlls']['Driver'] = self.definitions.debuggers[expanded_dic['debugger']]['uvproj']['TargetDlls']['Driver']
                uvproj_dic['Project']['Targets']['Target']['TargetOption']['Utilities']['Flash2'] = self.definitions.debuggers[expanded_dic['debugger']]['uvproj']['Utilities']['Flash2']
            except KeyError:
                raise RuntimeError("Debugger %s is not supported" % expanded_dic['debugger'])
        # Project file
        uvproj_xml = xmltodict.unparse(uvproj_dic, pretty=True)
        project_path, uvproj = self.gen_file_raw(uvproj_xml, '%s.%s' % (expanded_dic['name'], extension), expanded_dic['output_dir']['path'])
        # Options file (debugger selection lives here)
        uvoptx_dic = xmltodict.parse(open(self.uvoptx_file, "rb"))
        self._uvoptx_set_debugger(expanded_dic, uvoptx_dic, tool_name)
        if tool_name == 'uvision5':
            extension = 'uvoptx'
        else:
            extension = 'uvopt'
        uvoptx_xml = xmltodict.unparse(uvoptx_dic, pretty=True)
        project_path, uvoptx = self.gen_file_raw(uvoptx_xml, '%s.%s' % (expanded_dic['name'], extension), expanded_dic['output_dir']['path'])
        return project_path, [uvproj, uvoptx]
    def export_workspace(self):
        """Write a .uvmpw multi-project workspace file."""
        path, workspace = self._generate_uvmpw_file()
        return path, [workspace]
    def export_project(self):
        """Generate the .uvproj file and return its location structure."""
        path, files = self._export_single_project('uvision') #todo: uvision will switch to uv4
        generated_projects = copy.deepcopy(self.generated_project)
        generated_projects['path'] = path
        generated_projects['files']['uvproj'] = files[0]
        return generated_projects
    def get_generated_project_files(self):
        """Return previously generated project file paths for this workspace."""
        return {'path': self.workspace['path'], 'files': [self.workspace['files']['uvproj']]}
    def _build_project(self, tool_name, extension):
        """Invoke UV4 on the exported project; return 0 on success, -1 otherwise."""
        # > UV4 -b [project_path]
        path = join(self.env_settings.root, self.workspace['files'][extension])
        if path.split('.')[-1] != extension:
            path = path + extension
        if not os.path.exists(path):
            logger.debug("The file: %s does not exists, exported prior building?" % path)
            return -1
        logger.debug("Building uVision project: %s" % path)
        build_log_path = join(os.path.dirname(path),'build','build_log.txt')
        args = [self.env_settings.get_env_settings(tool_name), '-r', '-j0', '-o', build_log_path, path]
        logger.debug(args)
        try:
            ret_code = None
            ret_code = subprocess.call(args)
        except Exception:
            # was a bare except: let KeyboardInterrupt/SystemExit propagate
            logger.error(
                "Error whilst calling UV4: '%s'. Please set uvision path in the projects.yaml file." % self.env_settings.get_env_settings('uvision'))
            return -1
        else:
            if ret_code != self.SUCCESSVALUE and ret_code != self.WARNVALUE:
                # Seems like something went wrong.
                logger.error("Project: %s build failed with the status: %s" % (self.workspace['files'][extension], self.ERRORLEVEL.get(ret_code, "Unknown")))
                return -1
            else:
                logger.info("Project: %s build succeeded with the status: %s" % (self.workspace['files'][extension], self.ERRORLEVEL.get(ret_code, "Unknown")))
                return 0
    def build_project(self):
        """Build the exported uVision 4 project via UV4."""
        return self._build_project('uvision', 'uvproj')
class Uvision5(Uvision):
    """uVision 5 exporter/builder.

    Differs from :class:`Uvision` only in the file extensions it emits
    (.uvprojx/.uvoptx instead of .uvproj) and the tool name passed to the
    export routine; building still goes through the UV4 executable.
    """
    # structure returned (deep-copied) by export_project()
    generated_project = {
        'path': '',
        'files': {
            'uvprojx': '',
            'uvoptx': '',
        }
    }
    def __init__(self, workspace, env_settings):
        """Delegate all initialisation to the uVision 4 base class."""
        super(Uvision5, self).__init__(workspace, env_settings)
    @staticmethod
    def get_toolnames():
        """Return the progen tool names this exporter handles."""
        return ['uvision5']
    def export_project(self):
        """Generate .uvprojx and .uvoptx files and return their locations."""
        path, files = self._export_single_project('uvision5')
        generated_projects = copy.deepcopy(self.generated_project)
        generated_projects['path'] = path
        generated_projects['files']['uvprojx'] = files[0]
        generated_projects['files']['uvoptx'] = files[1]
        return generated_projects
    def get_generated_project_files(self):
        """Return previously generated project file paths for this workspace."""
        return {'path': self.workspace['path'], 'files': [self.workspace['files']['uvprojx'], self.workspace['files']['uvoptx']]}
    def build_project(self):
        # tool_name uvision as uv4 is still used in uv5
        return self._build_project('uvision', 'uvprojx')
| 44.971681 | 202 | 0.587272 |
import os
import subprocess
import shutil
import logging
import xmltodict
import copy
import re
from codecs import open
from os import getcwd
from os.path import basename, join, normpath
from collections import OrderedDict
from project_generator_definitions.definitions import ProGenDef
from .tool import Tool, Builder, Exporter
from ..util import SOURCE_KEYS
logger = logging.getLogger('progen.tools.uvision')
class uVisionDefinitions():
    """Static uVision configuration data.

    ``debuggers`` maps a progen debugger name to the driver/flash DLL
    settings written into the generated .uvproj(x) and .uvoptx files.
    ``debuggers_default`` names the entry used when a project does not
    choose a debugger explicitly.
    """
    def _debugger_entry(driver_dll, flash_dll, ntsel, pmon_dll, reg_key):
        # Class-body helper; deleted below so it never becomes an attribute.
        return {
            'uvproj': {
                'TargetDlls': {'Driver': driver_dll},
                'Utilities': {'Flash2': flash_dll},
            },
            'uvoptx': {
                'DebugOpt': {'nTsel': ntsel, 'pMon': pmon_dll},
                'SetRegEntry': {'Key': reg_key},
            },
        }
    debuggers = {
        'ulink2-me': _debugger_entry('BIN\\UL2CM3.dll', 'BIN\\UL2CM3.DLL', '1', 'BIN\\UL2CM3.DLL', 'UL2CM3'),
        'cmsis-dap': _debugger_entry('BIN\\CMSIS_AGDI.dll', 'BIN\\CMSIS_AGDI.dll', '12', 'BIN\\CMSIS_AGDI.dll', 'CMSIS_AGDI'),
        'j-link': _debugger_entry('Segger\\JL2CM3.dll', 'Segger\\JL2CM3.dll', '6', 'Segger\\JL2CM3.dll', 'JL2CM3'),
        'ulink-pro': _debugger_entry('BIN\\ULP2CM3.dll', 'BIN\\ULP2CM3.dll', '7', 'BIN\\ULP2CM3.DLL', 'ULP2CM3'),
        'st-link': _debugger_entry('STLink\\ST-LINKIII-KEIL_SWO.dll', 'STLink\\ST-LINKIII-KEIL_SWO.dll', '11', 'STLink\\ST-LINKIII-KEIL_SWO.dll', 'ST-LINKIII-KEIL_SWO'),
        'nu-link': _debugger_entry('BIN\\Nu_Link.dll', 'BIN\\Nu_Link.dll', '9', 'NULink\\Nu_Link.dll', 'Nu_Link'),
    }
    del _debugger_entry
    # cmsis-dap is the default debugger
    debuggers_default = 'cmsis-dap'
class Uvision(Tool, Builder, Exporter):
    """Exporter/builder for Keil uVision 4 projects (.uvproj/.uvopt).

    Fills the bundled uVision XML templates (parsed/serialized with
    xmltodict) from the progen workspace dictionary, and can invoke the
    UV4 command-line tool to build the generated project.
    """
    # Optimization levels a project may select.
    optimization_options = ['O0', 'O1', 'O2', 'O3']
    # File extension -> uVision numeric <FileType> code.
    file_types = {'cpp': 8, 'c': 1, 's': 2, 'obj': 3,'o':3, 'lib': 4, 'ar': 4, 'h': 5}
    # Generic flag category -> uVision XML element that carries it.
    FLAGS_TO_UVISION = {
        'asm_flags': 'Aads',
        'c_flags': 'Cads',
        'cxx_flags': 'Cads',
        'ld_flags': 'LDads',
    }
    # UV4 exit code -> human-readable description (used for build reporting).
    ERRORLEVEL = {
        0: 'success (0 warnings, 0 errors)',
        1: 'warnings',
        2: 'errors',
        3: 'fatal errors',
        11: 'cant write to project file',
        12: 'device error',
        13: 'error writing',
        15: 'error reading xml file',
    }
    # UV4 exit codes treated as a successful build.
    SUCCESSVALUE = 0
    WARNVALUE = 1
    # Shape of the record returned by export_project().
    generated_project = {
        'path': '',
        'files': {
            'uvproj': '',
        }
    }
    def __init__(self, workspace, env_settings):
        """Keep workspace/settings and resolve the XML template paths.

        TEMPLATE_DIR is expected to come from a base class (not visible
        here).
        """
        self.definitions = uVisionDefinitions()
        self.workspace = workspace
        self.env_settings = env_settings
        self.uvproj_file = join(self.TEMPLATE_DIR, "uvision.uvproj")
        self.uvmpw_file = join(self.TEMPLATE_DIR, "uvision.uvmpw")
        self.uvoptx_file = join(self.TEMPLATE_DIR, "uvision.uvoptx")
    @staticmethod
    def get_toolnames():
        """Return the tool names this exporter answers to."""
        return ['uvision']
    @staticmethod
    def get_toolchain():
        """Return the toolchain identifier for this exporter."""
        return 'uvision'
    def _expand_one_file(self, source, new_data, extension):
        """Build the ordered <File> record for one source file path."""
        ordered = OrderedDict()
        ordered["FileType"] = self.file_types[extension]
        ordered["FileName"] = basename(source)
        ordered["FilePath"] = source
        return ordered
    def _normalize_mcu_def(self, mcu_def):
        """Collapse every TargetOption list value to its first element (in place)."""
        for k, v in mcu_def['TargetOption'].items():
            mcu_def['TargetOption'][k] = v[0]
    def _uvproj_clean_xmldict(self, uvproj_dic):
        """Replace None values with '' so xmltodict emits empty elements."""
        for k, v in uvproj_dic.items():
            if v is None:
                uvproj_dic[k] = ''
    def _uvproj_set_CommonProperty(self, uvproj_dic, project_dic):
        """Normalize the <CommonProperty> subtree."""
        self._uvproj_clean_xmldict(uvproj_dic)
    def _uvproj_set_DebugOption(self, uvproj_dic, project_dic):
        """Normalize the <DebugOption> subtree and its nested sections."""
        self._uvproj_clean_xmldict(uvproj_dic)
        self._uvproj_clean_xmldict(uvproj_dic['SimDlls'])
        self._uvproj_clean_xmldict(uvproj_dic['Simulator'])
        self._uvproj_clean_xmldict(uvproj_dic['Target'])
        self._uvproj_clean_xmldict(uvproj_dic['TargetDlls'])
    def _uvproj_set_DllOption(self, uvproj_dic, project_dic):
        """Normalize the <DllOption> subtree."""
        self._uvproj_clean_xmldict(uvproj_dic)
    def _uvproj_set_TargetArmAds(self, uvproj_dic, project_dic):
        """Fill compiler/assembler/linker settings into <TargetArmAds>.

        Writes include paths, macro defines, the scatter (linker) file,
        and appends every misc flag to the element mapped by
        FLAGS_TO_UVISION (ld_flags go to 'Misc', others to
        'VariousControls'/'MiscControls').
        """
        self._uvproj_clean_xmldict(uvproj_dic['Aads'])
        self._uvproj_clean_xmldict(uvproj_dic['Aads']['VariousControls'])
        self._uvproj_clean_xmldict(uvproj_dic['ArmAdsMisc'])
        self._uvproj_clean_xmldict(uvproj_dic['Cads'])
        self._uvproj_clean_xmldict(uvproj_dic['Cads']['VariousControls'])
        self._uvproj_clean_xmldict(uvproj_dic['LDads'])
        uvproj_dic['LDads']['ScatterFile'] = project_dic['linker_file']
        uvproj_dic['Cads']['VariousControls']['IncludePath'] = '; '.join(project_dic['include_paths'])
        uvproj_dic['Cads']['VariousControls']['Define'] = ', '.join(project_dic['macros'])
        if project_dic['macros']:
            # Pass macros to the assembler via armasm's C preprocessor options.
            uvproj_dic['Aads']['VariousControls']['MiscControls'] = '--cpreproc --cpreproc_opts=-D' + ',-D'.join(project_dic['macros'])
        for misc_keys in project_dic['misc'].keys():
            if misc_keys == 'ld_flags':
                for item in project_dic['misc'][misc_keys]:
                    uvproj_dic[self.FLAGS_TO_UVISION[misc_keys]]['Misc'] += ' ' + item
            else:
                for item in project_dic['misc'][misc_keys]:
                    uvproj_dic[self.FLAGS_TO_UVISION[misc_keys]]['VariousControls']['MiscControls'] += ' ' + item
    def _uvproj_set_TargetCommonOption(self, uvproj_dic, project_dic):
        """Fill output directory/name and exe-vs-lib flags into <TargetCommonOption>."""
        self._uvproj_clean_xmldict(uvproj_dic)
        self._uvproj_clean_xmldict(uvproj_dic['AfterMake'])
        self._uvproj_clean_xmldict(uvproj_dic['BeforeCompile'])
        self._uvproj_clean_xmldict(uvproj_dic['BeforeMake'])
        self._uvproj_clean_xmldict(uvproj_dic['TargetStatus'])
        uvproj_dic['OutputDirectory'] = project_dic['build_dir']
        uvproj_dic['OutputName'] = project_dic['name']
        uvproj_dic['CreateExecutable'] = 1 if project_dic['output_type'] == 'exe' else 0
        uvproj_dic['CreateLib'] = 1 if project_dic['output_type'] == 'lib' else 0
    def _uvproj_set_Utilities(self, uvproj_dic, project_dic):
        """Normalize the <Utilities> subtree."""
        self._uvproj_clean_xmldict(uvproj_dic)
    def _uvproj_files_set(self, uvproj_dic, project_dic):
        """Rebuild the <Groups> tree from project_dic['groups'].

        Each group becomes a <Group> with its files sorted
        case-insensitively by FileName.
        """
        uvproj_dic['Project']['Targets']['Target']['Groups'] = OrderedDict()
        uvproj_dic['Project']['Targets']['Target']['Groups']['Group'] = []
        i = 0
        for group_name, files in project_dic['groups'].items():
            group = OrderedDict()
            group['GroupName'] = group_name
            group['Files'] = {'File': []}
            uvproj_dic['Project']['Targets']['Target']['Groups']['Group'].append(group)
            for file in files:
                uvproj_dic['Project']['Targets']['Target']['Groups']['Group'][i]['Files']['File'].append(file)
            files = uvproj_dic['Project']['Targets']['Target']['Groups']['Group'][i]['Files']['File']
            uvproj_dic['Project']['Targets']['Target']['Groups']['Group'][i]['Files']['File'] = sorted(files, key=lambda x: x['FileName'].lower())
            i += 1
    def _generate_uvmpw_file(self):
        """Generate the multi-project workspace (.uvmpw) file.

        Each project path is rewritten relative to the workspace
        location. Returns (project_path, filename) from gen_file_raw.
        """
        uvmpw_dic = xmltodict.parse(open(self.uvmpw_file, "rb"))
        uvmpw_dic['ProjectWorkspace']['project'] = []
        for project in self.workspace['projects']:
            path_project = os.path.dirname(project['files']['uvproj'])
            path_workspace = os.path.dirname(self.workspace['settings']['path'] + '\\')
            destination = os.path.join(os.path.relpath(self.env_settings.root, path_project), project['files']['uvproj'])
            if path_project != path_workspace:
                destination = os.path.join(os.path.relpath(self.env_settings.root, path_workspace), project['files']['uvproj'])
            uvmpw_dic['ProjectWorkspace']['project'].append({'PathAndName': destination})
        uvmpw_xml = xmltodict.unparse(uvmpw_dic, pretty=True)
        project_path, uvmpw = self.gen_file_raw(uvmpw_xml, '%s.uvmpw' % self.workspace['settings']['name'], self.workspace['settings']['path'])
        return project_path, uvmpw
    def _set_target(self, expanded_dic, uvproj_dic, tool_name):
        """Write MCU/target definitions from progen definitions into the uvproj tree.

        Raises RuntimeError when the target is unknown or has no tool
        definition. Vendor/CPU/flash fields are best-effort (missing
        keys are ignored), as is the debugger driver lookup.
        """
        pro_def = ProGenDef(tool_name)
        if not pro_def.is_supported(expanded_dic['target'].lower()):
            raise RuntimeError("Target %s is not supported. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        mcu_def_dic = pro_def.get_tool_definition(expanded_dic['target'].lower())
        if not mcu_def_dic:
            raise RuntimeError(
                "Target definitions were not found for %s. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        logger.debug("Mcu definitions: %s" % mcu_def_dic)
        uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['Device'] = mcu_def_dic['TargetOption']['Device'][0]
        uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['DeviceId'] = mcu_def_dic['TargetOption']['DeviceId'][0]
        try:
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['Vendor'] = mcu_def_dic['TargetOption']['Vendor'][0]
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['Cpu'] = mcu_def_dic['TargetOption']['Cpu'][0]
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['FlashDriverDll'] = str(mcu_def_dic['TargetOption']['FlashDriverDll'][0])
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['SFDFile'] = mcu_def_dic['TargetOption']['SFDFile'][0]
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['RegisterFile'] = mcu_def_dic['TargetOption']['RegisterFile'][0]
        except KeyError:
            # Optional fields: keep whatever the template provides.
            pass
        try:
            debugger_name = pro_def.get_debugger(expanded_dic['target'])['name']
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['DebugOption']['TargetDlls']['Driver'] = self.definitions.debuggers[debugger_name]['uvproj']['TargetDlls']['Driver']
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['Utilities']['Flash2'] = self.definitions.debuggers[debugger_name]['uvproj']['Utilities']['Flash2']
        except (TypeError, KeyError) as err:
            # TypeError covers get_debugger() returning None; debugger stays as templated.
            pass
        if 'PackID' in mcu_def_dic['TargetOption']:
            if tool_name != 'uvision5':
                logger.info("The target might not be supported in %s, requires uvision5" % tool_name)
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption']['PackID'] = mcu_def_dic['TargetOption']['PackID'][0]
    def _uvoptx_set_debugger(self, expanded_dic, uvoptx_dic, tool_name):
        """Write debugger configuration into the uvoptx options tree.

        Falls back to definitions.debuggers_default when the target has
        no debugger entry; raises RuntimeError for an unknown debugger.
        """
        pro_def = ProGenDef(tool_name)
        if not pro_def.is_supported(expanded_dic['target'].lower()):
            raise RuntimeError("Target %s is not supported. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        mcu_def_dic = pro_def.get_tool_definition(expanded_dic['target'].lower())
        if not mcu_def_dic:
            raise RuntimeError(
                "Target definitions were not found for %s. Please add them to https://github.com/project-generator/project_generator_definitions" % expanded_dic['target'].lower())
        logger.debug("Mcu definitions: %s" % mcu_def_dic)
        try:
            uvoptx_dic['ProjectOpt']['Target']['TargetName'] = expanded_dic['name']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['TargetDriverDllRegistry']['SetRegEntry']['Name'] = str(mcu_def_dic['TargetOption']['FlashDriverDll'][0])
        except KeyError:
            # Template/definition lacks these fields: leave options untouched.
            return
        try:
            debugger_dic = pro_def.get_debugger(expanded_dic['target'])
            if debugger_dic is None:
                debugger_name = self.definitions.debuggers_default
            else:
                debugger_name = debugger_dic['name']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['DebugOpt']['nTsel'] = self.definitions.debuggers[debugger_name]['uvoptx']['DebugOpt']['nTsel']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['DebugOpt']['pMon'] = self.definitions.debuggers[debugger_name]['uvoptx']['DebugOpt']['pMon']
            uvoptx_dic['ProjectOpt']['Target']['TargetOption']['TargetDriverDllRegistry']['SetRegEntry']['Key'] = self.definitions.debuggers[debugger_name]['uvoptx']['SetRegEntry']['Key']
        except KeyError:
            raise RuntimeError("Debugger %s is not supported" % expanded_dic['debugger'])
    def _export_single_project(self, tool_name):
        """Generate the .uvproj(x) and .uvopt(x) files for one project.

        Template resolution order: project-level 'template' entries,
        then env_settings templates for 'uvision', then the bundled
        default template. Returns (project_path, [uvproj, uvoptx]);
        returns (None, None) when a project-level template is missing.
        """
        expanded_dic = self.workspace.copy()
        groups = self._get_groups(self.workspace)
        expanded_dic['groups'] = {}
        for group in groups:
            expanded_dic['groups'][group] = []
        self._iterate(self.workspace, expanded_dic)
        # uVision expects a Windows-style relative build directory.
        expanded_dic['build_dir'] = '.\\' + expanded_dic['build_dir'] + '\\'
        if expanded_dic['template']:
            for template in expanded_dic['template']:
                template = join(getcwd(), template)
                if os.path.splitext(template)[1] == '.uvproj' or os.path.splitext(template)[1] == '.uvprojx' or \
                        re.match('.*\.uvproj.tmpl$', template) or re.match('.*\.uvprojx.tmpl$', template):
                    try:
                        uvproj_dic = xmltodict.parse(open(template, encoding="utf8").read())
                    except IOError:
                        logger.info("Template file %s not found" % template)
                        return None, None
                else:
                    logger.info("Template file %s contains unknown template extension (.uvproj/x are valid). Using default one" % template)
                    uvproj_dic = xmltodict.parse(open(self.uvproj_file, "rb"))
        elif 'uvision' in self.env_settings.templates.keys():
            for template in self.env_settings.templates['uvision']:
                template = join(getcwd(), template)
                if os.path.splitext(template)[1] == '.uvproj' or os.path.splitext(template)[1] == '.uvprojx' or \
                        re.match('.*\.uvproj.tmpl$', template) or re.match('.*\.uvprojx.tmpl$', template):
                    try:
                        uvproj_dic = xmltodict.parse(open(template, encoding="utf8").read())
                    except IOError:
                        logger.info("Template file %s not found. Using default template" % template)
                        uvproj_dic = xmltodict.parse(open(self.uvproj_file, "rb"))
                else:
                    logger.info("Template file %s contains unknown template extension (.uvproj/x are valid). Using default one" % template)
                    uvproj_dic = xmltodict.parse(open(self.uvproj_file))
        else:
            uvproj_dic = xmltodict.parse(open(self.uvproj_file, "rb"))
        try:
            uvproj_dic['Project']['Targets']['Target']['TargetName'] = expanded_dic['name']
        except KeyError:
            raise RuntimeError("The uvision template is not valid .uvproj file")
        self._uvproj_files_set(uvproj_dic, expanded_dic)
        self._uvproj_set_CommonProperty(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['CommonProperty'], expanded_dic)
        self._uvproj_set_DebugOption(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['DebugOption'], expanded_dic)
        self._uvproj_set_DllOption(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['DllOption'], expanded_dic)
        self._uvproj_set_TargetArmAds(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetArmAds'], expanded_dic)
        self._uvproj_set_TargetCommonOption(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['TargetCommonOption'], expanded_dic)
        self._uvproj_set_Utilities(
            uvproj_dic['Project']['Targets']['Target']['TargetOption']['Utilities'], expanded_dic)
        if tool_name == 'uvision5':
            extension = 'uvprojx'
            uvproj_dic['Project']['SchemaVersion'] = '2.1'
        else:
            extension = 'uvproj'
            uvproj_dic['Project']['SchemaVersion'] = '1.1'
        if expanded_dic['target']:
            self._set_target(expanded_dic, uvproj_dic, tool_name)
        if expanded_dic['debugger']:
            # Explicit project debugger overrides whatever _set_target chose.
            try:
                uvproj_dic['Project']['Targets']['Target']['TargetOption']['DebugOption']['TargetDlls']['Driver'] = self.definitions.debuggers[expanded_dic['debugger']]['uvproj']['TargetDlls']['Driver']
                uvproj_dic['Project']['Targets']['Target']['TargetOption']['Utilities']['Flash2'] = self.definitions.debuggers[expanded_dic['debugger']]['uvproj']['Utilities']['Flash2']
            except KeyError:
                raise RuntimeError("Debugger %s is not supported" % expanded_dic['debugger'])
        uvproj_xml = xmltodict.unparse(uvproj_dic, pretty=True)
        project_path, uvproj = self.gen_file_raw(uvproj_xml, '%s.%s' % (expanded_dic['name'], extension), expanded_dic['output_dir']['path'])
        uvoptx = None
        uvoptx_dic = xmltodict.parse(open(self.uvoptx_file, "rb"))
        self._uvoptx_set_debugger(expanded_dic, uvoptx_dic, tool_name)
        if tool_name == 'uvision5':
            extension = 'uvoptx'
        else:
            extension = 'uvopt'
        uvoptx_xml = xmltodict.unparse(uvoptx_dic, pretty=True)
        project_path, uvoptx = self.gen_file_raw(uvoptx_xml, '%s.%s' % (expanded_dic['name'], extension), expanded_dic['output_dir']['path'])
        return project_path, [uvproj, uvoptx]
    def export_workspace(self):
        """Export the multi-project workspace file; returns (path, [file])."""
        path, workspace = self._generate_uvmpw_file()
        return path, [workspace]
    def export_project(self):
        """Export a uVision 4 project; returns a generated_project record.

        NOTE(review): only files[0] (the uvproj) is recorded; the uvopt
        file returned by _export_single_project is discarded here.
        """
        path, files = self._export_single_project('uvision')
        generated_projects = copy.deepcopy(self.generated_project)
        generated_projects['path'] = path
        generated_projects['files']['uvproj'] = files[0]
        return generated_projects
    def get_generated_project_files(self):
        """Return the previously generated project path and file list."""
        return {'path': self.workspace['path'], 'files': [self.workspace['files']['uvproj']]}
    def _build_project(self, tool_name, extension):
        """Run UV4 on the exported project file; return 0 on success, -1 on failure.

        Warnings (exit code 1) still count as success.
        """
        path = join(self.env_settings.root, self.workspace['files'][extension])
        # NOTE(review): appends the extension without a dot separator
        # (e.g. "foouvproj") when the suffix check fails — confirm intended.
        if path.split('.')[-1] != extension:
            path = path + extension
        if not os.path.exists(path):
            logger.debug("The file: %s does not exists, exported prior building?" % path)
            return -1
        logger.debug("Building uVision project: %s" % path)
        build_log_path = join(os.path.dirname(path),'build','build_log.txt')
        # UV4 flags: -r rebuild, -j0 no GUI, -o write the build log.
        args = [self.env_settings.get_env_settings(tool_name), '-r', '-j0', '-o', build_log_path, path]
        logger.debug(args)
        try:
            ret_code = None
            ret_code = subprocess.call(args)
        except:
            logger.error(
                "Error whilst calling UV4: '%s'. Please set uvision path in the projects.yaml file." % self.env_settings.get_env_settings('uvision'))
            return -1
        else:
            if ret_code != self.SUCCESSVALUE and ret_code != self.WARNVALUE:
                logger.error("Project: %s build failed with the status: %s" % (self.workspace['files'][extension], self.ERRORLEVEL.get(ret_code, "Unknown")))
                return -1
            else:
                logger.info("Project: %s build succeeded with the status: %s" % (self.workspace['files'][extension], self.ERRORLEVEL.get(ret_code, "Unknown")))
                return 0
    def build_project(self):
        """Build the exported uVision 4 project."""
        return self._build_project('uvision', 'uvproj')
class Uvision5(Uvision):
    """Exporter/builder for Keil uVision 5 projects (.uvprojx/.uvoptx).

    Reuses the uVision 4 machinery but records both generated files.
    """
    # Shape of the record returned by export_project().
    generated_project = {
        'path': '',
        'files': {
            'uvprojx': '',
            'uvoptx': '',
        }
    }
    def __init__(self, workspace, env_settings):
        """Delegate all setup to the uVision 4 base class."""
        super(Uvision5, self).__init__(workspace, env_settings)
    @staticmethod
    def get_toolnames():
        """Return the tool names this exporter answers to."""
        return ['uvision5']
    def export_project(self):
        """Export a uVision 5 project and return its generated_project record."""
        project_path, generated = self._export_single_project('uvision5')
        record = copy.deepcopy(self.generated_project)
        record['path'] = project_path
        record['files']['uvprojx'] = generated[0]
        record['files']['uvoptx'] = generated[1]
        return record
    def get_generated_project_files(self):
        """Return the previously generated project path and both file names."""
        generated = [self.workspace['files']['uvprojx'], self.workspace['files']['uvoptx']]
        return {'path': self.workspace['path'], 'files': generated}
    def build_project(self):
        """Build the exported .uvprojx with the same UV4 executable."""
        return self._build_project('uvision', 'uvprojx')
| true | true |
f713cd4ccb4c24a7f9ce8c790aa81d48b9aee1af | 947 | py | Python | VSR/Model/__init__.py | soufiomario/VideoSuperResolution | 3677e562df4674b9ff827feecce1806de15131a4 | [
"MIT"
] | 2 | 2021-08-05T08:32:03.000Z | 2021-08-12T09:33:44.000Z | VSR/Model/__init__.py | AbdulMoqeet/VideoSuperResolution | 82c3347554561ff9dfb5e86d9cf0a55239ca662e | [
"MIT"
] | null | null | null | VSR/Model/__init__.py | AbdulMoqeet/VideoSuperResolution | 82c3347554561ff9dfb5e86d9cf0a55239ca662e | [
"MIT"
] | null | null | null | # Copyright (c) 2017-2020 Wenyi Tang.
# Author: Wenyi Tang
# Email: wenyitang@outlook.com
# Update: 2020 - 2 - 7
from importlib import import_module
from ..Backend import BACKEND
# Public names exported by `from VSR.Model import *`.
__all__ = [
    'get_model',
    'list_supported_models'
]
def get_model(name: str):
    """Look up a model by (case-insensitive) name in the active backend.

    Raises ImportError when the backend package or the named model
    cannot be found. Returns None for the not-yet-implemented
    'tensorflow2' backend.
    """
    key = name.lower()
    backend_packages = {
        'pytorch': 'VSR.Backend.Torch',
        'tensorflow': 'VSR.Backend.TF',
    }
    try:
        if BACKEND in backend_packages:
            return import_module('.Models', backend_packages[BACKEND]).get_model(key)
        if BACKEND == 'tensorflow2':
            pass
    except (KeyError, ImportError):
        raise ImportError(f"Using {BACKEND}, can't find model {key}.")
def list_supported_models():
    """Return the model names provided by the active backend.

    Returns None for the not-yet-implemented 'tensorflow2' backend.
    """
    backend_packages = {
        'pytorch': 'VSR.Backend.Torch',
        'tensorflow': 'VSR.Backend.TF',
    }
    package = backend_packages.get(BACKEND)
    if package is not None:
        return import_module('.Models', package).list_supported_models()
    if BACKEND == 'tensorflow2':
        pass
| 26.305556 | 80 | 0.685322 |
from importlib import import_module
from ..Backend import BACKEND
__all__ = [
'get_model',
'list_supported_models'
]
def get_model(name: str):
name = name.lower()
try:
if BACKEND == 'pytorch':
return import_module('.Models', 'VSR.Backend.Torch').get_model(name)
elif BACKEND == 'tensorflow':
return import_module('.Models', 'VSR.Backend.TF').get_model(name)
elif BACKEND == 'tensorflow2':
pass
except (KeyError, ImportError):
raise ImportError(f"Using {BACKEND}, can't find model {name}.")
def list_supported_models():
if BACKEND == 'pytorch':
return import_module('.Models', 'VSR.Backend.Torch').list_supported_models()
elif BACKEND == 'tensorflow':
return import_module('.Models', 'VSR.Backend.TF').list_supported_models()
elif BACKEND == 'tensorflow2':
pass
| true | true |
f713cdf606aeb3b1f7bc76ca741473444b804ed1 | 9,735 | py | Python | advent_of_code/flood_advent/utils.py | mflood/whimsical | e39865193b232cc8fc280f371a76f7ac7d07d782 | [
"Apache-2.0"
] | null | null | null | advent_of_code/flood_advent/utils.py | mflood/whimsical | e39865193b232cc8fc280f371a76f7ac7d07d782 | [
"Apache-2.0"
] | null | null | null | advent_of_code/flood_advent/utils.py | mflood/whimsical | e39865193b232cc8fc280f371a76f7ac7d07d782 | [
"Apache-2.0"
] | null | null | null |
import argparse
import re
import logging
import requests
from typing import Iterator
from typing import List
LOGGER_NAME="advent"
def init_logging(is_verbose: bool):
    """
    Create (or fetch) the shared "advent" logger with one stream handler.

    Args:
        is_verbose: when True the console handler emits DEBUG records,
            otherwise INFO and above.

    Returns:
        The configured logging.Logger instance.
    """
    logger = logging.getLogger(LOGGER_NAME)
    logger.setLevel(logging.DEBUG)
    # Only attach a handler on the first call: previously every call
    # stacked another StreamHandler, duplicating each log line.
    if not logger.handlers:
        channel = logging.StreamHandler()
        if is_verbose:
            channel.setLevel(logging.DEBUG)
        else:
            channel.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(filename)s:%(lineno)s: %(message)s')
        channel.setFormatter(formatter)
        logger.addHandler(channel)
    logger.debug("Initialized logging")
    return logger
def get_input_from_file(filepath: str) -> Iterator[str]:
    """
    Open the file at *filepath* and yield each line with surrounding
    whitespace stripped.
    """
    logger = logging.getLogger(LOGGER_NAME)
    logger.info("Reading input from %s", filepath)
    with open(filepath, "r") as handle:
        for raw_line in handle:
            yield raw_line.strip()
def read_comma_separated_values(filepath: str) -> Iterator[str]:
    """
    Yield every non-empty comma separated value in the file, stripped
    of surrounding whitespace.

    Args:
        filepath: path of the input file.
    """
    logger = logging.getLogger(LOGGER_NAME)
    logger.info("Reading comma separated values from %s", filepath)
    # (removed an unused `return_list` accumulator left over from a
    # pre-generator version of this function)
    for line in get_input_from_file(filepath=filepath):
        line = line.strip()
        values = line.split(",")
        for value in values:
            v = value.strip()
            if v:
                yield v
def read_comma_separated_ints(filepath: str) -> Iterator[int]:
    """Yield every comma separated value in the file converted to int."""
    yield from (int(value) for value in read_comma_separated_values(filepath=filepath))
def get_integers_from_file(filepath: str) -> Iterator[int]:
    """
    Yield each line of the file at *filepath* converted to an int.
    """
    logger = logging.getLogger(LOGGER_NAME)
    logger.info("Reading integers from %s", filepath)
    yield from (int(line) for line in get_input_from_file(filepath=filepath))
def line_to_parts(line) -> dict:
    """
    Parse a fabric-claim line of the form ``#123 @ 3,2: 5x4``.

    Returns a dict of string values:
        {"id": "123", "from_left": "3", "from_top": "2",
         "width": "5", "height": "4"}

    Raises:
        ValueError: if the line does not match the expected format
            (previously this surfaced as an opaque AttributeError on
            ``None.group``).
    """
    # (removed a leftover "Isaac Newton" example match whose result was
    # immediately overwritten)
    m = re.match(r"#(\d+) @ (\d+),(\d+): (\d+)x(\d+)", line)
    if m is None:
        raise ValueError("Unparseable claim line: %r" % (line,))
    return {
        "id": m.group(1),
        "from_left": m.group(2),
        "from_top": m.group(3),
        "width": m.group(4),
        "height": m.group(5),
    }
def binary_list_to_int(binary_list: List[str]) -> int:
    """
    Interpret a sequence of bits as a base-2 number.

    Accepts strings or ints, e.g. ["1", "0", "1"] -> 5 and
    [1, 0, 1] -> 5.
    """
    bit_string = "".join(str(bit) for bit in binary_list)
    return int(bit_string, 2)
class SparseGrid():
    """A dict-backed 2D grid: each "x:y" key maps to the list of values
    written to that cell, so overlap counts come for free.
    """
    def __init__(self):
        # "x:y" -> list of values written to that cell.
        self.cell_dict = {}
        # Largest coordinates seen so far, used by print().
        self.max_x = 0
        self.max_y = 0
        self._logger = logging.getLogger(LOGGER_NAME)
    def add_line(self, x1, y1, x2, y2, value, only_horizontal):
        """Stamp *value* onto every cell of the segment (x1,y1)-(x2,y2).

        With only_horizontal=True, non-axis-aligned segments are
        silently skipped. Otherwise diagonal segments are paired
        coordinate-by-coordinate via zip (45-degree diagonals only —
        other slopes would pair as many cells as the shorter range).
        """
        # only horizontal and vertical lines
        if not (x1 == x2 or y1 == y2) and only_horizontal:
            self._logger.debug("Not a horizontal or vertical line")
            return
        min_x = min(x1, x2)
        max_x = max(x1, x2)
        min_y = min(y1, y2)
        max_y = max(y1, y2)
        # Adjust sparse grid metadata for printing the grid
        if max_x > self.max_x:
            self.max_x = max_x
            self._logger.debug("Adjusting sparse grid max_x to %s", self.max_x)
        if max_y > self.max_y:
            self.max_y = max_y
            self._logger.debug("Adjusting sparse grid max_y to %s", self.max_y)
        # use range to get coordinates
        x_list = list(range(min_x, max_x + 1))
        y_list = list(range(min_y, max_y + 1))
        # reverse a range when the segment runs opposite to it, so zip
        # pairs coordinates in the segment's true direction (matters
        # for diagonals)
        if x1 < x2:
            x_list.reverse()
        if y1 < y2:
            y_list.reverse()
        # for horizontal lines (single y), repeat the y value
        # for each coordinate so zip works
        if len(y_list) == 1:
            for x in range(len(x_list)-1):
                y_list.append(y_list[0])
        # for vertical lines (single x), repeat the x value
        # for each coordinate so zip works
        if len(x_list) == 1:
            for y in range(len(y_list)-1):
                x_list.append(x_list[0])
        cells = list(zip(x_list, y_list))
        for x, y in cells:
            coordinate = f"{x}:{y}"
            self.cell_dict.setdefault(coordinate, [])
            self.cell_dict[coordinate].append(value)
    def add_block(self, from_left_x: int, from_top_y: int, width: int, height: int, value: str):
        """Stamp *value* onto a width x height rectangle of cells."""
        if from_left_x + width > self.max_x:
            self.max_x = from_left_x + width
            print(f"Adjusting width to {self.max_x}")
        if from_top_y + height > self.max_y:
            self.max_y = from_top_y + height
            print(f"Adjusting height to {self.max_y}")
        for x in range(width):
            for y in range(height):
                true_x = x + from_left_x
                true_y = y + from_top_y
                coordinate = f"{true_x}:{true_y}"
                self.cell_dict.setdefault(coordinate, [])
                self.cell_dict[coordinate].append(value)
    def evaluate_block(self, from_left_x: int, from_top_y: int, width: int, height: int, value: str) -> bool:
        """
        Once the SparseGrid is populated, evaluate a block to
        see if it does not overlap with any other block.

        Raises KeyError if the block was never added (cells missing).
        The *value* argument is currently unused.
        """
        for x in range(width):
            for y in range(height):
                true_x = x + from_left_x
                true_y = y + from_top_y
                coordinate = f"{true_x}:{true_y}"
                array = self.cell_dict[coordinate]
                if len(array) != 1:
                    return False
        return True
    def get_num_overlapping_cells(self):
        """
        Return the number of cells whose value list
        holds more than one element (i.e. was written twice or more).
        """
        num = 0
        for coord, array in self.cell_dict.items():
            if len(array) > 1:
                num += 1
        return num
    def print(self):
        """Render the grid to stdout: '.' for empty, else the write count.

        (Note: this method intentionally shadows the builtin print
        inside its own scope name, but calls the builtin internally.)
        """
        for y in range(self.max_y + 1):
            for x in range(self.max_x + 1):
                coordinate = f"{x}:{y}"
                array = self.cell_dict.get(coordinate)
                if not array:
                    print(".", end='')
                else:
                    print(len(array), end='')
            print("")
def parse_args(argv=None):
    """
    Parse command line args.

    Args:
        argv: argument list to parse; None means sys.argv[1:].
    """
    parser = argparse.ArgumentParser(description="Main Driver for Frivenmeld")
    boolean_flags = (
        ('-v', 'verbose', "Debug output"),
        ('-t', 'use_test_data', "Use test data"),
        ('-d', 'print_data', "Just print out the data"),
    )
    for flag, dest, help_text in boolean_flags:
        parser.add_argument(flag,
                            action="store_true",
                            dest=dest,
                            required=False,
                            help=help_text)
    parser.add_argument("-yd",
                        dest="year_day",
                        help="YYYYDD the date to process data for")
    return parser.parse_args(argv)
class Input():
    """Locate and parse Advent of Code input files for one puzzle day.

    Files live under ``data/<year>/day/<day>/`` as ``input.txt`` or
    ``test-input.txt`` depending on *use_test_data*.
    """
    def __init__(self, year: int, day: int, use_test_data: bool):
        self._year = year
        self._day = day
        self._use_test_data = use_test_data
        self._logger = logging.getLogger(LOGGER_NAME)
        self._logger.info("Input year: %d day: %d test-data: %s", self._year, self._day, self._use_test_data)
    def get_filepath(self) -> str:
        """Return the relative path of this puzzle's input file."""
        if self._use_test_data:
            return f"data/{self._year}/day/{self._day}/test-input.txt"
        else:
            return f"data/{self._year}/day/{self._day}/input.txt"
    def get_raw(self) -> str:
        """Return the whole input file as one string."""
        filepath = self.get_filepath()
        self._logger.info("Reading raw data from '%s'", filepath)
        with open(filepath, "r") as handle:
            data = handle.read()
        return data
    def get_lines(self) -> Iterator[str]:
        """Yield each line of the input file, stripped of whitespace."""
        filepath = self.get_filepath()
        self._logger.info("Reading lines from '%s'", filepath)
        with open(filepath, "r") as handle:
            for line in handle:
                line = line.strip()
                yield line
    def get_chars(self) -> Iterator[str]:
        """
        Yield every character in the file (line breaks excluded).

        asd
        fro

        -> 'a', 's', 'd', 'f', 'r', 'o'
        """
        # Annotation fixed: this is a generator of characters, not a List[str].
        for line in self.get_lines():
            for char in line:
                yield char
    def get_ints(self) -> Iterator[int]:
        """Yield each line converted to an int."""
        for line in self.get_lines():
            yield int(line)
    def get_floats(self) -> Iterator[float]:
        """Yield each line converted to a float."""
        # Annotation fixed: yields float, not int.
        for line in self.get_lines():
            yield float(line)
    def get_comma_separated_values(self) -> Iterator[str]:
        """
        Yield each comma separated value, stripped.

        note: skips empty values
        """
        for line in self.get_lines():
            line = line.strip()
            values = line.split(",")
            for value in values:
                v = value.strip()
                if v:
                    yield v
    def get_comma_separated_ints(self) -> Iterator[int]:
        """Yield each comma separated value converted to an int."""
        values = self.get_comma_separated_values()
        for item in values:
            yield int(item)
| 29.5 | 110 | 0.543606 |
import argparse
import re
import logging
import requests
from typing import Iterator
from typing import List
LOGGER_NAME="advent"
def init_logging(is_verbose: bool):
logger = logging.getLogger(LOGGER_NAME)
logger.setLevel(logging.DEBUG)
channel = logging.StreamHandler()
if is_verbose:
channel.setLevel(logging.DEBUG)
else:
channel.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(filename)s:%(lineno)s: %(message)s')
channel.setFormatter(formatter)
logger.addHandler(channel)
logger.debug("Initialized logging")
return logger
def get_input_from_file(filepath: str) -> Iterator[str]:
logger = logging.getLogger(LOGGER_NAME)
logger.info("Reading input from %s", filepath)
with open(filepath, "r") as handle:
for line in handle:
line = line.strip()
yield line
def read_comma_separated_values(filepath: str) -> Iterator[str]:
logger = logging.getLogger(LOGGER_NAME)
logger.info("Reading comma separated values from %s", filepath)
return_list = []
for line in get_input_from_file(filepath=filepath):
line = line.strip()
values = line.split(",")
for value in values:
v = value.strip()
if v:
yield v
def read_comma_separated_ints(filepath: str) -> Iterator[int]:
values = read_comma_separated_values(filepath=filepath)
for item in values:
yield int(item)
def get_integers_from_file(filepath: str) -> Iterator[int]:
logger = logging.getLogger(LOGGER_NAME)
logger.info("Reading integers from %s", filepath)
for line in get_input_from_file(filepath=filepath):
yield int(line)
def line_to_parts(line) -> dict:
m = re.match(r"(\w+) (\w+)", "Isaac Newton, physicist")
m = re.match(r"#(\d+) @ (\d+),(\d+): (\d+)x(\d+)", line)
return_dict = {
"id": m.group(1),
"from_left": m.group(2),
"from_top": m.group(3),
"width": m.group(4),
"height": m.group(5),
}
return return_dict
def binary_list_to_int(binary_list: List[str]) -> int:
binary_list = [str(x) for x in binary_list]
as_string = "".join(binary_list)
as_int= int(as_string, 2)
return as_int
class SparseGrid():
def __init__(self):
self.cell_dict = {}
self.max_x = 0
self.max_y = 0
self._logger = logging.getLogger(LOGGER_NAME)
def add_line(self, x1, y1, x2, y2, value, only_horizontal):
if not (x1 == x2 or y1 == y2) and only_horizontal:
self._logger.debug("Not a horizontal or vertical line")
return
min_x = min(x1, x2)
max_x = max(x1, x2)
min_y = min(y1, y2)
max_y = max(y1, y2)
if max_x > self.max_x:
self.max_x = max_x
self._logger.debug("Adjusting sparse grid max_x to %s", self.max_x)
if max_y > self.max_y:
self.max_y = max_y
self._logger.debug("Adjusting sparse grid max_y to %s", self.max_y)
x_list = list(range(min_x, max_x + 1))
y_list = list(range(min_y, max_y + 1))
if x1 < x2:
x_list.reverse()
if y1 < y2:
y_list.reverse()
if len(y_list) == 1:
for x in range(len(x_list)-1):
y_list.append(y_list[0])
if len(x_list) == 1:
for y in range(len(y_list)-1):
x_list.append(x_list[0])
cells = list(zip(x_list, y_list))
for x, y in cells:
coordinate = f"{x}:{y}"
self.cell_dict.setdefault(coordinate, [])
self.cell_dict[coordinate].append(value)
def add_block(self, from_left_x: int, from_top_y: int, width: int, height: int, value: str):
if from_left_x + width > self.max_x:
self.max_x = from_left_x + width
print(f"Adjusting width to {self.max_x}")
if from_top_y + height > self.max_y:
self.max_y = from_top_y + height
print(f"Adjusting height to {self.max_y}")
for x in range(width):
for y in range(height):
true_x = x + from_left_x
true_y = y + from_top_y
coordinate = f"{true_x}:{true_y}"
self.cell_dict.setdefault(coordinate, [])
self.cell_dict[coordinate].append(value)
def evaluate_block(self, from_left_x: int, from_top_y: int, width: int, height: int, value: str) -> bool:
for x in range(width):
for y in range(height):
true_x = x + from_left_x
true_y = y + from_top_y
coordinate = f"{true_x}:{true_y}"
array = self.cell_dict[coordinate]
if len(array) != 1:
return False
return True
def get_num_overlapping_cells(self):
num = 0
for coord, array in self.cell_dict.items():
if len(array) > 1:
num += 1
return num
def print(self):
for y in range(self.max_y + 1):
for x in range(self.max_x + 1):
coordinate = f"{x}:{y}"
array = self.cell_dict.get(coordinate)
if not array:
print(".", end='')
else:
print(len(array), end='')
print("")
def parse_args(argv=None):
parser = argparse.ArgumentParser(description="Main Driver for Frivenmeld")
parser.add_argument('-v',
action="store_true",
dest="verbose",
required=False,
help="Debug output")
parser.add_argument('-t',
action="store_true",
dest="use_test_data",
required=False,
help="Use test data")
parser.add_argument('-d',
action="store_true",
dest="print_data",
required=False,
help="Just print out the data")
parser.add_argument("-yd",
dest="year_day",
help="YYYYDD the date to process data for")
results = parser.parse_args(argv)
return results
class Input():
def __init__(self, year: int, day: int, use_test_data: bool):
self._year = year
self._day = day
self._use_test_data = use_test_data
self._logger = logging.getLogger(LOGGER_NAME)
self._logger.info("Input year: %d day: %d test-data: %s", self._year, self._day, self._use_test_data)
def get_filepath(self):
if self._use_test_data:
return f"data/{self._year}/day/{self._day}/test-input.txt"
else:
return f"data/{self._year}/day/{self._day}/input.txt"
def get_raw(self) -> str:
filepath = self.get_filepath()
self._logger.info("Reading raw data from '%s'", filepath)
with open(filepath, "r") as handle:
data = handle.read()
return data
def get_lines(self) -> Iterator[str]:
filepath = self.get_filepath()
self._logger.info("Reading lines from '%s'", filepath)
with open(filepath, "r") as handle:
for line in handle:
line = line.strip()
yield line
def get_chars(self) -> List[str]:
for line in self.get_lines():
for char in line:
yield char
def get_ints(self) -> Iterator[int]:
for line in self.get_lines():
yield int(line)
def get_floats(self) -> Iterator[int]:
for line in self.get_lines():
yield float(line)
def get_comma_separated_values(self) -> Iterator[str]:
for line in self.get_lines():
line = line.strip()
values = line.split(",")
for value in values:
v = value.strip()
if v:
yield v
def get_comma_separated_ints(self) -> Iterator[int]:
values = self.get_comma_separated_values()
for item in values:
yield int(item)
| true | true |
f713ce651c7de767cc25c8ccf653e5662e9a0c44 | 1,924 | py | Python | discooord/extras/examples/email.py | TehBigA/discooord | 9a290686664a0c8636958058e28d4115f45c1cc3 | [
"MIT"
] | null | null | null | discooord/extras/examples/email.py | TehBigA/discooord | 9a290686664a0c8636958058e28d4115f45c1cc3 | [
"MIT"
] | 1 | 2016-12-15T02:03:58.000Z | 2016-12-15T02:07:42.000Z | discooord/extras/examples/email.py | TehBigA/discooord | 9a290686664a0c8636958058e28d4115f45c1cc3 | [
"MIT"
] | null | null | null | import poplib
from ...utils import Timer
class EmailChecker(Timer):
    '''Periodically poll a POP3 mailbox and invoke a callback per message.

    WARNING: This uses POP3 and by default deletes the emails it reads!

    Runs on the Timer base class's schedule (semantics of Timer, e.g.
    how target()/on_exception() are invoked, are defined elsewhere —
    assumed: target() fires every *interval* seconds).
    '''
    # POP3 account credentials.
    username = None
    password = None
    # POP3 server host and port.
    server = None
    port = None
    # Callback invoked as on_mail(headers_dict, body_str); a truthy
    # return forces deletion of that message.
    on_mail = None
    # When True, delete every message after reading it.
    delete = None
    def __init__(self, username, password, server, port=110, on_mail=None, delete=True, interval=60):
        super(EmailChecker, self).__init__(interval=interval)
        self.username = username
        self.password = password
        self.server = server
        self.port = port
        if on_mail is not None:
            self.on_mail = on_mail
        self.delete = delete
    def target(self):
        """Fetch all pending messages, parse headers/body, dispatch on_mail."""
        client = poplib.POP3(self.server, self.port)
        client.user(self.username)
        client.pass_(self.password)
        # stat() -> (message_count, mailbox_size)
        count = client.stat()[0]
        for i in range(count):
            # POP3 message numbers are 1-based.
            email = client.retr(i + 1)
            data = [l.decode('utf-8') for l in email[1]]
            # First blank line separates headers from the body (RFC 5322).
            sep = data.index(u'')
            headers = {}
            body = u''
            # Headers
            last = None
            for line in data[:sep]:
                if line[0] in (u' ', u'\t', u'\r', u'\n') and last is not None:
                    # Folded header continuation
                    headers[last] += line
                else:
                    # Next header: split on the first ':'; the +2 skips
                    # the colon and the following space.
                    name_separator = line.index(u':')
                    name = line[:name_separator]
                    value = line[name_separator + 2:]
                    headers[name] = value
                    last = name
            # Body
            body = u''.join(data[sep + 1:])
            # Delete when the callback asks for it or deletion is the default.
            if self.on_mail(headers, body) or self.delete:
                client.dele(i + 1)
        client.quit()
    def on_exception(self):
        '''Sometimes the mail server doesn't respond in time, ignore the produced error and keep running.'''
        return False
| 26 | 108 | 0.519751 | import poplib
from ...utils import Timer
class EmailChecker(Timer):
username = None
password = None
server = None
port = None
on_mail = None
delete = None
def __init__(self, username, password, server, port=110, on_mail=None, delete=True, interval=60):
super(EmailChecker, self).__init__(interval=interval)
self.username = username
self.password = password
self.server = server
self.port = port
if on_mail is not None:
self.on_mail = on_mail
self.delete = delete
def target(self):
client = poplib.POP3(self.server, self.port)
client.user(self.username)
client.pass_(self.password)
count = client.stat()[0]
for i in range(count):
email = client.retr(i + 1)
data = [l.decode('utf-8') for l in email[1]]
sep = data.index(u'')
headers = {}
body = u''
last = None
for line in data[:sep]:
if line[0] in (u' ', u'\t', u'\r', u'\n') and last is not None:
headers[last] += line
else:
name_separator = line.index(u':')
name = line[:name_separator]
value = line[name_separator + 2:]
headers[name] = value
last = name
body = u''.join(data[sep + 1:])
if self.on_mail(headers, body) or self.delete:
client.dele(i + 1)
client.quit()
def on_exception(self):
return False
| true | true |
f713d06ec761c4d741cb3e1577bfbd581ebd687d | 439 | py | Python | python_roms_modules/calc_etat.py | NoeLahaye/InTideScat_JGR | 6849e82b3cda816ca7bdc6ab207e2c857a3f5f5f | [
"CC0-1.0"
] | null | null | null | python_roms_modules/calc_etat.py | NoeLahaye/InTideScat_JGR | 6849e82b3cda816ca7bdc6ab207e2c857a3f5f5f | [
"CC0-1.0"
] | null | null | null | python_roms_modules/calc_etat.py | NoeLahaye/InTideScat_JGR | 6849e82b3cda816ca7bdc6ab207e2c857a3f5f5f | [
"CC0-1.0"
] | null | null | null | from __future__ import print_function, division
import numpy as np
from R_tools import rho2u,rho2v
def calc_etat(ubar,vbar,hflow,pm,pn):
    """Compute divergence of barotropic momentum (units m/h).

    Arrays are (x, y) ordered -- ``hflow`` is the full column depth
    (SSE - z_bot).
    """
    # Flux differences along x and y, trimmed to interior points so both
    # terms share the same shape.
    dflux_x = np.diff(rho2u(hflow / pn) * ubar, axis=0)[:, 1:-1]
    dflux_y = np.diff(rho2v(hflow / pm) * vbar, axis=1)[1:-1, :]
    # Negative divergence scaled by the interior metric factors; the 3600
    # factor presumably converts a per-second rate to per-hour (docstring
    # states m/h).
    return -(dflux_x + dflux_y) * pm[1:-1, 1:-1] * pn[1:-1, 1:-1] * 3600.
| 36.583333 | 73 | 0.649203 | from __future__ import print_function, division
import numpy as np
from R_tools import rho2u,rho2v
def calc_etat(ubar,vbar,hflow,pm,pn):
return -( np.diff(rho2u(hflow/pn)*ubar,axis=0)[:,1:-1] \
+ np.diff(rho2v(hflow/pm)*vbar,axis=1)[1:-1,:] )\
* pm[1:-1,1:-1]*pn[1:-1,1:-1]*3600.
| true | true |
f713d171e7f9310b01643e43fb15f40f64caac4e | 172 | py | Python | starter/__init__.py | sjdillon/starter | 6ed51098008c5ae18d1f809dddf81aa3de3e8ce2 | [
"MIT"
] | null | null | null | starter/__init__.py | sjdillon/starter | 6ed51098008c5ae18d1f809dddf81aa3de3e8ce2 | [
"MIT"
] | null | null | null | starter/__init__.py | sjdillon/starter | 6ed51098008c5ae18d1f809dddf81aa3de3e8ce2 | [
"MIT"
] | null | null | null | from .starter_class import StarterClass
from .boto_manager import BotoClientManager
from .config import _CONFIG
__all__ = ['BotoClientManager', 'StarterClass', '_CONFIG']
| 28.666667 | 58 | 0.813953 | from .starter_class import StarterClass
from .boto_manager import BotoClientManager
from .config import _CONFIG
__all__ = ['BotoClientManager', 'StarterClass', '_CONFIG']
| true | true |
f713d17923015c16f0e5cb322bcc69119cb35958 | 8,925 | py | Python | beaconrunner/eth2spec/test/phase0/block_processing/test_process_voluntary_exit.py | casparschwa/beaconrunner | d5430e08b120462beea19f65a4cf335ec9eb9134 | [
"MIT"
] | 5 | 2021-04-08T23:17:19.000Z | 2021-11-02T18:24:37.000Z | tests/core/pyspec/eth2spec/test/phase0/block_processing/test_process_voluntary_exit.py | sthagen/eth2.0-specs | 27b0d1f32e4ce430dd13b447c273a0f64b637066 | [
"CC0-1.0"
] | 127 | 2021-04-09T21:15:08.000Z | 2021-10-19T03:58:26.000Z | tests/core/pyspec/eth2spec/test/phase0/block_processing/test_process_voluntary_exit.py | sthagen/eth2.0-specs | 27b0d1f32e4ce430dd13b447c273a0f64b637066 | [
"CC0-1.0"
] | 1 | 2022-03-06T06:08:17.000Z | 2022-03-06T06:08:17.000Z | from eth2spec.test.context import spec_state_test, expect_assertion_error, always_bls, with_all_phases
from eth2spec.test.helpers.keys import pubkey_to_privkey
from eth2spec.test.helpers.voluntary_exits import sign_voluntary_exit
def run_voluntary_exit_processing(spec, state, signed_voluntary_exit, valid=True):
    """
    Run ``process_voluntary_exit``, yielding:
      - pre-state ('pre')
      - voluntary_exit ('voluntary_exit')
      - post-state ('post').
    If ``valid == False``, run expecting ``AssertionError``
    """
    validator_index = signed_voluntary_exit.message.validator_index
    # Emit the inputs first so the generated test vector captures the
    # pre-state before any mutation.
    yield 'pre', state
    yield 'voluntary_exit', signed_voluntary_exit
    # Invalid exits must make the spec function raise; the post-state is None.
    if not valid:
        expect_assertion_error(lambda: spec.process_voluntary_exit(state, signed_voluntary_exit))
        yield 'post', None
        return
    pre_exit_epoch = state.validators[validator_index].exit_epoch
    spec.process_voluntary_exit(state, signed_voluntary_exit)
    yield 'post', state
    # A valid exit moves the validator from "never exiting"
    # (FAR_FUTURE_EPOCH) to a concrete exit epoch.
    assert pre_exit_epoch == spec.FAR_FUTURE_EPOCH
    assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
@with_all_phases
@spec_state_test
def test_success(spec, state):
    """A validator that has been active long enough can voluntarily exit."""
    # Advance past SHARD_COMMITTEE_PERIOD so the validator is exit-eligible.
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
    epoch = spec.get_current_epoch(state)
    index = spec.get_active_validator_indices(state, epoch)[0]
    exit_message = spec.VoluntaryExit(epoch=epoch, validator_index=index)
    signed_exit = sign_voluntary_exit(
        spec, state, exit_message, pubkey_to_privkey[state.validators[index].pubkey])
    yield from run_voluntary_exit_processing(spec, state, signed_exit)
    # The validator lands on the deterministic activation/exit epoch.
    assert state.validators[index].exit_epoch == spec.compute_activation_exit_epoch(epoch)
@with_all_phases
@spec_state_test
@always_bls
def test_invalid_signature(spec, state):
    """An exit signed with the wrong key must be rejected."""
    # Advance past SHARD_COMMITTEE_PERIOD so only the signature is at fault.
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
    epoch = spec.get_current_epoch(state)
    index = spec.get_active_validator_indices(state, epoch)[0]
    # 12345 is an arbitrary privkey that does not match the validator's pubkey.
    signed_exit = sign_voluntary_exit(
        spec, state, spec.VoluntaryExit(epoch=epoch, validator_index=index), 12345)
    yield from run_voluntary_exit_processing(spec, state, signed_exit, False)
@with_all_phases
@spec_state_test
def test_success_exit_queue(spec, state):
    """Filling the per-epoch exit churn pushes the next exit one epoch later."""
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
    current_epoch = spec.get_current_epoch(state)
    # exit `MAX_EXITS_PER_EPOCH`
    initial_indices = spec.get_active_validator_indices(state, current_epoch)[:spec.get_validator_churn_limit(state)]
    # Prepare a bunch of exits, based on the current state
    exit_queue = []
    for index in initial_indices:
        privkey = pubkey_to_privkey[state.validators[index].pubkey]
        signed_voluntary_exit = sign_voluntary_exit(
            spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=index), privkey)
        exit_queue.append(signed_voluntary_exit)
    # Now run all the exits
    for voluntary_exit in exit_queue:
        # the function yields data, but we are just interested in running it here, ignore yields.
        for _ in run_voluntary_exit_processing(spec, state, voluntary_exit):
            continue
    # exit an additional validator
    validator_index = spec.get_active_validator_indices(state, current_epoch)[-1]
    privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
    signed_voluntary_exit = sign_voluntary_exit(
        spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
    # This is the interesting part of the test: on a pre-state with a full exit queue,
    # when processing an additional exit, it results in an exit in a later epoch
    yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit)
    assert (
        state.validators[validator_index].exit_epoch ==
        state.validators[initial_indices[0]].exit_epoch + 1
    )
@with_all_phases
@spec_state_test
def test_default_exit_epoch_subsequent_exit(spec, state):
    """An exit following an earlier-epoch exit still lands on the default
    activation/exit epoch rather than queueing behind it."""
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow for exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
    signed_voluntary_exit = sign_voluntary_exit(
        spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
    # Exit one validator prior to this new one
    exited_index = spec.get_active_validator_indices(state, current_epoch)[-1]
    state.validators[exited_index].exit_epoch = current_epoch - 1
    yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit)
    assert state.validators[validator_index].exit_epoch == spec.compute_activation_exit_epoch(current_epoch)
@with_all_phases
@spec_state_test
def test_validator_exit_in_future(spec, state):
    """An exit whose epoch has not been reached yet must be rejected."""
    # Advance past SHARD_COMMITTEE_PERIOD so only the exit epoch is at fault.
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
    epoch = spec.get_current_epoch(state)
    index = spec.get_active_validator_indices(state, epoch)[0]
    # epoch + 1 lies in the future relative to the current state.
    future_exit = spec.VoluntaryExit(
        epoch=epoch + 1,
        validator_index=index,
    )
    signed_exit = sign_voluntary_exit(
        spec, state, future_exit, pubkey_to_privkey[state.validators[index].pubkey])
    yield from run_voluntary_exit_processing(spec, state, signed_exit, False)
@with_all_phases
@spec_state_test
def test_validator_invalid_validator_index(spec, state):
    """An exit naming a non-existent validator index must be rejected."""
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
    epoch = spec.get_current_epoch(state)
    signer_index = spec.get_active_validator_indices(state, epoch)[0]
    # len(state.validators) is one past the last valid index.
    bad_exit = spec.VoluntaryExit(
        epoch=epoch,
        validator_index=len(state.validators),
    )
    signed_exit = sign_voluntary_exit(
        spec, state, bad_exit, pubkey_to_privkey[state.validators[signer_index].pubkey])
    yield from run_voluntary_exit_processing(spec, state, signed_exit, False)
@with_all_phases
@spec_state_test
def test_validator_not_active(spec, state):
    """An exit for a validator that never activated must be rejected."""
    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
    # Make the validator pending-activation forever.
    state.validators[validator_index].activation_epoch = spec.FAR_FUTURE_EPOCH
    signed_voluntary_exit = sign_voluntary_exit(
        spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
    yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
@with_all_phases
@spec_state_test
def test_validator_already_exited(spec, state):
    """A second exit for a validator with an exit epoch set must be rejected."""
    # move state forward SHARD_COMMITTEE_PERIOD epochs to allow validator able to exit
    state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
    # but validator already has exited
    state.validators[validator_index].exit_epoch = current_epoch + 2
    signed_voluntary_exit = sign_voluntary_exit(
        spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
    yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
@with_all_phases
@spec_state_test
def test_validator_not_active_long_enough(spec, state):
    """An exit before SHARD_COMMITTEE_PERIOD epochs of activity is rejected."""
    # Unlike the success cases, the state is deliberately NOT advanced here.
    current_epoch = spec.get_current_epoch(state)
    validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
    privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
    signed_voluntary_exit = sign_voluntary_exit(
        spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
    # Sanity-check the precondition this test relies on.
    assert (
        current_epoch - state.validators[validator_index].activation_epoch <
        spec.config.SHARD_COMMITTEE_PERIOD
    )
    yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
| 39.666667 | 117 | 0.779384 | from eth2spec.test.context import spec_state_test, expect_assertion_error, always_bls, with_all_phases
from eth2spec.test.helpers.keys import pubkey_to_privkey
from eth2spec.test.helpers.voluntary_exits import sign_voluntary_exit
def run_voluntary_exit_processing(spec, state, signed_voluntary_exit, valid=True):
validator_index = signed_voluntary_exit.message.validator_index
yield 'pre', state
yield 'voluntary_exit', signed_voluntary_exit
if not valid:
expect_assertion_error(lambda: spec.process_voluntary_exit(state, signed_voluntary_exit))
yield 'post', None
return
pre_exit_epoch = state.validators[validator_index].exit_epoch
spec.process_voluntary_exit(state, signed_voluntary_exit)
yield 'post', state
assert pre_exit_epoch == spec.FAR_FUTURE_EPOCH
assert state.validators[validator_index].exit_epoch < spec.FAR_FUTURE_EPOCH
@with_all_phases
@spec_state_test
def test_success(spec, state):
state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
signed_voluntary_exit = sign_voluntary_exit(
spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit)
assert state.validators[validator_index].exit_epoch == spec.compute_activation_exit_epoch(current_epoch)
@with_all_phases
@spec_state_test
@always_bls
def test_invalid_signature(spec, state):
state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
voluntary_exit = spec.VoluntaryExit(
epoch=current_epoch,
validator_index=validator_index,
)
signed_voluntary_exit = sign_voluntary_exit(spec, state, voluntary_exit, 12345)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
@with_all_phases
@spec_state_test
def test_success_exit_queue(spec, state):
state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
current_epoch = spec.get_current_epoch(state)
initial_indices = spec.get_active_validator_indices(state, current_epoch)[:spec.get_validator_churn_limit(state)]
exit_queue = []
for index in initial_indices:
privkey = pubkey_to_privkey[state.validators[index].pubkey]
signed_voluntary_exit = sign_voluntary_exit(
spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=index), privkey)
exit_queue.append(signed_voluntary_exit)
for voluntary_exit in exit_queue:
for _ in run_voluntary_exit_processing(spec, state, voluntary_exit):
continue
validator_index = spec.get_active_validator_indices(state, current_epoch)[-1]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
signed_voluntary_exit = sign_voluntary_exit(
spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit)
assert (
state.validators[validator_index].exit_epoch ==
state.validators[initial_indices[0]].exit_epoch + 1
)
@with_all_phases
@spec_state_test
def test_default_exit_epoch_subsequent_exit(spec, state):
state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
signed_voluntary_exit = sign_voluntary_exit(
spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
exited_index = spec.get_active_validator_indices(state, current_epoch)[-1]
state.validators[exited_index].exit_epoch = current_epoch - 1
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit)
assert state.validators[validator_index].exit_epoch == spec.compute_activation_exit_epoch(current_epoch)
@with_all_phases
@spec_state_test
def test_validator_exit_in_future(spec, state):
state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
voluntary_exit = spec.VoluntaryExit(
epoch=current_epoch + 1,
validator_index=validator_index,
)
signed_voluntary_exit = sign_voluntary_exit(spec, state, voluntary_exit, privkey)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
@with_all_phases
@spec_state_test
def test_validator_invalid_validator_index(spec, state):
state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
voluntary_exit = spec.VoluntaryExit(
epoch=current_epoch,
validator_index=len(state.validators),
)
signed_voluntary_exit = sign_voluntary_exit(spec, state, voluntary_exit, privkey)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
@with_all_phases
@spec_state_test
def test_validator_not_active(spec, state):
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
state.validators[validator_index].activation_epoch = spec.FAR_FUTURE_EPOCH
signed_voluntary_exit = sign_voluntary_exit(
spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
@with_all_phases
@spec_state_test
def test_validator_already_exited(spec, state):
state.slot += spec.config.SHARD_COMMITTEE_PERIOD * spec.SLOTS_PER_EPOCH
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
state.validators[validator_index].exit_epoch = current_epoch + 2
signed_voluntary_exit = sign_voluntary_exit(
spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
@with_all_phases
@spec_state_test
def test_validator_not_active_long_enough(spec, state):
current_epoch = spec.get_current_epoch(state)
validator_index = spec.get_active_validator_indices(state, current_epoch)[0]
privkey = pubkey_to_privkey[state.validators[validator_index].pubkey]
signed_voluntary_exit = sign_voluntary_exit(
spec, state, spec.VoluntaryExit(epoch=current_epoch, validator_index=validator_index), privkey)
assert (
current_epoch - state.validators[validator_index].activation_epoch <
spec.config.SHARD_COMMITTEE_PERIOD
)
yield from run_voluntary_exit_processing(spec, state, signed_voluntary_exit, False)
| true | true |
f713d1e1ac8fca799a224fac9038388cc476ff10 | 815 | py | Python | src/personal/migrations/0001_initial.py | shopeyinwale/blog | ade90270f4b23cd11e4529e98ad9c6d78b3eae8e | [
"bzip2-1.0.6"
] | null | null | null | src/personal/migrations/0001_initial.py | shopeyinwale/blog | ade90270f4b23cd11e4529e98ad9c6d78b3eae8e | [
"bzip2-1.0.6"
] | 9 | 2021-03-19T11:55:42.000Z | 2022-02-10T14:35:53.000Z | src/personal/migrations/0001_initial.py | shopeyin5/blog | ade90270f4b23cd11e4529e98ad9c6d78b3eae8e | [
"bzip2-1.0.6"
] | null | null | null | # Generated by Django 2.2.2 on 2019-07-29 19:03
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 2.2.2): creates the Question
    # model. Applied migrations should not be hand-edited.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=60)),
                ('question', models.TextField(max_length=400)),
                # Single-letter DB value paired with a human-readable label.
                ('priority', models.CharField(choices=[('H', 'High'), ('M', 'Medium'), ('L', 'Low')], max_length=1)),
            ],
            options={
                'verbose_name': 'The Question',
                'verbose_name_plural': "People's Question",
            },
        ),
    ]
| 29.107143 | 117 | 0.537423 |
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=60)),
('question', models.TextField(max_length=400)),
('priority', models.CharField(choices=[('H', 'High'), ('M', 'Medium'), ('L', 'Low')], max_length=1)),
],
options={
'verbose_name': 'The Question',
'verbose_name_plural': "People's Question",
},
),
]
| true | true |
f713d1f837f20b709093ad344a72f4801ea14f31 | 618 | py | Python | Funny_Js_Crack/16-虾米音乐翻页JS破解/taoyizhu.py | qqizai/Func_Js_Crack | 8cc8586107fecace4b71d0519cfbc760584171b1 | [
"MIT"
] | 18 | 2020-12-09T06:49:46.000Z | 2022-01-27T03:20:36.000Z | Funny_Js_Crack/16-虾米音乐翻页JS破解/taoyizhu.py | sumerzhang/Func_Js_Crack | 8cc8586107fecace4b71d0519cfbc760584171b1 | [
"MIT"
] | null | null | null | Funny_Js_Crack/16-虾米音乐翻页JS破解/taoyizhu.py | sumerzhang/Func_Js_Crack | 8cc8586107fecace4b71d0519cfbc760584171b1 | [
"MIT"
] | 9 | 2020-12-20T08:52:09.000Z | 2021-12-19T09:13:09.000Z | import execjs
def get_js_function(js_path, func_name, *func_args):
'''
获取指定目录下的js代码, 并且指定js代码中函数的名字以及函数的参数。
:param js_path: js代码的位置
:param func_name: js代码中函数的名字
:param func_args: js代码中函数的参数
:return: 返回调用js函数的结果
'''
with open(js_path, encoding='utf-8') as fp:
js = fp.read()
ctx = execjs.compile(js)
return ctx.call(func_name, func_args[0], func_args[1])
if __name__ == '__main__':
    # Please give this repo a star :)
    # NOTE(review): get_js_function indexes func_args[1], but only one extra
    # argument is passed here -- this call raises IndexError as written; verify.
    passwd = get_js_function('xiami.js', '_s', "")
    print('*'*80)
    print(passwd)
    print('*'*80)
    print('@欢迎Star!')
    print('@有问题请联系: scrapy@qq.com')
def get_js_function(js_path, func_name, *func_args):
with open(js_path, encoding='utf-8') as fp:
js = fp.read()
ctx = execjs.compile(js)
return ctx.call(func_name, func_args[0], func_args[1])
if __name__ == '__main__':
passwd = get_js_function('xiami.js', '_s', "")
print('*'*80)
print(passwd)
print('*'*80)
print('@欢迎Star!')
print('@有问题请联系: scrapy@qq.com') | true | true |
f713d227c0e8d9d2e332c2aa25f7e7796a1d69f9 | 5,727 | py | Python | samples/client/petstore-security-test/python/petstore_api/apis/fake_api.py | zhiyongwang/swagger-codegen | fb3d4e61bb5c7f5ea343d8eac2d0842c2ffe685e | [
"Apache-2.0"
] | 1 | 2019-05-24T14:03:57.000Z | 2019-05-24T14:03:57.000Z | samples/client/petstore-security-test/python/petstore_api/apis/fake_api.py | zhiyongwang/swagger-codegen | fb3d4e61bb5c7f5ea343d8eac2d0842c2ffe685e | [
"Apache-2.0"
] | null | null | null | samples/client/petstore-security-test/python/petstore_api/apis/fake_api.py | zhiyongwang/swagger-codegen | fb3d4e61bb5c7f5ea343d8eac2d0842c2ffe685e | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Swagger Petstore */ ' \" =end -- \\r\\n \\n \\r
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ */ ' \" =end --
OpenAPI spec version: 1.0.0 */ ' \" =end -- \\r\\n \\n \\r
Contact: apiteam@swagger.io */ ' \" =end -- \\r\\n \\n \\r
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class FakeApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    # The odd method/parameter names and docstring contents below are
    # deliberate: this client is generated from the petstore *security test*
    # spec, which embeds code-injection strings to verify they are escaped.
    def __init__(self, api_client=None):
        # Fall back to (and lazily create) the shared client held on the
        # global Configuration when no explicit api_client is supplied.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client
    def test_code_inject____end__rn_n_r(self, **kwargs):
        """
        To test code injection */ ' \" =end -- \\r\\n \\n \\r
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.test_code_inject____end__rn_n_r(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str test_code_inject____end____rn_n_r: To test code injection */ ' \" =end -- \\r\\n \\n \\r
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: return only the response data (not status or
        # headers); delegate everything else to the _with_http_info variant.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.test_code_inject____end__rn_n_r_with_http_info(**kwargs)
        else:
            (data) = self.test_code_inject____end__rn_n_r_with_http_info(**kwargs)
            return data
    def test_code_inject____end__rn_n_r_with_http_info(self, **kwargs):
        """
        To test code injection */ ' \" =end -- \\r\\n \\n \\r
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.test_code_inject____end__rn_n_r_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str test_code_inject____end____rn_n_r: To test code injection */ ' \" =end -- \\r\\n \\n \\r
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['test_code_inject____end____rn_n_r']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject any keyword argument the spec does not declare.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method test_code_inject____end__rn_n_r" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        resource_path = '/fake'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        header_params = {}
        form_params = []
        local_var_files = {}
        # The injection-test string is sent as a form field name on purpose.
        if 'test_code_inject____end____rn_n_r' in params:
            form_params.append(('test code inject */ ' " =end -- \r\n \n \r", params['test_code_inject____end____rn_n_r']))
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', '*/ \" =end -- '])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', '*/ \" =end -- '])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
| 38.959184 | 184 | 0.572027 |
from __future__ import absolute_import
import sys
import os
import re
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class FakeApi(object):
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def test_code_inject____end__rn_n_r(self, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.test_code_inject____end__rn_n_r_with_http_info(**kwargs)
else:
(data) = self.test_code_inject____end__rn_n_r_with_http_info(**kwargs)
return data
def test_code_inject____end__rn_n_r_with_http_info(self, **kwargs):
all_params = ['test_code_inject____end____rn_n_r']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_code_inject____end__rn_n_r" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/fake'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'test_code_inject____end____rn_n_r' in params:
form_params.append(('test code inject */ ' " =end -- \r\n \n \r', params['test_code_inject____end____rn_n_r']))
body_params = None
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', '*/ \" =end -- '])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', '*/ \" =end -- '])
auth_settings = []
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| true | true |
f713d2dfa0940a023fa6c459b519fe50dd7f2466 | 407 | py | Python | onem2m/util.py | franjial/ghostm2m | 2e7898761237cb12f4fddd55665b3a15fb84dddc | [
"MIT"
] | null | null | null | onem2m/util.py | franjial/ghostm2m | 2e7898761237cb12f4fddd55665b3a15fb84dddc | [
"MIT"
] | null | null | null | onem2m/util.py | franjial/ghostm2m | 2e7898761237cb12f4fddd55665b3a15fb84dddc | [
"MIT"
] | null | null | null | class Singleton(type):
"""
"""
_instances = {}
def __call__(cls, *args, **kwargs):
"""
Possible changes to the value of the `__init__` argument do not affect
the returned instance.
"""
if cls not in cls._instances:
instance = super().__call__(*args, **kwargs)
cls._instances[cls] = instance
return cls._instances[cls] | 27.133333 | 78 | 0.560197 | class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
instance = super().__call__(*args, **kwargs)
cls._instances[cls] = instance
return cls._instances[cls] | true | true |
f713d2ef2e90d06531edcf6a3efcb4a1542366f0 | 397 | py | Python | to_do_list/to_do_list/asgi.py | sanjaykrishnan/to-do-list-backend | fbc322aac526afc69ba2385a4d503b2e9fd096c0 | [
"MIT"
] | null | null | null | to_do_list/to_do_list/asgi.py | sanjaykrishnan/to-do-list-backend | fbc322aac526afc69ba2385a4d503b2e9fd096c0 | [
"MIT"
] | null | null | null | to_do_list/to_do_list/asgi.py | sanjaykrishnan/to-do-list-backend | fbc322aac526afc69ba2385a4d503b2e9fd096c0 | [
"MIT"
] | null | null | null | """
ASGI config for to_do_list project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'to_do_list.settings')
application = get_asgi_application()
| 23.352941 | 78 | 0.788413 |
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'to_do_list.settings')
application = get_asgi_application()
| true | true |
f713d3498adfa663334e6f44326c17d626d08012 | 8,682 | py | Python | qa/rpc-tests/rawtransactions.py | v1nc0/macclone14.3 | e91fb2566205b5f4e2e1b2384cd93309a24261c4 | [
"MIT"
] | null | null | null | qa/rpc-tests/rawtransactions.py | v1nc0/macclone14.3 | e91fb2566205b5f4e2e1b2384cd93309a24261c4 | [
"MIT"
] | null | null | null | qa/rpc-tests/rawtransactions.py | v1nc0/macclone14.3 | e91fb2566205b5f4e2e1b2384cd93309a24261c4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Machinecoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""rawtranscation RPCs QA test.
# Tests the following RPCs:
# - createrawtransaction
# - signrawtransaction
# - sendrawtransaction
# - decoderawtransaction
# - getrawtransaction
"""
from test_framework.test_framework import MachinecoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(MachinecoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
#connect to a local machine for debugging
#url = "http://machinecoinrpc:DP6DvqZtqXarpeNWyN3LZTFchCCyCUuHwNF7E8pX99x1@%s:%d" % ('127.0.0.1', 140332)
#proxy = AuthServiceProxy(url)
#proxy.url = url # store URL on proxy for info
#self.nodes.append(proxy)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test(self):
#prepare some coins for multiple *rawtransaction commands
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
self.sync_all()
self.nodes[0].generate(5)
self.sync_all()
#########################################
# sendrawtransaction with missing input #
#########################################
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] #won't exists
outputs = { self.nodes[0].getnewaddress() : 4.998 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
rawtx = self.nodes[2].signrawtransaction(rawtx)
try:
rawtx = self.nodes[2].sendrawtransaction(rawtx['hex'])
except JSONRPCException as e:
assert("Missing inputs" in e.error['message'])
else:
assert(False)
#########################
# RAW TX MULTISIG TESTS #
#########################
# 2of2 test
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
mSigObjValid = self.nodes[2].validateaddress(mSigObj)
#use balance deltas instead of absolute values
bal = self.nodes[2].getbalance()
# send 1.2 BTC to msig adr
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[2].getbalance(), bal+Decimal('1.20000000')) #node2 has both keys of the 2of2 ms addr., tx should affect the balance
# 2of3 test from different nodes
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
addr3Obj = self.nodes[2].validateaddress(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
mSigObjValid = self.nodes[2].validateaddress(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
sPK = rawTx['vout'][0]['scriptPubKey']['hex']
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#THIS IS A INCOMPLETE FEATURE
#NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = False
for outpoint in rawTx['vout']:
if outpoint['value'] == Decimal('2.20000000'):
vout = outpoint
break
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
# getrawtransaction tests
# 1. valid parameters - only supply txid
txHash = rawTx["hash"]
assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex'])
# 2. valid parameters - supply txid and 0 for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex'])
# 3. valid parameters - supply txid and False for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex'])
# 4. valid parameters - supply txid and 1 for verbose.
# We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex'])
# 5. valid parameters - supply txid and True for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])
# 6. invalid parameters - supply txid and string "Flase"
assert_raises(JSONRPCException, self.nodes[0].getrawtransaction, txHash, "Flase")
# 7. invalid parameters - supply txid and empty array
assert_raises(JSONRPCException, self.nodes[0].getrawtransaction, txHash, [])
# 8. invalid parameters - supply txid and empty dict
assert_raises(JSONRPCException, self.nodes[0].getrawtransaction, txHash, {})
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises(JSONRPCException, self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises(JSONRPCException, self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
if __name__ == '__main__':
RawTransactionsTest().main()
| 44.752577 | 147 | 0.644782 |
from test_framework.test_framework import MachinecoinTestFramework
from test_framework.util import *
class RawTransactionsTest(MachinecoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
des_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test(self):
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
self.sync_all()
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0)
self.sync_all()
self.nodes[0].generate(5)
self.sync_all()
lf.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
addr3Obj = self.nodes[2].validateaddress(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
mSigObjValid = self.nodes[2].validateaddress(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
sPK = rawTx['vout'][0]['scriptPubKey']['hex']
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#THIS IS A INCOMPLETE FEATURE
#NODE2 HAS TWO OF THREE KEY AND THE FUNDS SHOULD BE SPENDABLE AND COUNT AT BALANCE CALCULATION
assert_equal(self.nodes[2].getbalance(), bal) #for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = False
for outpoint in rawTx['vout']:
if outpoint['value'] == Decimal('2.20000000'):
vout = outpoint
break
bal = self.nodes[0].getbalance()
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True)
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000'))
txHash = rawTx["hash"]
assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex'])
assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex'])
assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex'])
assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex'])
# 5. valid parameters - supply txid and True for non-verbose
assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])
# 6. invalid parameters - supply txid and string "Flase"
assert_raises(JSONRPCException, self.nodes[0].getrawtransaction, txHash, "Flase")
# 7. invalid parameters - supply txid and empty array
assert_raises(JSONRPCException, self.nodes[0].getrawtransaction, txHash, [])
# 8. invalid parameters - supply txid and empty dict
assert_raises(JSONRPCException, self.nodes[0].getrawtransaction, txHash, {})
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises(JSONRPCException, self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
outputs = { self.nodes[0].getnewaddress() : 1 }
assert_raises(JSONRPCException, self.nodes[0].createrawtransaction, inputs, outputs)
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
if __name__ == '__main__':
RawTransactionsTest().main()
| true | true |
f713d39ef8fd6add029331f47f48d93c39cfe6a7 | 1,039 | py | Python | tests/test_laguerre_inv.py | jab0707/UncertainSCI | 569c978c4f67dd7bb37e730276f2a376b8639235 | [
"MIT"
] | 1 | 2021-07-25T17:02:36.000Z | 2021-07-25T17:02:36.000Z | tests/test_laguerre_inv.py | jab0707/UncertainSCI | 569c978c4f67dd7bb37e730276f2a376b8639235 | [
"MIT"
] | 70 | 2020-04-09T17:38:12.000Z | 2022-03-18T17:06:09.000Z | tests/test_laguerre_inv.py | jab0707/UncertainSCI | 569c978c4f67dd7bb37e730276f2a376b8639235 | [
"MIT"
] | 7 | 2020-05-28T17:26:05.000Z | 2021-08-13T21:41:10.000Z | import unittest
import numpy as np
from UncertainSCI.families import LaguerrePolynomials
class IDistTestCase(unittest.TestCase):
"""
Tests for (Laguerre polynomial) inversed induced distributions.
"""
def test_idistinv_laguerre(self):
"""Evaluation of Laguerre inversed induced distribution function."""
# Randomly generate x, use idist to generate u
rho = 11*np.random.random() - 1
L = LaguerrePolynomials(rho=rho)
n = int(np.ceil(10*np.random.rand(1))[0])
M = 25
x1 = 4*(n+1)*np.random.rand(M)
u = L.idist(x1, n)
# see if idistinv givens x back
x2 = L.idistinv(u, n)
delta = 5e-3
ind = np.where(np.abs(x1-x2) > delta)[:2][0]
if ind.size > 0:
errstr = 'Failed for rho={0:1.3f}, n={1:d}'.format(rho, n)
else:
errstr = ''
self.assertAlmostEqual(np.linalg.norm(x1-x2, ord=np.inf), 0., delta=delta, msg=errstr)
if __name__ == "__main__":
unittest.main(verbosity=2)
| 25.341463 | 94 | 0.596728 | import unittest
import numpy as np
from UncertainSCI.families import LaguerrePolynomials
class IDistTestCase(unittest.TestCase):
def test_idistinv_laguerre(self):
rho = 11*np.random.random() - 1
L = LaguerrePolynomials(rho=rho)
n = int(np.ceil(10*np.random.rand(1))[0])
M = 25
x1 = 4*(n+1)*np.random.rand(M)
u = L.idist(x1, n)
x2 = L.idistinv(u, n)
delta = 5e-3
ind = np.where(np.abs(x1-x2) > delta)[:2][0]
if ind.size > 0:
errstr = 'Failed for rho={0:1.3f}, n={1:d}'.format(rho, n)
else:
errstr = ''
self.assertAlmostEqual(np.linalg.norm(x1-x2, ord=np.inf), 0., delta=delta, msg=errstr)
if __name__ == "__main__":
unittest.main(verbosity=2)
| true | true |
f713d436a3df74fcfc8664b3066de64a6190fdca | 2,796 | py | Python | michiru/modules/logger.py | moeIO/michiru | f1bafb90c2d82debee9e0402b426eba592038f24 | [
"WTFPL"
] | 1 | 2018-01-25T15:39:12.000Z | 2018-01-25T15:39:12.000Z | michiru/modules/logger.py | moeIO/michiru | f1bafb90c2d82debee9e0402b426eba592038f24 | [
"WTFPL"
] | null | null | null | michiru/modules/logger.py | moeIO/michiru | f1bafb90c2d82debee9e0402b426eba592038f24 | [
"WTFPL"
] | null | null | null | # Logging module.
import os
from os import path
import datetime
from michiru import config
from michiru.modules import hook
## Module information.
__name__ = 'logger'
__author__ = 'Shiz'
__license__ = 'WTFPL'
__desc__ = 'Log activities.'
config.item('logger.path', path.join('{local}', 'logs', '{server}', '{channel}.log'))
config.item('logger.date_format', '%Y/%m/%d %H:%M:%S')
## Utility functions.
def log(server, channel, message):
""" Remove earlier entries for `nick` from database and insert new log entry. """
logfile = config.get('logger.path', server=server, channel=channel).format(
site=config.SITE_DIR,
local=config.LOCAL_DIR,
server=server,
channel=channel or '<server>'
)
logpath = path.dirname(logfile)
dateformat = config.get('logger.date_format', server=server, channel=channel)
if not path.exists(logpath):
os.makedirs(logpath)
with open(logfile, 'a') as f:
f.write('[{now}] {message}\n'.format(now=datetime.datetime.utcnow().strftime(dateformat), message=message))
## Commands and hooks.
@hook('chat.join')
def join(bot, server, channel, who):
log(server, channel, '--> {nick} joined {chan}'.format(nick=who, chan=channel))
@hook('chat.part')
def part(bot, server, channel, who, reason):
log(server, channel, '<-- {nick} left {chan} ({reason})'.format(nick=who, chan=channel, reason=reason))
@hook('chat.disconnect')
def quit(bot, server, who, reason):
log(server, None, '<-- {nick} quit ({reason})'.format(nick=who, reason=reason))
@hook('chat.kick')
def kick(bot, server, channel, target, by, reason):
log(server, channel, '<!- {nick} got kicked from {channel} by {kicker} ({reason})'.format(nick=target, channel=channel, kicker=by, reason=reason))
@hook('chat.nickchange')
def nickchange(bot, server, who, to):
log(server, None, '-!- {old} changed nickname to {new}'.format(old=who, new=to))
@hook('chat.message')
def message(bot, server, target, who, message, private, admin):
log(server, who if private else target, '<{nick}> {message}'.format(nick=who, message=message))
@hook('chat.notice')
def notice(bot, server, target, who, message, private, admin):
log(server, who if private else target, '*{nick}* {message}'.format(nick=who, message=message))
@hook('chat.channelchange')
def channelchange(bot, server, channel, new):
log(server, channel, '-!- Channel changed to {new}'.format(new=new))
@hook('chat.topicchange')
def topicchange(bot, server, channel, who, topic):
if who:
log(server, channel, '-!- {who} changed topic to: {topic}'.format(who=who, topic=topic))
else:
log(server, channel, '-!- Topic changed to: {topic}'.format(topic=topic))
## Boilerplate.
def load():
return True
def unload():
pass
| 31.772727 | 150 | 0.670601 |
import os
from os import path
import datetime
from michiru import config
from michiru.modules import hook
__author__ = 'Shiz'
__license__ = 'WTFPL'
__desc__ = 'Log activities.'
config.item('logger.path', path.join('{local}', 'logs', '{server}', '{channel}.log'))
config.item('logger.date_format', '%Y/%m/%d %H:%M:%S')
annel, message):
logfile = config.get('logger.path', server=server, channel=channel).format(
site=config.SITE_DIR,
local=config.LOCAL_DIR,
server=server,
channel=channel or '<server>'
)
logpath = path.dirname(logfile)
dateformat = config.get('logger.date_format', server=server, channel=channel)
if not path.exists(logpath):
os.makedirs(logpath)
with open(logfile, 'a') as f:
f.write('[{now}] {message}\n'.format(now=datetime.datetime.utcnow().strftime(dateformat), message=message))
def join(bot, server, channel, who):
log(server, channel, '--> {nick} joined {chan}'.format(nick=who, chan=channel))
@hook('chat.part')
def part(bot, server, channel, who, reason):
log(server, channel, '<-- {nick} left {chan} ({reason})'.format(nick=who, chan=channel, reason=reason))
@hook('chat.disconnect')
def quit(bot, server, who, reason):
log(server, None, '<-- {nick} quit ({reason})'.format(nick=who, reason=reason))
@hook('chat.kick')
def kick(bot, server, channel, target, by, reason):
log(server, channel, '<!- {nick} got kicked from {channel} by {kicker} ({reason})'.format(nick=target, channel=channel, kicker=by, reason=reason))
@hook('chat.nickchange')
def nickchange(bot, server, who, to):
log(server, None, '-!- {old} changed nickname to {new}'.format(old=who, new=to))
@hook('chat.message')
def message(bot, server, target, who, message, private, admin):
log(server, who if private else target, '<{nick}> {message}'.format(nick=who, message=message))
@hook('chat.notice')
def notice(bot, server, target, who, message, private, admin):
log(server, who if private else target, '*{nick}* {message}'.format(nick=who, message=message))
@hook('chat.channelchange')
def channelchange(bot, server, channel, new):
log(server, channel, '-!- Channel changed to {new}'.format(new=new))
@hook('chat.topicchange')
def topicchange(bot, server, channel, who, topic):
if who:
log(server, channel, '-!- {who} changed topic to: {topic}'.format(who=who, topic=topic))
else:
log(server, channel, '-!- Topic changed to: {topic}'.format(topic=topic))
return True
def unload():
pass
| true | true |
f713d6cad3bb95d57bc72cad5289e97d9d7f1a6b | 2,225 | py | Python | coding/fetching_RE.py | Thyme-The-Herb/Web-Scraping | 80fbc0370ebb0152ce778fe5b985f1703ec4839b | [
"MIT"
] | null | null | null | coding/fetching_RE.py | Thyme-The-Herb/Web-Scraping | 80fbc0370ebb0152ce778fe5b985f1703ec4839b | [
"MIT"
] | null | null | null | coding/fetching_RE.py | Thyme-The-Herb/Web-Scraping | 80fbc0370ebb0152ce778fe5b985f1703ec4839b | [
"MIT"
] | null | null | null | from bs4 import BeautifulSoup
from selenium import webdriver
import selenium as se
from selenium.webdriver.chrome.options import Options
# This is the temporary url **** Need to make it dynamic
url = "https://www.realestate.co.nz/residential/sale?by=featured&lct=d225&maxba=2&maxbe=4&maxp=1400000&ql=80&scat=1"
# Component to fetch elements headless (expanded html)
options = se.webdriver.ChromeOptions() # weddriver library
options.add_argument('headless') # Type of fetching = headless
driver = se.webdriver.Chrome('/Users/Thyme/chromedriver') # PATH for chromedriver without fetching data will fail
driver.get(url)
data = driver.page_source
soup = BeautifulSoup(data, 'html.parser') # Using name soup just to respect its library
houses = str(soup) # transform bs4 type to string
houses = houses.split("\n") # Extract each lines into a list
# Realstate.co.nz
print("**********")
house_numbers = []
numbers = []
http_head = "https://www.realestate.co.nz/"
http_houses = []
# Extract all lines of element that contains house ID
for house in houses:
if "id=\"orbit-" in house:
house_numbers.append(house)
for number in house_numbers:
pos = number.index("id=\"orbit-")
result = number[pos+10:pos+17]
if result not in numbers:
numbers.append(result)
# print(numbers)
# print(len(numbers))
for number in numbers:
http = http_head + str(number)
http_houses.append(http)
print(http_houses)
# After first page http will adds "qo=80" the number represent total number houses shown start counting from second page
# Eg, first page "", second page "qo=80", third page "qo=160", fourth page "qo=240" and so on
# On the last page, if the numbers of houses less than 80 the number of increment will remain constant
# 2*(n-1) where "n" is number of page
# bbb = "https://www.realestate.co.nz/residential/sale?by=featured&lct=d225&maxba=2&maxbe=4&maxp=1400000&ql=80&scat=1"
# aaa = "https://www.realestate.co.nz/residential/sale?by=featured&lct=d225&maxba=2&maxbe=4&maxp=1400000&ql=80&qo=80&scat=1"
| 40.454545 | 124 | 0.670112 | from bs4 import BeautifulSoup
from selenium import webdriver
import selenium as se
from selenium.webdriver.chrome.options import Options
url = "https://www.realestate.co.nz/residential/sale?by=featured&lct=d225&maxba=2&maxbe=4&maxp=1400000&ql=80&scat=1"
options = se.webdriver.ChromeOptions()
options.add_argument('headless')
driver = se.webdriver.Chrome('/Users/Thyme/chromedriver')
driver.get(url)
data = driver.page_source
soup = BeautifulSoup(data, 'html.parser')
houses = str(soup)
houses = houses.split("\n")
print("**********")
house_numbers = []
numbers = []
http_head = "https://www.realestate.co.nz/"
http_houses = []
for house in houses:
if "id=\"orbit-" in house:
house_numbers.append(house)
for number in house_numbers:
pos = number.index("id=\"orbit-")
result = number[pos+10:pos+17]
if result not in numbers:
numbers.append(result)
for number in numbers:
http = http_head + str(number)
http_houses.append(http)
print(http_houses)
| true | true |
f713d72ab83c2f34fb2686c69f7fc5ff3e1e1581 | 13,844 | py | Python | tests/messages_tests/base.py | kkoralsky/django | 924af638e4d4fb8eb46a19ac0cafcb2e83480cf3 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | tests/messages_tests/base.py | kkoralsky/django | 924af638e4d4fb8eb46a19ac0cafcb2e83480cf3 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | tests/messages_tests/base.py | kkoralsky/django | 924af638e4d4fb8eb46a19ac0cafcb2e83480cf3 | [
"PSF-2.0",
"BSD-3-Clause"
] | 1 | 2020-02-06T10:31:51.000Z | 2020-02-06T10:31:51.000Z | from django import http
from django.contrib.messages import constants, get_level, set_level, utils
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.constants import DEFAULT_LEVELS
from django.contrib.messages.storage import base, default_storage
from django.contrib.messages.storage.base import Message
from django.test import modify_settings, override_settings
from django.urls import reverse
from django.utils.translation import ugettext_lazy
def add_level_messages(storage):
    """
    Add 6 messages from different levels (including a custom one) to a storage
    instance.
    """
    # (level, message, extra_tags) triples; '' matches storage.add()'s
    # default extra_tags, so passing it explicitly is a no-op.
    fixtures = (
        (constants.INFO, 'A generic info message', ''),
        (29, 'Some custom level', ''),
        (constants.DEBUG, 'A debugging message', 'extra-tag'),
        (constants.WARNING, 'A warning', ''),
        (constants.ERROR, 'An error', ''),
        (constants.SUCCESS, 'This was a triumph.', ''),
    )
    for level, message, tags in fixtures:
        storage.add(level, message, extra_tags=tags)
class override_settings_tags(override_settings):
    """
    override_settings variant that also refreshes the LEVEL_TAGS constant
    cached in django.contrib.messages.storage.base, which is computed from
    settings.MESSAGE_TAGS at import time and would otherwise go stale.
    """
    def enable(self):
        # Apply the overridden settings first so get_level_tags() sees them.
        super().enable()
        # LEVEL_TAGS is a constant defined in the
        # django.contrib.messages.storage.base module, so after changing
        # settings.MESSAGE_TAGS, update that constant also.
        self.old_level_tags = base.LEVEL_TAGS
        base.LEVEL_TAGS = utils.get_level_tags()
    def disable(self):
        super().disable()
        # Restore the tag mapping that was in effect before enable().
        base.LEVEL_TAGS = self.old_level_tags
class BaseTests:
    """
    Common message-storage test suite. Concrete subclasses set
    ``storage_class`` and implement the storage-specific hooks
    (``stored_messages_count`` and ``test_get``).
    """
    # Storage backend under test; overridden by subclasses.
    storage_class = default_storage
    # Maps the level names used in the test URLconf to numeric constants.
    levels = {
        'debug': constants.DEBUG,
        'info': constants.INFO,
        'success': constants.SUCCESS,
        'warning': constants.WARNING,
        'error': constants.ERROR,
    }
def setUp(self):
self.settings_override = override_settings_tags(
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': (
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
),
},
}],
ROOT_URLCONF='messages_tests.urls',
MESSAGE_TAGS='',
MESSAGE_STORAGE='%s.%s' % (self.storage_class.__module__, self.storage_class.__name__),
SESSION_SERIALIZER='django.contrib.sessions.serializers.JSONSerializer',
)
self.settings_override.enable()
    def tearDown(self):
        # Undo the settings overrides applied in setUp().
        self.settings_override.disable()
    def get_request(self):
        """Return a bare HttpRequest for exercising the storage backend."""
        return http.HttpRequest()
    def get_response(self):
        """Return a bare HttpResponse for storage.update() calls."""
        return http.HttpResponse()
def get_storage(self, data=None):
"""
Return the storage backend, setting its loaded data to the ``data``
argument.
This method avoids the storage ``_get`` method from getting called so
that other parts of the storage backend can be tested independent of
the message retrieval logic.
"""
storage = self.storage_class(self.get_request())
storage._loaded_data = data or []
return storage
def test_add(self):
storage = self.get_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 1')
self.assertTrue(storage.added_new)
storage.add(constants.INFO, 'Test message 2', extra_tags='tag')
self.assertEqual(len(storage), 2)
def test_add_lazy_translation(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, ugettext_lazy('lazy message'))
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
def test_no_update(self):
storage = self.get_storage()
response = self.get_response()
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_add_update(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 1')
storage.add(constants.INFO, 'Test message 1', extra_tags='tag')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 2)
def test_existing_add_read_update(self):
storage = self.get_existing_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 3')
list(storage) # Simulates a read
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_existing_read_add_update(self):
storage = self.get_existing_storage()
response = self.get_response()
list(storage) # Simulates a read
storage.add(constants.INFO, 'Test message 3')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_full_request_response_cycle(self):
"""
With the message middleware enabled, messages are properly stored and
retrieved across the full request/redirect/response cycle.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn('messages', response.context)
messages = [Message(self.levels[level], msg) for msg in data['messages']]
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_with_template_response(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_template_response')
for level in self.levels.keys():
add_url = reverse('add_template_response', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn('messages', response.context)
for msg in data['messages']:
self.assertContains(response, msg)
# there shouldn't be any messages on second GET request
response = self.client.get(show_url)
for msg in data['messages']:
self.assertNotContains(response, msg)
def test_context_processor_message_levels(self):
show_url = reverse('show_template_response')
response = self.client.get(show_url)
self.assertIn('DEFAULT_MESSAGE_LEVELS', response.context)
self.assertEqual(response.context['DEFAULT_MESSAGE_LEVELS'], DEFAULT_LEVELS)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_multiple_posts(self):
"""
Messages persist properly when multiple POSTs are made before a GET.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_message')
messages = []
for level in ('debug', 'info', 'success', 'warning', 'error'):
messages.extend(Message(self.levels[level], msg) for msg in data['messages'])
add_url = reverse('add_message', args=(level,))
self.client.post(add_url, data)
response = self.client.get(show_url)
self.assertIn('messages', response.context)
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@modify_settings(
INSTALLED_APPS={'remove': 'django.contrib.messages'},
MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
)
@override_settings(
MESSAGE_LEVEL=constants.DEBUG,
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
}],
)
def test_middleware_disabled(self):
"""
When the middleware is disabled, an exception is raised when one
attempts to store a message.
"""
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
with self.assertRaises(MessageFailure):
self.client.post(add_url, data, follow=True)
@modify_settings(
    INSTALLED_APPS={'remove': 'django.contrib.messages'},
    MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
)
@override_settings(
    TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
    }],
)
def test_middleware_disabled_fail_silently(self):
    """
    When the middleware is disabled, an exception is not raised
    if 'fail_silently' = True
    """
    data = {
        'messages': ['Test message %d' % x for x in range(5)],
        'fail_silently': True,
    }
    show_url = reverse('show_message')
    for level in ('debug', 'info', 'success', 'warning', 'error'):
        add_url = reverse('add_message', args=(level,))
        response = self.client.post(add_url, data, follow=True)
        self.assertRedirects(response, show_url)
        # The storage failure is swallowed: the redirect succeeds but no
        # messages ever reach the template context.
        self.assertNotIn('messages', response.context)
def stored_messages_count(self, storage, response):
    """
    Return the number of messages being stored after a
    ``storage.update()`` call.
    """
    # Backend-specific: each storage backend's test class counts messages
    # in its own medium (cookie, session, etc.).
    raise NotImplementedError('This method must be set by a subclass.')
def test_get(self):
    """Retrieval of stored messages; implemented by each backend's test class."""
    raise NotImplementedError('This method must be set by a subclass.')
def get_existing_storage(self):
    """Return a storage instance pre-populated with two INFO messages."""
    existing_messages = [
        Message(constants.INFO, 'Test message 1'),
        Message(constants.INFO, 'Test message 2', extra_tags='tag'),
    ]
    return self.get_storage(existing_messages)
def test_existing_read(self):
    """
    Reading the existing storage doesn't cause the data to be lost.
    """
    storage = self.get_existing_storage()
    # A freshly loaded storage has not been read yet.
    self.assertFalse(storage.used)
    # After iterating the storage engine directly, the used flag is set.
    data = list(storage)
    self.assertTrue(storage.used)
    # The data does not disappear because it has been iterated.
    self.assertEqual(data, list(storage))
def test_existing_add(self):
    """Adding to a pre-populated storage sets the added_new flag."""
    storage = self.get_existing_storage()
    self.assertFalse(storage.added_new)
    storage.add(constants.INFO, 'Test message 3')
    self.assertTrue(storage.added_new)
def test_default_level(self):
    """With no level configured anywhere, the effective level is INFO."""
    # get_level works even with no storage on the request.
    request = self.get_request()
    self.assertEqual(get_level(request), constants.INFO)
    # get_level returns the default level if it hasn't been set.
    storage = self.get_storage()
    request._messages = storage
    self.assertEqual(get_level(request), constants.INFO)
    # Only messages of sufficient level get recorded.
    # add_level_messages() adds 6; one falls below the INFO threshold.
    add_level_messages(storage)
    self.assertEqual(len(storage), 5)
def test_low_level(self):
    """A per-request level below every message level records everything."""
    request = self.get_request()
    storage = self.storage_class(request)
    request._messages = storage
    self.assertTrue(set_level(request, 5))
    self.assertEqual(get_level(request), 5)
    # All 6 messages from add_level_messages() clear a threshold of 5.
    add_level_messages(storage)
    self.assertEqual(len(storage), 6)
def test_high_level(self):
    """A high per-request level filters out most messages."""
    request = self.get_request()
    storage = self.storage_class(request)
    request._messages = storage
    self.assertTrue(set_level(request, 30))
    self.assertEqual(get_level(request), 30)
    # Only the 2 messages at level >= 30 are recorded.
    add_level_messages(storage)
    self.assertEqual(len(storage), 2)
@override_settings(MESSAGE_LEVEL=29)
def test_settings_level(self):
    """The MESSAGE_LEVEL setting controls the recording threshold."""
    request = self.get_request()
    storage = self.storage_class(request)
    # No per-request level was set, so the setting value (29) applies.
    self.assertEqual(get_level(request), 29)
    # Only the 3 messages at level >= 29 are recorded.
    add_level_messages(storage)
    self.assertEqual(len(storage), 3)
def test_tags(self):
    """Message.tags combines extra_tags with the level tag."""
    storage = self.get_storage()
    storage.level = 0  # record every message regardless of level
    add_level_messages(storage)
    tags = [msg.tags for msg in storage]
    self.assertEqual(tags, ['info', '', 'extra-tag debug', 'warning', 'error', 'success'])
def test_level_tag(self):
    """Message.level_tag is the tag for the level alone (no extra_tags)."""
    storage = self.get_storage()
    storage.level = 0  # record every message regardless of level
    add_level_messages(storage)
    tags = [msg.level_tag for msg in storage]
    self.assertEqual(tags, ['info', '', 'debug', 'warning', 'error', 'success'])
@override_settings_tags(MESSAGE_TAGS={
    constants.INFO: 'info',
    constants.DEBUG: '',
    constants.WARNING: '',
    constants.ERROR: 'bad',
    29: 'custom',
})
def test_custom_tags(self):
    """The MESSAGE_TAGS setting overrides the default level tags."""
    storage = self.get_storage()
    storage.level = 0  # record every message regardless of level
    add_level_messages(storage)
    tags = [msg.tags for msg in storage]
    self.assertEqual(tags, ['info', 'custom', 'extra-tag', '', 'bad', 'success'])
| 36.917333 | 99 | 0.630526 | from django import http
from django.contrib.messages import constants, get_level, set_level, utils
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.constants import DEFAULT_LEVELS
from django.contrib.messages.storage import base, default_storage
from django.contrib.messages.storage.base import Message
from django.test import modify_settings, override_settings
from django.urls import reverse
from django.utils.translation import ugettext_lazy
def add_level_messages(storage):
    """Add 6 messages at assorted levels (including a custom level, 29) to *storage*."""
    storage.add(constants.INFO, 'A generic info message')
    storage.add(29, 'Some custom level')
    storage.add(constants.DEBUG, 'A debugging message', extra_tags='extra-tag')
    storage.add(constants.WARNING, 'A warning')
    storage.add(constants.ERROR, 'An error')
    storage.add(constants.SUCCESS, 'This was a triumph.')
class override_settings_tags(override_settings):
    """override_settings that also refreshes the cached base.LEVEL_TAGS."""
    def enable(self):
        super().enable()
        # LEVEL_TAGS is computed once at import time in the base module, so
        # it must be recomputed after MESSAGE_TAGS is overridden.
        self.old_level_tags = base.LEVEL_TAGS
        base.LEVEL_TAGS = utils.get_level_tags()

    def disable(self):
        super().disable()
        base.LEVEL_TAGS = self.old_level_tags
class BaseTests:
storage_class = default_storage
levels = {
'debug': constants.DEBUG,
'info': constants.INFO,
'success': constants.SUCCESS,
'warning': constants.WARNING,
'error': constants.ERROR,
}
def setUp(self):
self.settings_override = override_settings_tags(
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': (
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
),
},
}],
ROOT_URLCONF='messages_tests.urls',
MESSAGE_TAGS='',
MESSAGE_STORAGE='%s.%s' % (self.storage_class.__module__, self.storage_class.__name__),
SESSION_SERIALIZER='django.contrib.sessions.serializers.JSONSerializer',
)
self.settings_override.enable()
def tearDown(self):
self.settings_override.disable()
def get_request(self):
return http.HttpRequest()
def get_response(self):
return http.HttpResponse()
def get_storage(self, data=None):
storage = self.storage_class(self.get_request())
storage._loaded_data = data or []
return storage
def test_add(self):
storage = self.get_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 1')
self.assertTrue(storage.added_new)
storage.add(constants.INFO, 'Test message 2', extra_tags='tag')
self.assertEqual(len(storage), 2)
def test_add_lazy_translation(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, ugettext_lazy('lazy message'))
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
def test_no_update(self):
storage = self.get_storage()
response = self.get_response()
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_add_update(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 1')
storage.add(constants.INFO, 'Test message 1', extra_tags='tag')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 2)
def test_existing_add_read_update(self):
storage = self.get_existing_storage()
response = self.get_response()
storage.add(constants.INFO, 'Test message 3')
list(storage)
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_existing_read_add_update(self):
storage = self.get_existing_storage()
response = self.get_response()
list(storage)
storage.add(constants.INFO, 'Test message 3')
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_full_request_response_cycle(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn('messages', response.context)
messages = [Message(self.levels[level], msg) for msg in data['messages']]
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_with_template_response(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_template_response')
for level in self.levels.keys():
add_url = reverse('add_template_response', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn('messages', response.context)
for msg in data['messages']:
self.assertContains(response, msg)
response = self.client.get(show_url)
for msg in data['messages']:
self.assertNotContains(response, msg)
def test_context_processor_message_levels(self):
show_url = reverse('show_template_response')
response = self.client.get(show_url)
self.assertIn('DEFAULT_MESSAGE_LEVELS', response.context)
self.assertEqual(response.context['DEFAULT_MESSAGE_LEVELS'], DEFAULT_LEVELS)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_multiple_posts(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
show_url = reverse('show_message')
messages = []
for level in ('debug', 'info', 'success', 'warning', 'error'):
messages.extend(Message(self.levels[level], msg) for msg in data['messages'])
add_url = reverse('add_message', args=(level,))
self.client.post(add_url, data)
response = self.client.get(show_url)
self.assertIn('messages', response.context)
self.assertEqual(list(response.context['messages']), messages)
for msg in data['messages']:
self.assertContains(response, msg)
@modify_settings(
INSTALLED_APPS={'remove': 'django.contrib.messages'},
MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
)
@override_settings(
MESSAGE_LEVEL=constants.DEBUG,
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
}],
)
def test_middleware_disabled(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
}
reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
with self.assertRaises(MessageFailure):
self.client.post(add_url, data, follow=True)
@modify_settings(
INSTALLED_APPS={'remove': 'django.contrib.messages'},
MIDDLEWARE={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
)
@override_settings(
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
}],
)
def test_middleware_disabled_fail_silently(self):
data = {
'messages': ['Test message %d' % x for x in range(5)],
'fail_silently': True,
}
show_url = reverse('show_message')
for level in ('debug', 'info', 'success', 'warning', 'error'):
add_url = reverse('add_message', args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertNotIn('messages', response.context)
def stored_messages_count(self, storage, response):
raise NotImplementedError('This method must be set by a subclass.')
def test_get(self):
raise NotImplementedError('This method must be set by a subclass.')
def get_existing_storage(self):
return self.get_storage([
Message(constants.INFO, 'Test message 1'),
Message(constants.INFO, 'Test message 2', extra_tags='tag'),
])
def test_existing_read(self):
storage = self.get_existing_storage()
self.assertFalse(storage.used)
# After iterating the storage engine directly, the used flag is set.
data = list(storage)
self.assertTrue(storage.used)
# The data does not disappear because it has been iterated.
self.assertEqual(data, list(storage))
def test_existing_add(self):
storage = self.get_existing_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, 'Test message 3')
self.assertTrue(storage.added_new)
def test_default_level(self):
# get_level works even with no storage on the request.
request = self.get_request()
self.assertEqual(get_level(request), constants.INFO)
# get_level returns the default level if it hasn't been set.
storage = self.get_storage()
request._messages = storage
self.assertEqual(get_level(request), constants.INFO)
add_level_messages(storage)
self.assertEqual(len(storage), 5)
def test_low_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 5))
self.assertEqual(get_level(request), 5)
add_level_messages(storage)
self.assertEqual(len(storage), 6)
def test_high_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 30))
self.assertEqual(get_level(request), 30)
add_level_messages(storage)
self.assertEqual(len(storage), 2)
@override_settings(MESSAGE_LEVEL=29)
def test_settings_level(self):
request = self.get_request()
storage = self.storage_class(request)
self.assertEqual(get_level(request), 29)
add_level_messages(storage)
self.assertEqual(len(storage), 3)
def test_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags, ['info', '', 'extra-tag debug', 'warning', 'error', 'success'])
def test_level_tag(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.level_tag for msg in storage]
self.assertEqual(tags, ['info', '', 'debug', 'warning', 'error', 'success'])
@override_settings_tags(MESSAGE_TAGS={
constants.INFO: 'info',
constants.DEBUG: '',
constants.WARNING: '',
constants.ERROR: 'bad',
29: 'custom',
})
def test_custom_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags, ['info', 'custom', 'extra-tag', '', 'bad', 'success'])
| true | true |
f713d8b7796ccabed8db2a1d07dbfb48bd718890 | 6,406 | py | Python | opps/containers/admin.py | mauler/opps | 24a8d81434b8c17eafc894d51c1c3adbb67d7d07 | [
"MIT"
] | null | null | null | opps/containers/admin.py | mauler/opps | 24a8d81434b8c17eafc894d51c1c3adbb67d7d07 | [
"MIT"
] | null | null | null | opps/containers/admin.py | mauler/opps | 24a8d81434b8c17eafc894d51c1c3adbb67d7d07 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from django.contrib import admin
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from .models import Container, ContainerImage, Mirror
from .models import ContainerBox, ContainerBoxContainers
from .forms import ContainerBoxContainersInlineForm
from opps.core.admin import PublishableAdmin, apply_opps_rules, BaseBoxAdmin
from opps.contrib.multisite.admin import AdminViewPermission
from opps.core.filters import ChannelListFilter, HasQuerySet
from opps.images.generate import image_url
from opps.fields.models import Field, FieldOption
@apply_opps_rules('containers')
class ContainerImageInline(admin.TabularInline):
    """Inline editor for the images attached to a container, ordered manually."""
    model = ContainerImage
    fk_name = 'container'
    raw_id_fields = ['image']
    sortable_field_name = "order"
    actions = None
    extra = 0
    verbose_name = _(u"Container image")
    verbose_name_plural = _(u"Container images")
    fieldsets = [(None, {'fields': ('image', 'image_thumb',
                                    'order', 'caption')})]
    ordering = ('order',)
    readonly_fields = ['image_thumb']

    def image_thumb(self, obj):
        """Render a 60x60 thumbnail of the related image for the admin form."""
        if obj.image:
            return u'<img width="60px" height="60px" src="{0}" />'.format(
                image_url(obj.image.archive.url, width=60, height=60))
        return _(u'No Image')
    image_thumb.short_description = _(u'Thumbnail')
    image_thumb.allow_tags = True
@apply_opps_rules('containers')
class ContainerBoxContainersInline(admin.StackedInline):
    """Inline editor for the ordered containers placed inside a container box."""
    model = ContainerBoxContainers
    form = ContainerBoxContainersInlineForm
    fk_name = 'containerbox'
    raw_id_fields = ['container', 'main_image']
    sortable_field_name = "order"
    actions = None
    ordering = ('order',)
    extra = 0
    fieldsets = [(None, {
        'classes': ('collapse',),
        'fields': ('container', 'aggregate', 'highlight', 'order',
                   'date_available', 'date_end', 'hat', 'title',
                   'main_image', 'main_image_caption', 'url', 'url_target')})]
@apply_opps_rules('containers')
class ContainerAdmin(PublishableAdmin, AdminViewPermission):
    """Base admin for Container models, including dynamic custom-field JSON."""
    prepopulated_fields = {"slug": ["title"]}
    readonly_fields = ['get_http_absolute_url', 'short_url',
                       'in_containerboxes', 'image_thumb']
    raw_id_fields = ['main_image', 'channel', 'mirror_channel']
    ordering = ('-date_available',)
    autocomplete_lookup_fields = {
        'fk': ['channel'],
    }

    def get_list_filter(self, request):
        """Prepend the channel filter to the inherited list filters."""
        list_filter = super(ContainerAdmin, self).list_filter
        list_filter = [ChannelListFilter] + list(list_filter)
        return list_filter

    def save_model(self, request, obj, form, change):
        """Save the container, then collect the custom ``Field`` values posted
        as ``json_<slug>`` inputs into the object's ``json`` attribute."""
        super(ContainerAdmin, self).save_model(request, obj, form, change)
        _json = {}
        # Only fields registered for this concrete container class apply.
        for field in Field.objects.filter(
                application__contains=obj.__class__.__name__):
            if field.type == 'checkbox':
                # Checkbox fields store one entry per option: <field>_<option>.
                for fo in FieldOption.objects.filter(field=field):
                    key = "{0}_{1}".format(field.slug, fo.option.slug)
                    _json[key] = request.POST.get('json_{0}'.format(key), '')
            else:
                _json[field.slug] = request.POST.get(
                    'json_{0}'.format(field.slug), '')
        obj.json = json.dumps(_json)
        # Second save persists the freshly built JSON payload.
        obj.save()
@apply_opps_rules('containers')
class ContainerBoxAdmin(BaseBoxAdmin, AdminViewPermission):
    """Admin for ContainerBox: manual container placement via inlines plus an
    optional dynamic queryset."""
    inlines = [ContainerBoxContainersInline]
    raw_id_fields = ['channel', 'queryset', 'main_image']
    list_display = ['name', 'site', 'channel_name', 'date_available',
                    'published']
    save_as = True
    fieldsets = (
        (_(u'Identification'), {
            'fields': ('site', 'name', 'slug', 'title', 'title_url',
                       'main_image', 'main_image_caption')}),
        (_(u'Relationships'), {
            'fields': ('channel', 'queryset')}),
        (_(u'Publication'), {
            'classes': ('extrapretty'),
            'fields': ('content_group', 'published', 'date_available')}),
    )
    autocomplete_lookup_fields = {
        'fk': ['channel'],
    }

    def clean_ended_entries(self, request, queryset):
        """Admin action: delete inline entries whose date_end has passed."""
        now = timezone.now()
        for box in queryset:
            ended = box.containerboxcontainers_containerboxes.filter(
                date_end__lt=now
            )
            if ended:
                ended.delete()
    clean_ended_entries.short_description = _(u'Clean ended containers')

    def get_list_display(self, request):
        """Superusers also see whether a box is dynamic (queryset-driven)."""
        list_display = getattr(self, 'list_display', [])
        if request.user.is_superuser:
            return list_display + ['is_dynamic']
        return list_display

    def get_list_filter(self, request):
        """Superusers can additionally filter on whether a queryset is set."""
        list_filter = super(ContainerBoxAdmin, self).list_filter
        if request.user.is_superuser:
            list_filter = [HasQuerySet] + list_filter
        return list_filter

    def is_dynamic(self, obj):
        """True when the box content comes from a dynamic queryset."""
        if obj.queryset:
            return True
        else:
            return False
    is_dynamic.short_description = _(u'Dynamic')
    is_dynamic.boolean = True

    actions = ('clean_ended_entries',)
class HideContainerAdmin(PublishableAdmin, AdminViewPermission):
    """Read-only admin used for the generic Container/Mirror listings: hidden
    from the admin index and with adding disabled."""
    list_display = ['image_thumb', 'get_child_class', 'title',
                    'channel_name', 'date_available',
                    'published']
    readonly_fields = ['image_thumb']

    def get_child_class(self, obj):
        """Return the (translated) concrete subclass name for the list display."""
        return _(obj.child_class)
    get_child_class.short_description = _(u'Child class')

    def get_model_perms(self, *args, **kwargs):
        # Returning no permissions hides the model from the admin index page
        # while keeping its change list reachable by URL.
        return {}

    def has_add_permission(self, request):
        # Containers are created through their concrete subclasses only.
        return False

    def get_list_filter(self, request):
        """Prepend the channel filter to the inherited list filters."""
        list_filter = super(HideContainerAdmin, self).list_filter
        list_filter = [ChannelListFilter] + list(list_filter)
        return list_filter

    def queryset(self, request):
        # NOTE(review): Django renamed ModelAdmin.queryset() to get_queryset()
        # in 1.6 -- confirm the Django version this project targets.
        qs = super(HideContainerAdmin, self).queryset(request)
        # TODO: Document this
        # Hide child classes listed in the OPPS_CONTAINERS_BLACKLIST setting.
        blacklist = getattr(settings, 'OPPS_CONTAINERS_BLACKLIST', [])
        if blacklist:
            qs = qs.exclude(child_class__in=blacklist)
        return qs
admin.site.register(Container, HideContainerAdmin)
admin.site.register(ContainerBox, ContainerBoxAdmin)
admin.site.register(Mirror, HideContainerAdmin)
| 35.005464 | 78 | 0.642523 |
import json
from django.contrib import admin
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from .models import Container, ContainerImage, Mirror
from .models import ContainerBox, ContainerBoxContainers
from .forms import ContainerBoxContainersInlineForm
from opps.core.admin import PublishableAdmin, apply_opps_rules, BaseBoxAdmin
from opps.contrib.multisite.admin import AdminViewPermission
from opps.core.filters import ChannelListFilter, HasQuerySet
from opps.images.generate import image_url
from opps.fields.models import Field, FieldOption
@apply_opps_rules('containers')
class ContainerImageInline(admin.TabularInline):
model = ContainerImage
fk_name = 'container'
raw_id_fields = ['image']
sortable_field_name = "order"
actions = None
extra = 0
verbose_name = _(u"Container image")
verbose_name_plural = _(u"Container images")
fieldsets = [(None, {'fields': ('image', 'image_thumb',
'order', 'caption')})]
ordering = ('order',)
readonly_fields = ['image_thumb']
def image_thumb(self, obj):
if obj.image:
return u'<img width="60px" height="60px" src="{0}" />'.format(
image_url(obj.image.archive.url, width=60, height=60))
return _(u'No Image')
image_thumb.short_description = _(u'Thumbnail')
image_thumb.allow_tags = True
@apply_opps_rules('containers')
class ContainerBoxContainersInline(admin.StackedInline):
model = ContainerBoxContainers
form = ContainerBoxContainersInlineForm
fk_name = 'containerbox'
raw_id_fields = ['container', 'main_image']
sortable_field_name = "order"
actions = None
ordering = ('order',)
extra = 0
fieldsets = [(None, {
'classes': ('collapse',),
'fields': ('container', 'aggregate', 'highlight', 'order',
'date_available', 'date_end', 'hat', 'title',
'main_image', 'main_image_caption', 'url', 'url_target')})]
@apply_opps_rules('containers')
class ContainerAdmin(PublishableAdmin, AdminViewPermission):
prepopulated_fields = {"slug": ["title"]}
readonly_fields = ['get_http_absolute_url', 'short_url',
'in_containerboxes', 'image_thumb']
raw_id_fields = ['main_image', 'channel', 'mirror_channel']
ordering = ('-date_available',)
autocomplete_lookup_fields = {
'fk': ['channel'],
}
def get_list_filter(self, request):
list_filter = super(ContainerAdmin, self).list_filter
list_filter = [ChannelListFilter] + list(list_filter)
return list_filter
def save_model(self, request, obj, form, change):
super(ContainerAdmin, self).save_model(request, obj, form, change)
_json = {}
for field in Field.objects.filter(
application__contains=obj.__class__.__name__):
if field.type == 'checkbox':
for fo in FieldOption.objects.filter(field=field):
key = "{0}_{1}".format(field.slug, fo.option.slug)
_json[key] = request.POST.get('json_{0}'.format(key), '')
else:
_json[field.slug] = request.POST.get(
'json_{0}'.format(field.slug), '')
obj.json = json.dumps(_json)
obj.save()
@apply_opps_rules('containers')
class ContainerBoxAdmin(BaseBoxAdmin, AdminViewPermission):
inlines = [ContainerBoxContainersInline]
raw_id_fields = ['channel', 'queryset', 'main_image']
list_display = ['name', 'site', 'channel_name', 'date_available',
'published']
save_as = True
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'name', 'slug', 'title', 'title_url',
'main_image', 'main_image_caption')}),
(_(u'Relationships'), {
'fields': ('channel', 'queryset')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('content_group', 'published', 'date_available')}),
)
autocomplete_lookup_fields = {
'fk': ['channel'],
}
def clean_ended_entries(self, request, queryset):
now = timezone.now()
for box in queryset:
ended = box.containerboxcontainers_containerboxes.filter(
date_end__lt=now
)
if ended:
ended.delete()
clean_ended_entries.short_description = _(u'Clean ended containers')
def get_list_display(self, request):
list_display = getattr(self, 'list_display', [])
if request.user.is_superuser:
return list_display + ['is_dynamic']
return list_display
def get_list_filter(self, request):
list_filter = super(ContainerBoxAdmin, self).list_filter
if request.user.is_superuser:
list_filter = [HasQuerySet] + list_filter
return list_filter
def is_dynamic(self, obj):
if obj.queryset:
return True
else:
return False
is_dynamic.short_description = _(u'Dynamic')
is_dynamic.boolean = True
actions = ('clean_ended_entries',)
class HideContainerAdmin(PublishableAdmin, AdminViewPermission):
list_display = ['image_thumb', 'get_child_class', 'title',
'channel_name', 'date_available',
'published']
readonly_fields = ['image_thumb']
def get_child_class(self, obj):
return _(obj.child_class)
get_child_class.short_description = _(u'Child class')
def get_model_perms(self, *args, **kwargs):
return {}
def has_add_permission(self, request):
return False
def get_list_filter(self, request):
list_filter = super(HideContainerAdmin, self).list_filter
list_filter = [ChannelListFilter] + list(list_filter)
return list_filter
def queryset(self, request):
qs = super(HideContainerAdmin, self).queryset(request)
blacklist = getattr(settings, 'OPPS_CONTAINERS_BLACKLIST', [])
if blacklist:
qs = qs.exclude(child_class__in=blacklist)
return qs
admin.site.register(Container, HideContainerAdmin)
admin.site.register(ContainerBox, ContainerBoxAdmin)
admin.site.register(Mirror, HideContainerAdmin)
| true | true |
f713d8ddadb695ef35b334d9bdb44a3532a72d22 | 448 | py | Python | map_objects/tile.py | MikeChunko/rogue | 7974a55f732ec03c47321ccbd8950ff8856bd5f2 | [
"BSD-2-Clause"
] | null | null | null | map_objects/tile.py | MikeChunko/rogue | 7974a55f732ec03c47321ccbd8950ff8856bd5f2 | [
"BSD-2-Clause"
] | null | null | null | map_objects/tile.py | MikeChunko/rogue | 7974a55f732ec03c47321ccbd8950ff8856bd5f2 | [
"BSD-2-Clause"
] | null | null | null | # File name: tile.py
# Author: Michael Chunko
# Python Version: 3.7
# This file contains the class representing a tile on a map
class Tile:
    """A tile on a map: it may block movement and/or sight."""

    def __init__(self, blocked=True, block_sight=None, seen=False):
        """
        Args:
            blocked: True if the tile cannot be moved through.
            block_sight: True if the tile blocks line of sight. Defaults to
                the value of ``blocked`` when not given.
            seen: True if the tile has already been seen by the player.
        """
        self.blocked = blocked
        # By default, a blocked tile also blocks sight.
        # Bug fix: the else branch previously assigned `blocked` again,
        # silently ignoring an explicitly supplied `block_sight` value.
        if block_sight is None:
            self.block_sight = blocked
        else:
            self.block_sight = block_sight
        self.seen = seen
| 23.578947 | 67 | 0.640625 |
class Tile:
    """A tile on a map: it may block movement and/or sight."""

    def __init__(self, blocked=True, block_sight=None, seen=False):
        self.blocked = blocked
        # Bug fix: the else branch previously assigned `blocked` again,
        # silently ignoring an explicitly supplied `block_sight` value.
        if block_sight is None:
            self.block_sight = blocked
        else:
            self.block_sight = block_sight
        self.seen = seen
| true | true |
f713d947567df750231c71b544d1b9bc0cef1b63 | 1,978 | py | Python | panoptes_aggregation/running_reducers/tess_user_reducer.py | alnah005/aggregation-for-caesar | b2422f4c007857531ac3ff2636b567adb667dd0c | [
"Apache-2.0"
] | 9 | 2018-04-11T13:44:32.000Z | 2022-03-09T16:39:26.000Z | panoptes_aggregation/running_reducers/tess_user_reducer.py | alnah005/aggregation-for-caesar | b2422f4c007857531ac3ff2636b567adb667dd0c | [
"Apache-2.0"
] | 217 | 2017-07-27T09:20:15.000Z | 2022-03-21T11:15:33.000Z | panoptes_aggregation/running_reducers/tess_user_reducer.py | hughdickinson/aggregation-for-caesar | d6bca0a1126e0397315d5773401c71075c33ee2f | [
"Apache-2.0"
] | 10 | 2018-11-12T21:36:48.000Z | 2022-02-07T11:50:03.000Z | '''
TESS User Reducer
-----------------
This module porvides functions to calculate uesr weights for the TESS project.
Extracts are from Ceasars `PluckFieldExtractor`.
'''
from .running_reducer_wrapper import running_reducer_wrapper
import numpy as np
@running_reducer_wrapper(relevant_reduction=True)
def tess_user_reducer(data, **kwargs):
    '''Calculate TESS user weights

    Parameters
    ----------
    data : list
        A list with one item containing the extract with the user's feedback on a
        gold standard subject
    store : keyword, dict
        A dictionary with two keys:

        * `seed`: sum of all previous `seed` values
        * `count`: sum of all previous gold standard transits seen
    relevant_reduction : keyword, list
        A list with one item containing the results of the current subject's stats reducer.
        This item is a dictionary with two keys:

        * `True`: number of users who correctly identified the gold standard transits in the subject
        * `False`: number of users who incorrectly identified the gold standard transits in the subject

    Returns
    -------
    reduction : dict
        A dictionary with two keys:

        * `data`: A dictionary with the `skill` value as the only item
        * `store`: The updated store for the user
    '''
    # One boolean per gold-standard transit in this classification.
    success = [d['success'] for d in data[0]['feedback']]
    store = kwargs.pop('store')
    relevant_reduction = kwargs.pop('relevant_reduction')[0]
    try:
        d_subject = relevant_reduction['data']['difficulty']
    except (KeyError, TypeError):
        # No difficulty reduction exists for this subject yet; treat it as 0
        # so this classification leaves the seed unchanged. (This was a bare
        # `except:`, which also swallowed SystemExit/KeyboardInterrupt.)
        d_subject = 0
    # Correct answers add 2x the subject difficulty, wrong ones subtract 1x.
    seed_current = (np.where(success, 2, -1) * d_subject).sum()
    seed = store.get('seed', 0) + seed_current
    count = store.get('count', 0) + len(success)
    store = {
        'seed': seed,
        'count': count
    }
    c0 = 1  # baseline skill
    if count > 0:
        skill = c0 * pow((1.0 + np.log10(count)), (seed / count))
    else:
        # No gold-standard transits seen yet: fall back to the baseline
        # instead of dividing by zero.
        skill = c0
    # Clamp the skill into [0.05, 3.0].
    skill = min(3.0, max(0.05, skill))
    return {
        'skill': skill,
        '_store': store
    }
| 31.903226 | 103 | 0.636502 | from .running_reducer_wrapper import running_reducer_wrapper
import numpy as np
@running_reducer_wrapper(relevant_reduction=True)
def tess_user_reducer(data, **kwargs):
    """Calculate TESS user weights from gold-standard feedback extracts."""
    success = [d['success'] for d in data[0]['feedback']]
    store = kwargs.pop('store')
    relevant_reduction = kwargs.pop('relevant_reduction')[0]
    try:
        d_subject = relevant_reduction['data']['difficulty']
    except (KeyError, TypeError):
        # No difficulty reduction for this subject yet; previously a bare
        # `except:` swallowed every exception here.
        d_subject = 0
    seed_current = (np.where(success, 2, -1) * d_subject).sum()
    seed = store.get('seed', 0) + seed_current
    count = store.get('count', 0) + len(success)
    store = {
        'seed': seed,
        'count': count
    }
    c0 = 1
    if count > 0:
        skill = c0 * pow((1.0 + np.log10(count)), (seed / count))
    else:
        # Avoid ZeroDivisionError when no transits have been seen.
        skill = c0
    skill = min(3.0, max(0.05, skill))
    return {
        'skill': skill,
        '_store': store
    }
| true | true |
f713d95566b79d1732ae7300f0c55116e9f79c8b | 1,017 | bzl | Python | tools/bazel_deps.bzl | rockwotj/run_binary_example | cf613bdb2cd2c1e411e50ad53980d1c022498a88 | [
"MIT"
] | null | null | null | tools/bazel_deps.bzl | rockwotj/run_binary_example | cf613bdb2cd2c1e411e50ad53980d1c022498a88 | [
"MIT"
] | null | null | null | tools/bazel_deps.bzl | rockwotj/run_binary_example | cf613bdb2cd2c1e411e50ad53980d1c022498a88 | [
"MIT"
] | null | null | null | # Third-party dependencies fetched by Bazel
# Unlike WORKSPACE, the content of this file is unordered.
# We keep them separate to make the WORKSPACE file more maintainable.
# Install the nodejs "bootstrap" package
# This provides the basic tools for running and packaging nodejs programs in Bazel
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def fetch_dependencies():
    """Fetch the third-party Bazel repositories this workspace depends on.

    Archives are pinned by release URL and sha256 for reproducible builds.
    """
    http_archive(
        name = "build_bazel_rules_nodejs",
        sha256 = "e79c08a488cc5ac40981987d862c7320cee8741122a2649e9b08e850b6f20442",
        urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/3.8.0/rules_nodejs-3.8.0.tar.gz"],
    )
    # rules_nodejs doesn't depend on skylib, but it's a useful dependency anyway.
    http_archive(
        name = "bazel_skylib",
        urls = [
            "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
        ],
        sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
    )
| 44.217391 | 112 | 0.72468 |
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def fetch_dependencies():
http_archive(
name = "build_bazel_rules_nodejs",
sha256 = "e79c08a488cc5ac40981987d862c7320cee8741122a2649e9b08e850b6f20442",
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/3.8.0/rules_nodejs-3.8.0.tar.gz"],
)
http_archive(
name = "bazel_skylib",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
],
sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
)
| true | true |
f713d960b564dbd5eb8dda394cd9127243da4a02 | 70 | py | Python | preprocess/__init__.py | failyang/tensorflow-examples | 48da6cd25138d448a4ddf7710e8abab0392c453c | [
"MIT"
] | null | null | null | preprocess/__init__.py | failyang/tensorflow-examples | 48da6cd25138d448a4ddf7710e8abab0392c453c | [
"MIT"
] | null | null | null | preprocess/__init__.py | failyang/tensorflow-examples | 48da6cd25138d448a4ddf7710e8abab0392c453c | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from .data_prep import img_pad | 23.333333 | 38 | 0.871429 | from __future__ import absolute_import
from .data_prep import img_pad | true | true |
f713d990afcddb0cf000499c6b9a2e37b17481d0 | 171 | py | Python | cors/resources/checkandremove.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 8 | 2019-04-09T21:13:05.000Z | 2021-11-23T17:25:18.000Z | cors/resources/checkandremove.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 21 | 2021-03-31T19:48:22.000Z | 2022-03-12T00:24:53.000Z | cors/resources/checkandremove.py | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 11 | 2019-04-12T01:20:16.000Z | 2021-11-23T17:25:02.000Z | def main(request, response):
token = request.GET.first("token")
if request.server.stash.remove(token) is not None:
return "1"
else:
return "0"
| 24.428571 | 54 | 0.614035 | def main(request, response):
token = request.GET.first("token")
if request.server.stash.remove(token) is not None:
return "1"
else:
return "0"
| true | true |
f713d9fffdd9e54ff53f7c4fe224e681bcfb2723 | 8,280 | py | Python | pcdet/models/backbones_3d/spconv_unet.py | StarGazer1995/OpenPCDet | 4af33e8badb0c8e68c7c94c71b0ec5667aad2348 | [
"Apache-2.0"
] | null | null | null | pcdet/models/backbones_3d/spconv_unet.py | StarGazer1995/OpenPCDet | 4af33e8badb0c8e68c7c94c71b0ec5667aad2348 | [
"Apache-2.0"
] | null | null | null | pcdet/models/backbones_3d/spconv_unet.py | StarGazer1995/OpenPCDet | 4af33e8badb0c8e68c7c94c71b0ec5667aad2348 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
import spconv
from functools import partial
from .spconv_backbone import post_act_block
from ...utils import common_utils
class SparseBasicBlock(spconv.SparseModule):
    """ResNet-style basic residual block built from submanifold sparse 3D convs.

    Two 3x3x3 SubMConv3d layers (sharing one ``indice_key`` so spconv can reuse
    the rulebook) with norm + ReLU, plus an identity/``downsample`` skip added
    on the sparse tensor's ``features``.
    """
    expansion = 1
    def __init__(self, inplanes, planes, stride=1, downsample=None, indice_key=None, norm_fn=None):
        # norm_fn: factory producing a 1d normalisation layer for a channel count.
        super(SparseBasicBlock, self).__init__()
        self.conv1 = spconv.SubMConv3d(
            inplanes, planes, kernel_size=3, stride=stride, padding=1, bias=False, indice_key=indice_key
        )
        self.bn1 = norm_fn(planes)
        self.relu = nn.ReLU()
        self.conv2 = spconv.SubMConv3d(
            planes, planes, kernel_size=3, stride=1, padding=1, bias=False, indice_key=indice_key
        )
        self.bn2 = norm_fn(planes)
        self.downsample = downsample
        self.stride = stride
    def forward(self, x):
        """Apply conv-norm-relu-conv-norm, add the skip, relu; returns a sparse tensor."""
        identity = x.features
        # Sparse tensor features are expected as a 2-d (N, C) matrix.
        assert x.features.dim() == 2, 'x.features.dim()=%d' % x.features.dim()
        out = self.conv1(x)
        out.features = self.bn1(out.features)
        out.features = self.relu(out.features)
        out = self.conv2(out)
        out.features = self.bn2(out.features)
        if self.downsample is not None:
            # Project the input so channel counts match before the addition.
            identity = self.downsample(x)
        out.features += identity
        out.features = self.relu(out.features)
        return out
class UNetV2(nn.Module):
    """
    Sparse Convolution based UNet for point-wise feature learning.
    Reference Paper: https://arxiv.org/abs/1907.03670 (Shaoshuai Shi, et. al)
    From Points to Parts: 3D Object Detection from Point Cloud with Part-aware and Part-aggregation Network
    """
    def __init__(self, model_cfg, input_channels, grid_size, voxel_size, point_cloud_range, **kwargs):
        super().__init__()
        self.model_cfg = model_cfg
        # spconv expects (z, y, x); the +1 on z accommodates the final stride-2 z conv.
        self.sparse_shape = grid_size[::-1] + [1, 0, 0]
        self.voxel_size = voxel_size
        self.point_cloud_range = point_cloud_range
        norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)
        self.conv_input = spconv.SparseSequential(
            spconv.SubMConv3d(input_channels, 16, 3, padding=1, bias=False, indice_key='subm1'),
            norm_fn(16),
            nn.ReLU(),
        )
        block = post_act_block
        self.conv1 = spconv.SparseSequential(
            block(16, 16, 3, norm_fn=norm_fn, padding=1, indice_key='subm1'),
        )
        self.conv2 = spconv.SparseSequential(
            # [1600, 1408, 41] <- [800, 704, 21]
            block(16, 32, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv2', conv_type='spconv'),
            block(32, 32, 3, norm_fn=norm_fn, padding=1, indice_key='subm2'),
            block(32, 32, 3, norm_fn=norm_fn, padding=1, indice_key='subm2'),
        )
        self.conv3 = spconv.SparseSequential(
            # [800, 704, 21] <- [400, 352, 11]
            block(32, 64, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv3', conv_type='spconv'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3'),
        )
        self.conv4 = spconv.SparseSequential(
            # [400, 352, 11] <- [200, 176, 5]
            block(64, 64, 3, norm_fn=norm_fn, stride=2, padding=(0, 1, 1), indice_key='spconv4', conv_type='spconv'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4'),
        )
        last_pad = 0
        last_pad = self.model_cfg.get('last_pad', last_pad)
        self.conv_out = spconv.SparseSequential(
            # [200, 150, 5] -> [200, 150, 2]
            spconv.SparseConv3d(64, 128, (3, 1, 1), stride=(2, 1, 1), padding=last_pad,
                                bias=False, indice_key='spconv_down2'),
            norm_fn(128),
            nn.ReLU(),
        )
        # decoder
        # [400, 352, 11] <- [200, 176, 5]
        self.conv_up_t4 = SparseBasicBlock(64, 64, indice_key='subm4', norm_fn=norm_fn)
        self.conv_up_m4 = block(128, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4')
        self.inv_conv4 = block(64, 64, 3, norm_fn=norm_fn, indice_key='spconv4', conv_type='inverseconv')
        # [800, 704, 21] <- [400, 352, 11]
        self.conv_up_t3 = SparseBasicBlock(64, 64, indice_key='subm3', norm_fn=norm_fn)
        self.conv_up_m3 = block(128, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3')
        self.inv_conv3 = block(64, 32, 3, norm_fn=norm_fn, indice_key='spconv3', conv_type='inverseconv')
        # [1600, 1408, 41] <- [800, 704, 21]
        self.conv_up_t2 = SparseBasicBlock(32, 32, indice_key='subm2', norm_fn=norm_fn)
        self.conv_up_m2 = block(64, 32, 3, norm_fn=norm_fn, indice_key='subm2')
        self.inv_conv2 = block(32, 16, 3, norm_fn=norm_fn, indice_key='spconv2', conv_type='inverseconv')
        # [1600, 1408, 41] <- [1600, 1408, 41]
        self.conv_up_t1 = SparseBasicBlock(16, 16, indice_key='subm1', norm_fn=norm_fn)
        self.conv_up_m1 = block(32, 16, 3, norm_fn=norm_fn, indice_key='subm1')
        self.conv5 = spconv.SparseSequential(
            block(16, 16, 3, norm_fn=norm_fn, padding=1, indice_key='subm1')
        )
        self.num_point_features = 16
    def UR_block_forward(self, x_lateral, x_bottom, conv_t, conv_m, conv_inv):
        """One decoder step: refine the lateral skip tensor, fuse it with the
        bottom-up tensor, then invert the matching downsampling convolution."""
        x_trans = conv_t(x_lateral)
        x = x_trans
        x.features = torch.cat((x_bottom.features, x_trans.features), dim=1)
        x_m = conv_m(x)
        x = self.channel_reduction(x, x_m.features.shape[1])
        x.features = x_m.features + x.features
        x = conv_inv(x)
        return x
    @staticmethod
    def channel_reduction(x, out_channels):
        """Reduce x.features from C1 to C2 channels by summing groups of C1/C2.
        Args:
            x: sparse tensor with ``features`` of shape (N, C1)
            out_channels: C2; must divide C1 evenly
        Returns:
            the same sparse tensor with ``features`` of shape (N, C2)
        """
        features = x.features
        n, in_channels = features.shape
        assert (in_channels % out_channels == 0) and (in_channels >= out_channels)
        x.features = features.view(n, out_channels, -1).sum(dim=2)
        return x
    def forward(self, batch_dict):
        """
        Args:
            batch_dict:
                batch_size: int
                vfe_features: (num_voxels, C)
                voxel_coords: (num_voxels, 4), [batch_idx, z_idx, y_idx, x_idx]
        Returns:
            batch_dict:
                encoded_spconv_tensor: sparse tensor
                point_features: (N, C)
        """
        voxel_features, voxel_coords = batch_dict['voxel_features'], batch_dict['voxel_coords']
        batch_size = batch_dict['batch_size']
        input_sp_tensor = spconv.SparseConvTensor(
            features=voxel_features,
            indices=voxel_coords.int(),
            spatial_shape=self.sparse_shape,
            batch_size=batch_size
        )
        x = self.conv_input(input_sp_tensor)
        x_conv1 = self.conv1(x)
        x_conv2 = self.conv2(x_conv1)
        x_conv3 = self.conv3(x_conv2)
        x_conv4 = self.conv4(x_conv3)
        # for detection head
        # [200, 176, 5] -> [200, 176, 2]
        out = self.conv_out(x_conv4)
        # for segmentation head
        # [400, 352, 11] <- [200, 176, 5]
        x_up4 = self.UR_block_forward(x_conv4, x_conv4, self.conv_up_t4, self.conv_up_m4, self.inv_conv4)
        # [800, 704, 21] <- [400, 352, 11]
        x_up3 = self.UR_block_forward(x_conv3, x_up4, self.conv_up_t3, self.conv_up_m3, self.inv_conv3)
        # [1600, 1408, 41] <- [800, 704, 21]
        x_up2 = self.UR_block_forward(x_conv2, x_up3, self.conv_up_t2, self.conv_up_m2, self.inv_conv2)
        # [1600, 1408, 41] <- [1600, 1408, 41]
        x_up1 = self.UR_block_forward(x_conv1, x_up2, self.conv_up_t1, self.conv_up_m1, self.conv5)
        batch_dict['point_features'] = x_up1.features
        point_coords = common_utils.get_voxel_centers(
            x_up1.indices[:, 1:], downsample_times=1, voxel_size=self.voxel_size,
            point_cloud_range=self.point_cloud_range
        )
        # Prefix batch index so each point row is (batch_idx, x, y, z).
        batch_dict['point_coords'] = torch.cat((x_up1.indices[:, 0:1].float(), point_coords), dim=1)
        batch_dict['encoded_spconv_tensor'] = out
        batch_dict['encoded_spconv_tensor_stride'] = 8
        return batch_dict
| 40 | 117 | 0.608937 | import torch
import torch.nn as nn
import spconv
from functools import partial
from .spconv_backbone import post_act_block
from ...utils import common_utils
class SparseBasicBlock(spconv.SparseModule):
    """Residual unit for spconv sparse tensors.

    Two 3x3x3 submanifold convolutions with normalisation and ReLU, plus an
    identity (or ``downsample``-projected) skip added on the features.
    """

    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None, indice_key=None, norm_fn=None):
        super().__init__()

        def submanifold_conv(in_ch, out_ch, conv_stride):
            # Both convolutions share the same indice_key so spconv can reuse
            # the rulebook built for this block.
            return spconv.SubMConv3d(
                in_ch, out_ch, kernel_size=3, stride=conv_stride,
                padding=1, bias=False, indice_key=indice_key,
            )

        self.conv1 = submanifold_conv(inplanes, planes, stride)
        self.bn1 = norm_fn(planes)
        self.relu = nn.ReLU()
        self.conv2 = submanifold_conv(planes, planes, 1)
        self.bn2 = norm_fn(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        """Run conv-norm-relu-conv-norm, add the skip, apply the final ReLU."""
        identity = x.features
        assert x.features.dim() == 2, 'x.features.dim()=%d' % x.features.dim()
        result = self.conv1(x)
        result.features = self.relu(self.bn1(result.features))
        result = self.conv2(result)
        result.features = self.bn2(result.features)
        if self.downsample is not None:
            identity = self.downsample(x)
        result.features += identity
        result.features = self.relu(result.features)
        return result
class UNetV2(nn.Module):
    """Sparse-convolution UNet for point-wise feature learning.

    Encoder (conv1..conv4) downsamples the sparse voxel grid; the decoder
    (conv_up_* / inv_conv*) upsamples it back while fusing skip connections,
    producing per-point features plus an 8x-downsampled tensor for detection.
    """
    def __init__(self, model_cfg, input_channels, grid_size, voxel_size, point_cloud_range, **kwargs):
        super().__init__()
        self.model_cfg = model_cfg
        # spconv expects (z, y, x); the +1 on z accommodates the final stride-2 z conv.
        self.sparse_shape = grid_size[::-1] + [1, 0, 0]
        self.voxel_size = voxel_size
        self.point_cloud_range = point_cloud_range
        norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)
        self.conv_input = spconv.SparseSequential(
            spconv.SubMConv3d(input_channels, 16, 3, padding=1, bias=False, indice_key='subm1'),
            norm_fn(16),
            nn.ReLU(),
        )
        block = post_act_block
        self.conv1 = spconv.SparseSequential(
            block(16, 16, 3, norm_fn=norm_fn, padding=1, indice_key='subm1'),
        )
        # Each conv stage halves the spatial resolution with a stride-2 sparse conv.
        self.conv2 = spconv.SparseSequential(
            block(16, 32, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv2', conv_type='spconv'),
            block(32, 32, 3, norm_fn=norm_fn, padding=1, indice_key='subm2'),
            block(32, 32, 3, norm_fn=norm_fn, padding=1, indice_key='subm2'),
        )
        self.conv3 = spconv.SparseSequential(
            block(32, 64, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv3', conv_type='spconv'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3'),
        )
        self.conv4 = spconv.SparseSequential(
            block(64, 64, 3, norm_fn=norm_fn, stride=2, padding=(0, 1, 1), indice_key='spconv4', conv_type='spconv'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4'),
            block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4'),
        )
        last_pad = 0
        last_pad = self.model_cfg.get('last_pad', last_pad)
        # Extra stride-2 conv along z only, feeding the detection head.
        self.conv_out = spconv.SparseSequential(
            spconv.SparseConv3d(64, 128, (3, 1, 1), stride=(2, 1, 1), padding=last_pad,
                                bias=False, indice_key='spconv_down2'),
            norm_fn(128),
            nn.ReLU(),
        )
        # Decoder: one (refine, merge, inverse-conv) triple per encoder stage.
        self.conv_up_t4 = SparseBasicBlock(64, 64, indice_key='subm4', norm_fn=norm_fn)
        self.conv_up_m4 = block(128, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4')
        self.inv_conv4 = block(64, 64, 3, norm_fn=norm_fn, indice_key='spconv4', conv_type='inverseconv')
        self.conv_up_t3 = SparseBasicBlock(64, 64, indice_key='subm3', norm_fn=norm_fn)
        self.conv_up_m3 = block(128, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3')
        self.inv_conv3 = block(64, 32, 3, norm_fn=norm_fn, indice_key='spconv3', conv_type='inverseconv')
        self.conv_up_t2 = SparseBasicBlock(32, 32, indice_key='subm2', norm_fn=norm_fn)
        self.conv_up_m2 = block(64, 32, 3, norm_fn=norm_fn, indice_key='subm2')
        self.inv_conv2 = block(32, 16, 3, norm_fn=norm_fn, indice_key='spconv2', conv_type='inverseconv')
        self.conv_up_t1 = SparseBasicBlock(16, 16, indice_key='subm1', norm_fn=norm_fn)
        self.conv_up_m1 = block(32, 16, 3, norm_fn=norm_fn, indice_key='subm1')
        self.conv5 = spconv.SparseSequential(
            block(16, 16, 3, norm_fn=norm_fn, padding=1, indice_key='subm1')
        )
        self.num_point_features = 16
    def UR_block_forward(self, x_lateral, x_bottom, conv_t, conv_m, conv_inv):
        """One decoder step: refine the lateral skip tensor, fuse it with the
        bottom-up tensor, then invert the matching downsampling convolution."""
        x_trans = conv_t(x_lateral)
        x = x_trans
        x.features = torch.cat((x_bottom.features, x_trans.features), dim=1)
        x_m = conv_m(x)
        x = self.channel_reduction(x, x_m.features.shape[1])
        x.features = x_m.features + x.features
        x = conv_inv(x)
        return x
    @staticmethod
    def channel_reduction(x, out_channels):
        """Reduce x.features from C1 to C2 channels by summing groups of C1/C2.

        Args:
            x: sparse tensor with ``features`` of shape (N, C1)
            out_channels: C2; must divide C1 evenly
        Returns:
            the same sparse tensor with ``features`` of shape (N, C2)
        """
        features = x.features
        n, in_channels = features.shape
        assert (in_channels % out_channels == 0) and (in_channels >= out_channels)
        x.features = features.view(n, out_channels, -1).sum(dim=2)
        return x
    def forward(self, batch_dict):
        """Encode voxels, decode per-point features, and augment ``batch_dict``
        with 'point_features', 'point_coords' and the encoded detection tensor."""
        voxel_features, voxel_coords = batch_dict['voxel_features'], batch_dict['voxel_coords']
        batch_size = batch_dict['batch_size']
        input_sp_tensor = spconv.SparseConvTensor(
            features=voxel_features,
            indices=voxel_coords.int(),
            spatial_shape=self.sparse_shape,
            batch_size=batch_size
        )
        x = self.conv_input(input_sp_tensor)
        x_conv1 = self.conv1(x)
        x_conv2 = self.conv2(x_conv1)
        x_conv3 = self.conv3(x_conv2)
        x_conv4 = self.conv4(x_conv3)
        # Detection head branch.
        out = self.conv_out(x_conv4)
        # Segmentation/decoder branch, walking back up the encoder stages.
        x_up4 = self.UR_block_forward(x_conv4, x_conv4, self.conv_up_t4, self.conv_up_m4, self.inv_conv4)
        x_up3 = self.UR_block_forward(x_conv3, x_up4, self.conv_up_t3, self.conv_up_m3, self.inv_conv3)
        x_up2 = self.UR_block_forward(x_conv2, x_up3, self.conv_up_t2, self.conv_up_m2, self.inv_conv2)
        x_up1 = self.UR_block_forward(x_conv1, x_up2, self.conv_up_t1, self.conv_up_m1, self.conv5)
        batch_dict['point_features'] = x_up1.features
        point_coords = common_utils.get_voxel_centers(
            x_up1.indices[:, 1:], downsample_times=1, voxel_size=self.voxel_size,
            point_cloud_range=self.point_cloud_range
        )
        # Prefix batch index so each point row is (batch_idx, x, y, z).
        batch_dict['point_coords'] = torch.cat((x_up1.indices[:, 0:1].float(), point_coords), dim=1)
        batch_dict['encoded_spconv_tensor'] = out
        batch_dict['encoded_spconv_tensor_stride'] = 8
        return batch_dict
| true | true |
f713dab102d61680c2f1adddccfa93c5ec27ff39 | 1,194 | py | Python | rich/_lru_cache.py | hultner-technologies/rich | 7a50f9d3f299a2ed44e36aa241e0a15323add317 | [
"MIT"
] | 2 | 2021-05-11T19:27:06.000Z | 2021-05-12T06:08:08.000Z | rich/_lru_cache.py | hultner-technologies/rich | 7a50f9d3f299a2ed44e36aa241e0a15323add317 | [
"MIT"
] | 2 | 2020-05-09T12:42:28.000Z | 2020-05-09T14:44:04.000Z | rich/_lru_cache.py | hultner-technologies/rich | 7a50f9d3f299a2ed44e36aa241e0a15323add317 | [
"MIT"
] | 1 | 2020-05-04T08:13:02.000Z | 2020-05-04T08:13:02.000Z | from collections import OrderedDict
from typing import Dict, Generic, Mapping, TypeVar
CacheKey = TypeVar("CacheKey")
CacheValue = TypeVar("CacheValue")


class LRUCache(Generic[CacheKey, CacheValue], OrderedDict):
    """
    A dictionary-like container that stores a given maximum items.

    If an additional item is added when the LRUCache is full, the least
    recently used key is discarded to make room for the new item.
    Both lookups and assignments mark the key as most recently used.
    """

    def __init__(self, cache_size: int) -> None:
        # Maximum number of entries held before eviction kicks in.
        self.cache_size = cache_size
        super(LRUCache, self).__init__()

    def __setitem__(self, key: CacheKey, value: CacheValue) -> None:
        """Store a new value, potentially discarding the oldest entry."""
        if key not in self:
            if len(self) >= self.cache_size:
                # The front of the OrderedDict is the least recently used key.
                self.popitem(last=False)
        else:
            # Bug fix: re-setting an existing key must also refresh its
            # recency; otherwise a hot, frequently-updated key could be
            # evicted before colder ones, violating LRU semantics.
            self.move_to_end(key)
        OrderedDict.__setitem__(self, key, value)

    def __getitem__(self: Dict[CacheKey, CacheValue], key: CacheKey) -> CacheValue:
        """Gets the item, but also makes it most recent."""
        value: CacheValue = OrderedDict.__getitem__(self, key)
        OrderedDict.__delitem__(self, key)
        OrderedDict.__setitem__(self, key, value)
        return value
| 34.114286 | 83 | 0.675042 | from collections import OrderedDict
from typing import Dict, Generic, Mapping, TypeVar
CacheKey = TypeVar("CacheKey")
CacheValue = TypeVar("CacheValue")
class LRUCache(Generic[CacheKey, CacheValue], OrderedDict):
def __init__(self, cache_size: int) -> None:
self.cache_size = cache_size
super(LRUCache, self).__init__()
def __setitem__(self, key: CacheKey, value: CacheValue) -> None:
if key not in self:
if len(self) >= self.cache_size:
self.popitem(last=False)
OrderedDict.__setitem__(self, key, value)
def __getitem__(self: Dict[CacheKey, CacheValue], key: CacheKey) -> CacheValue:
value: CacheValue = OrderedDict.__getitem__(self, key)
OrderedDict.__delitem__(self, key)
OrderedDict.__setitem__(self, key, value)
return value
| true | true |
f713dac0b4da63916336159fcfe479841e59456e | 1,950 | py | Python | hdfs_namenode/setup.py | byronwolfman/integrations-core | e89d31cf760cb0829a524929104eb400ec93ec2e | [
"BSD-3-Clause"
] | null | null | null | hdfs_namenode/setup.py | byronwolfman/integrations-core | e89d31cf760cb0829a524929104eb400ec93ec2e | [
"BSD-3-Clause"
] | null | null | null | hdfs_namenode/setup.py | byronwolfman/integrations-core | e89d31cf760cb0829a524929104eb400ec93ec2e | [
"BSD-3-Clause"
] | null | null | null | # (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
# Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
HERE = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
# Get version info
ABOUT = {}
with open(path.join(HERE, "datadog_checks", "hdfs_namenode", "__about__.py")) as f:
exec(f.read(), ABOUT)
def get_requirements(fpath):
    """Return the lines of *fpath* (resolved relative to this file) as a list."""
    requirements_path = path.join(HERE, fpath)
    with open(requirements_path, encoding='utf-8') as req_file:
        return list(req_file)
CHECKS_BASE_REQ = 'datadog_checks_base'
setup(
name='datadog-hdfs_namenode',
version=ABOUT['__version__'],
description='The HDFS Namenode check',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='datadog agent hdfs_namenode check',
# The project's main homepage.
url='https://github.com/DataDog/integrations-core',
# Author details
author='Datadog',
author_email='packages@datadoghq.com',
# License
license='BSD',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# The package we're going to ship
packages=['datadog_checks.hdfs_namenode'],
# Run-time dependencies
install_requires=[CHECKS_BASE_REQ],
tests_require=get_requirements('requirements-dev.txt'),
# Extra files to ship with the wheel package
include_package_data=True,
)
| 28.26087 | 83 | 0.691282 |
from setuptools import setup
from codecs import open
from os import path
HERE = path.abspath(path.dirname(__file__))
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
ABOUT = {}
with open(path.join(HERE, "datadog_checks", "hdfs_namenode", "__about__.py")) as f:
exec(f.read(), ABOUT)
def get_requirements(fpath):
    """Return the raw lines (newlines included) of *fpath*, resolved relative to this file."""
    with open(path.join(HERE, fpath), encoding='utf-8') as f:
        return f.readlines()
CHECKS_BASE_REQ = 'datadog_checks_base'
setup(
name='datadog-hdfs_namenode',
version=ABOUT['__version__'],
description='The HDFS Namenode check',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='datadog agent hdfs_namenode check',
url='https://github.com/DataDog/integrations-core',
# Author details
author='Datadog',
author_email='packages@datadoghq.com',
# License
license='BSD',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# The package we're going to ship
packages=['datadog_checks.hdfs_namenode'],
install_requires=[CHECKS_BASE_REQ],
tests_require=get_requirements('requirements-dev.txt'),
include_package_data=True,
)
| true | true |
f713dbe3cb55df4e874d577344ebd20f05b89914 | 5,140 | py | Python | temp/train_cnn.py | ChenLi0830/Clevo-Categorization-Service | 44b509786849a6dce610171d86e5da68ad748b4b | [
"Apache-2.0"
] | null | null | null | temp/train_cnn.py | ChenLi0830/Clevo-Categorization-Service | 44b509786849a6dce610171d86e5da68ad748b4b | [
"Apache-2.0"
] | null | null | null | temp/train_cnn.py | ChenLi0830/Clevo-Categorization-Service | 44b509786849a6dce610171d86e5da68ad748b4b | [
"Apache-2.0"
] | null | null | null | '''This example demonstrates the use of Convolution1D for text classification.
'''
from __future__ import print_function
import sys
sys.path.append('/Users/wangwei/anaconda2/envs/python3_keras/lib/python3.6/site-packages')
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import Conv1D, GlobalMaxPooling1D
from keras import backend as K
#os.chdir('/Users/wangwei/cuda_keras_projets/keras/examples/')
import six.moves.cPickle as pickle # for python 3
#import cPickle for python 2.7
import pandas as pd
import numpy as np
import jieba
# set parameters:
maxlen = 64 #11
batch_size = 5
embedding_dims = 300
filters = 50 # 100
kernel_size = 3
hidden_dims = 100
epochs = 10
def get_idx_from_sent(sent, word_idx_map, k=300):
    """Transform a sentence into a list of vocabulary indices.

    The sentence is segmented with jieba (accurate mode, ``cut_all=False``);
    words missing from ``word_idx_map`` are silently skipped.

    Args:
        sent: raw sentence to index.
        word_idx_map: mapping from word to integer index.
        k: unused; kept for backward compatibility with existing callers.

    Returns:
        List of integer indices, in sentence order.
    """
    # jieba.cut yields a generator; no need to materialise it first.
    return [word_idx_map[word]
            for word in jieba.cut(sent, cut_all=False)
            if word in word_idx_map]
def make_idx_data_cv(revs, word_idx_map, cv, k=300):
    """
    Transforms sentences into a 2-d matrix.

    Reviews whose "split" field equals *cv* form the test fold; all others
    form the train fold.  Returns [train, test, train_y, test_y] where the
    first two hold index lists and the last two the matching labels.
    """
    train, test = [], []
    train_y, test_y = [],[]
    for rev in revs:
        sent = get_idx_from_sent(rev['text'], word_idx_map, k)
        if rev["split"]==cv:
            test.append(sent)
            test_y.append(rev["y"])
        else:
            train.append(sent)
            train_y.append(rev["y"])
    #train = np.array(train, dtype='int')
    #test = np.array(test, dtype='int')
    return [train, test, train_y, test_y]
if __name__=="__main__":
    print('The script that is running is :', __file__)
    print('Depending on the training datasets: \n maximum length of a sentence is :', maxlen)
    ######### Main code starts here ###########
    # The pickle holds (revs, W, W2, word_idx_map, word_idx_map2, vocab) as
    # produced by the preprocessing step.
    print("loading data...")
    x = pickle.load(open("mr_folder/mr.p","rb"), encoding='latin1')
    revs, W, W2, word_idx_map, word_idx_map2, vocab = x[0], x[1], x[2], x[3], x[4],x[5]
    print("data loaded!")
    print("using: word2vec vectors")
    tmp = pd.DataFrame(revs)
    max_l = np.max(tmp["num_words"])
    print("number of sentences: " , str(len(revs)))
    print("vocab size: " , str(len(vocab)))
    print("max sentence length: " + str(max_l))
    max_features = len(vocab)#50
    #### Make datasets
    # Fold 1 becomes the test split; everything else is training data.
    datasets = make_idx_data_cv(revs, word_idx_map2, 1, k=300)
    x_train = datasets[0]
    x_test = datasets[1]
    y_train = datasets[2]
    y_test = datasets[3]
    print('Pad sequences (samples x time)')
    x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
    x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
    print('x_train shape:', x_train.shape)
    print('x_test shape:', x_test.shape)
    ############# modelling with CNN
    import keras
    num_classes = 9
    # convert class vectors to binary class matrices
    y_train = keras.utils.to_categorical(y_train, num_classes)
    y_test = keras.utils.to_categorical(y_test, num_classes)
    print('lengh of y_train is :', y_train.shape[0])
    print('Build model...')
    # Reset any previous Keras/TF graph state before building the model.
    K.clear_session()
    model = Sequential()
    # we start off with an efficient embedding layer which maps
    # our vocab indices into embedding_dims dimensions
    model.add(Embedding(max_features+1,
                        embedding_dims,
                        weights=[W],
                        input_length=maxlen,
                        trainable=False))
    model.add(Dropout(0.2))
    # we add a Convolution1D, which will learn filters
    # word group filters of size filter_length:
    model.add(Conv1D(filters,
                     kernel_size,
                     padding='valid',
                     activation='relu',
                     strides=1))
    # we use max pooling:
    model.add(GlobalMaxPooling1D())
    # We add a vanilla hidden layer:
    model.add(Dense(hidden_dims))
    model.add(Dropout(0.2))
    #model.add(Activation('relu'))
    # We project onto a single unit output layer, and squash it with a sigmoid:
    #model.add(Dense(1))
    model.add(Activation('sigmoid'))
    ######################
    model.add(Dropout(0.2))
    model.add(Dense(num_classes, activation='softmax'))
    # model.compile(loss=keras.losses.categorical_crossentropy,
    #               optimizer=keras.optimizers.Adadelta(),
    #               metrics=['accuracy'])
    model.compile(optimizer='rmsprop',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.fit(x_train, y_train,
              batch_size=batch_size,
              epochs=epochs,
              verbose=1,
              validation_data=(x_test, y_test))
    score = model.evaluate(x_test, y_test, verbose=0)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])
    # serialize model to JSON
    model_json = model.to_json()
    with open("mr_folder/model.json", "w") as json_file:
        json_file.write(model_json)
    # serialize weights to HDF5
    model.save_weights("mr_folder/model.h5")
    print("Saved model to disk")
from __future__ import print_function
import sys
sys.path.append('/Users/wangwei/anaconda2/envs/python3_keras/lib/python3.6/site-packages')
from keras.preprocessing import sequence
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import Conv1D, GlobalMaxPooling1D
from keras import backend as K
import six.moves.cPickle as pickle
import pandas as pd
import numpy as np
import jieba
maxlen = 64
batch_size = 5
embedding_dims = 300
filters = 50
kernel_size = 3
hidden_dims = 100
epochs = 10
def get_idx_from_sent(sent, word_idx_map, k=300):
    """Segment *sent* with jieba and map each known word to its index.

    Words absent from *word_idx_map* are skipped; *k* is unused but kept
    for signature compatibility.
    """
    indices = []
    for token in jieba.cut(sent, cut_all=False):
        if token in word_idx_map:
            indices.append(word_idx_map[token])
    return indices
def make_idx_data_cv(revs, word_idx_map, cv, k=300):
    """Split indexed sentences into train/test folds.

    A review whose "split" field equals *cv* goes to the test fold, all
    others to the train fold.  Returns [train, test, train_y, test_y].
    """
    train, test, train_y, test_y = [], [], [], []
    for rev in revs:
        sent = get_idx_from_sent(rev['text'], word_idx_map, k)
        is_test_fold = rev["split"] == cv
        (test if is_test_fold else train).append(sent)
        (test_y if is_test_fold else train_y).append(rev["y"])
    return [train, test, train_y, test_y]
if __name__=="__main__":
print('The script that is running is :', __file__)
print('Depending on the training datasets: \n maximum length of a sentence is :', maxlen)
rint("vocab size: " , str(len(vocab)))
print("max sentence length: " + str(max_l))
max_features = len(vocab)
map2, 1, k=300)
x_train = datasets[0]
x_test = datasets[1]
y_train = datasets[2]
y_test = datasets[3]
print('Pad sequences (samples x time)')
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
print('x_train shape:', x_train.shape)
print('x_test shape:', x_test.shape)
model.add(Embedding(max_features+1,
embedding_dims,
weights=[W],
input_length=maxlen,
trainable=False))
model.add(Dropout(0.2))
model.add(Conv1D(filters,
kernel_size,
padding='valid',
activation='relu',
strides=1))
model.add(GlobalMaxPooling1D())
model.add(Dense(hidden_dims))
model.add(Dropout(0.2))
model.add(Activation('sigmoid'))
odel.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs,
verbose=1,
validation_data=(x_test, y_test))
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
model_json = model.to_json()
with open("mr_folder/model.json", "w") as json_file:
json_file.write(model_json)
model.save_weights("mr_folder/model.h5")
print("Saved model to disk") | true | true |
f713dda96bfadd28ecfe9efd69a5b821b0f53e52 | 19,840 | py | Python | cogs/boards.py | wpmjones/donationbot | dd2913df09b61d4fe4738fe9df14ff8bc4ec6f95 | [
"MIT"
] | null | null | null | cogs/boards.py | wpmjones/donationbot | dd2913df09b61d4fe4738fe9df14ff8bc4ec6f95 | [
"MIT"
] | null | null | null | cogs/boards.py | wpmjones/donationbot | dd2913df09b61d4fe4738fe9df14ff8bc4ec6f95 | [
"MIT"
] | null | null | null | import asyncio
import asyncpg
import coc
import discord
import logging
import math
from collections import namedtuple
from datetime import datetime
from discord.ext import commands, tasks
from cogs.utils.db_objects import DatabaseMessage
from cogs.utils.formatters import CLYTable, get_render_type
from cogs.utils import checks
log = logging.getLogger(__name__)
MockPlayer = namedtuple('MockPlayer', 'clan name')
mock = MockPlayer('Unknown', 'Unknown')
class DonationBoard(commands.Cog):
"""Contains all DonationBoard Configurations.
"""
    def __init__(self, bot):
        self.bot = bot
        self.clan_updates = []
        # Message ids the bot itself is about to delete; lets the raw-delete
        # listeners tell our own deletions apart from a user's.
        self._to_be_deleted = set()
        self.bot.coc.add_events(
            self.on_clan_member_donation,
            self.on_clan_member_received,
            self.on_clan_member_trophies_change,
            self.on_clan_member_join
        )
        self.bot.coc._clan_retry_interval = 60
        self.bot.coc.start_updates('clan')
        # Guards _data_batch/_clan_events, which are written by the coc event
        # handlers and drained by the two background loops below.
        self._batch_lock = asyncio.Lock(loop=bot.loop)
        self._data_batch = {}
        self._clan_events = set()
        self.bulk_insert_loop.add_exception_type(asyncpg.PostgresConnectionError)
        self.bulk_insert_loop.start()
        self.update_board_loops.add_exception_type(asyncpg.PostgresConnectionError, coc.ClashOfClansException)
        self.update_board_loops.start()
    def cog_unload(self):
        """Stop the background loops and detach the coc.py event handlers."""
        self.bulk_insert_loop.cancel()
        self.update_board_loops.cancel()
        self.bot.coc.remove_events(
            self.on_clan_member_donation,
            self.on_clan_member_received,
            self.on_clan_member_trophies_change,
            self.on_clan_member_join
        )
    @tasks.loop(seconds=30.0)
    async def bulk_insert_loop(self):
        """Every 30s, flush the pending stat batch to the database."""
        async with self._batch_lock:
            await self.bulk_insert()
@tasks.loop(seconds=60.0)
async def update_board_loops(self):
async with self._batch_lock:
clan_tags = list(self._clan_events)
self._clan_events.clear()
query = """SELECT DISTINCT boards.channel_id
FROM boards
INNER JOIN clans
ON clans.guild_id = boards.guild_id
WHERE clans.clan_tag = ANY($1::TEXT[])
"""
fetch = await self.bot.pool.fetch(query, clan_tags)
for n in fetch:
try:
await self.update_board(n['channel_id'])
except:
pass
async def bulk_insert(self):
query = """UPDATE players SET donations = players.donations + x.donations,
received = players.received + x.received,
trophies = x.trophies
FROM(
SELECT x.player_tag, x.donations, x.received, x.trophies
FROM jsonb_to_recordset($1::jsonb)
AS x(player_tag TEXT,
donations INTEGER,
received INTEGER,
trophies INTEGER)
)
AS x
WHERE players.player_tag = x.player_tag
AND players.season_id=$2
"""
query2 = """UPDATE eventplayers SET donations = eventplayers.donations + x.donations,
received = eventplayers.received + x.received,
trophies = x.trophies
FROM(
SELECT x.player_tag, x.donations, x.received, x.trophies
FROM jsonb_to_recordset($1::jsonb)
AS x(player_tag TEXT,
donations INTEGER,
received INTEGER,
trophies INTEGER)
)
AS x
WHERE eventplayers.player_tag = x.player_tag
AND eventplayers.live = true
"""
if self._data_batch:
response = await self.bot.pool.execute(query, list(self._data_batch.values()),
await self.bot.seasonconfig.get_season_id())
log.debug(f'Registered donations/received to the database. Status Code {response}.')
response = await self.bot.pool.execute(query2, list(self._data_batch.values()))
log.debug(f'Registered donations/received to the events database. Status Code {response}.')
self._data_batch.clear()
    @commands.Cog.listener()
    async def on_guild_channel_delete(self, channel):
        """Clean up board state when a text channel is deleted.

        Removes tracked messages for the channel, detaches/disables any board
        that pointed at it, then drops the cached board config.
        """
        if not isinstance(channel, discord.TextChannel):
            return
        query = "DELETE FROM messages WHERE channel_id = $1;"
        query2 = """UPDATE boards
                    SET channel_id = NULL,
                        toggle = False
                    WHERE channel_id = $1;
                """
        await self.bot.pool.execute(query, channel.id)
        await self.bot.pool.execute(query2, channel.id)
        # The cached config for this channel is now stale.
        self.bot.utils.board_config.invalidate(self.bot.utils, channel.id)
    @commands.Cog.listener()
    async def on_raw_message_delete(self, payload):
        """Re-create a board message when one of ours is deleted by hand.

        Deletions initiated by the bot itself are registered in
        ``_to_be_deleted`` beforehand and are ignored here.
        """
        config = await self.bot.utils.board_config(payload.channel_id)
        if not config:
            return
        if config.channel_id != payload.channel_id:
            return
        if payload.message_id in self._to_be_deleted:
            # We deleted it ourselves - consume the marker and do nothing.
            self._to_be_deleted.discard(payload.message_id)
            return
        self.bot.utils.get_message.invalidate(self.bot.utils, payload.message_id)
        message = await self.safe_delete(message_id=payload.message_id, delete_message=False)
        if message:
            # A tracked board message lived here: post a fresh one.
            await self.new_board_message(self.bot.get_channel(payload.channel_id), config.type)
@commands.Cog.listener()
async def on_raw_bulk_message_delete(self, payload):
    """Re-create board messages removed by a bulk deletion.

    Mirrors on_raw_message_delete, but iterates every deleted id.
    """
    config = await self.bot.utils.board_config(payload.channel_id)
    if not config:
        return
    if config.channel_id != payload.channel_id:
        return

    for message_id in payload.message_ids:
        # Skip deletions this cog performed itself.
        if message_id in self._to_be_deleted:
            self._to_be_deleted.discard(message_id)
            continue

        # Bug fix: the cached method is bound to self.bot.utils (see
        # on_raw_message_delete) — invalidating with the cog instance
        # never hit the right cache entry.
        self.bot.utils.get_message.invalidate(self.bot.utils, message_id)

        message = await self.safe_delete(message_id=message_id, delete_message=False)
        if message:
            await self.new_board_message(self.bot.get_channel(payload.channel_id), config.type)
async def on_clan_member_donation(self, old_donations, new_donations, player, clan):
    """Queue a donation delta for the next bulk database insert."""
    log.debug(f'Received on_clan_member_donation event for player {player} of clan {clan}')
    # A drop in the counter means it was reset (e.g. season change):
    # start counting from the new value instead of a negative delta.
    delta = new_donations if old_donations > new_donations else new_donations - old_donations

    async with self._batch_lock:
        entry = self._data_batch.get(player.tag)
        if entry is not None:
            entry['donations'] = delta
        else:
            self._data_batch[player.tag] = {
                'player_tag': player.tag,
                'donations': delta,
                'received': 0,
                'trophies': player.trophies,
            }
        self._clan_events.add(clan.tag)
async def on_clan_member_received(self, old_received, new_received, player, clan):
    """Queue a donations-received delta for the next bulk database insert."""
    log.debug(f'Received on_clan_member_received event for player {player} of clan {clan}')
    # A drop in the counter means it was reset (e.g. season change):
    # start counting from the new value instead of a negative delta.
    delta = new_received if old_received > new_received else new_received - old_received

    async with self._batch_lock:
        entry = self._data_batch.get(player.tag)
        if entry is not None:
            entry['received'] = delta
        else:
            self._data_batch[player.tag] = {
                'player_tag': player.tag,
                'donations': 0,
                'received': delta,
                'trophies': player.trophies,
            }
        self._clan_events.add(clan.tag)
async def on_clan_member_trophies_change(self, _, new_trophies, player, clan):
    """Queue an absolute trophy-count update for the next bulk insert."""
    log.debug(f'Received on_clan_member_trophy_change event for player {player} of clan {clan}.')
    async with self._batch_lock:
        entry = self._data_batch.get(player.tag)
        if entry is not None:
            entry['trophies'] = new_trophies
        else:
            self._data_batch[player.tag] = {
                'player_tag': player.tag,
                'donations': 0,
                'received': 0,
                'trophies': new_trophies,
            }
        self._clan_events.add(clan.tag)
async def on_clan_member_join(self, member, clan):
    """Register a member who just joined a clan, both for the current
    season and for any events the clan is currently taking part in."""
    player = await self.bot.coc.get_player(member.tag)
    season_id = await self.bot.seasonconfig.get_season_id()

    # Season registration. The start_* columns snapshot the player's
    # stats at join time; $4 is bound twice so trophies == start_trophies.
    insert_player = """INSERT INTO players (
                           player_tag,
                           donations,
                           received,
                           trophies,
                           start_trophies,
                           season_id,
                           start_friend_in_need,
                           start_sharing_is_caring,
                           start_attacks,
                           start_defenses,
                           start_best_trophies,
                           start_update
                       )
                       VALUES ($1,$2,$3,$4,$4,$5,$6,$7,$8,$9,$10,True)
                       ON CONFLICT (player_tag, season_id)
                       DO NOTHING
                    """
    response = await self.bot.pool.execute(
        insert_player,
        player.tag,
        player.donations,
        player.received,
        player.trophies,
        season_id,
        player.achievements_dict['Friend in Need'].value,
        player.achievements_dict['Sharing is caring'].value,
        player.attack_wins,
        player.defense_wins,
        player.best_trophies
    )
    log.debug(f'New member {member} joined clan {clan}. Performed a query to insert them into players. '
              f'Status Code: {response}')

    # Find events currently running for any guild this clan belongs to.
    find_live_events = """SELECT events.id
                          FROM events
                          INNER JOIN clans
                          ON clans.guild_id = events.guild_id
                          WHERE clans.clan_tag = $1
                          AND events.start <= now()
                          AND events.finish >= now()
                       """
    live_events = await self.bot.pool.fetch(find_live_events, clan.tag)
    if not live_events:
        return

    # Event registration; a returning player is simply flipped back to live.
    insert_event_player = """INSERT INTO eventplayers (
                                 player_tag,
                                 trophies,
                                 event_id,
                                 start_friend_in_need,
                                 start_sharing_is_caring,
                                 start_attacks,
                                 start_defenses,
                                 start_trophies,
                                 start_best_trophies,
                                 start_update,
                                 live
                             )
                             VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, True, True)
                             ON CONFLICT (player_tag, event_id)
                             DO UPDATE
                             SET live=True
                             WHERE eventplayers.player_tag = $1
                             AND eventplayers.event_id = $2
                          """
    for event in live_events:
        response = await self.bot.pool.execute(
            insert_event_player,
            player.tag,
            player.trophies,
            event['id'],
            player.achievements_dict['Friend in Need'].value,
            player.achievements_dict['Sharing is caring'].value,
            player.attack_wins,
            player.defense_wins,
            player.trophies,
            player.best_trophies
        )
        log.debug(f'New member {member} joined clan {clan}. '
                  f'Performed a query to insert them into eventplayers. Status Code: {response}')
async def new_board_message(self, channel, board_type):
    """Post a placeholder board message and register it in the database.

    Returns the new discord.Message, or None if the channel is missing
    or the bot cannot send there.
    """
    if not channel:
        return

    try:
        new_msg = await channel.send('Placeholder')
    except (discord.NotFound, discord.Forbidden):
        # Channel vanished or we lack permission — nothing to register.
        return

    await self.bot.pool.execute(
        "INSERT INTO messages (guild_id, message_id, channel_id) VALUES ($1, $2, $3)",
        new_msg.guild.id, new_msg.id, new_msg.channel.id,
    )

    event_config = await self.bot.utils.event_config(channel.id)
    if event_config:
        # NOTE(review): removes and re-posts the event message — presumably
        # to keep it positioned after the board messages; confirm intent.
        await self.bot.background.remove_event_msg(event_config.id, channel, board_type)
        await self.bot.background.new_event_message(event_config, channel.guild.id, channel.id, board_type)
    return new_msg
async def safe_delete(self, message_id, delete_message=True):
    """Remove a tracked board message record from the database.

    When delete_message is True the Discord message is deleted too; the
    id is pre-marked in _to_be_deleted so the raw-delete listener knows
    to ignore our own deletion. Returns the DatabaseMessage when only
    the record was removed, None when no record existed.
    """
    query = "DELETE FROM messages WHERE message_id = $1 RETURNING id, guild_id, message_id, channel_id"
    record = await self.bot.pool.fetchrow(query, message_id)
    if not record:
        return None

    message = DatabaseMessage(bot=self.bot, record=record)
    if not delete_message:
        return message

    self._to_be_deleted.add(message_id)
    discord_message = await message.get_message()
    if not discord_message:
        return
    await discord_message.delete()
async def get_board_messages(self, channel_id, number_of_msg=None):
    """Fetch the live board messages for a channel, trimming or topping
    up to *number_of_msg* messages when requested."""
    config = await self.bot.utils.board_config(channel_id)
    # NOTE(review): this passes when EITHER channel or toggle is set —
    # looks like both should be required; confirm before changing.
    if not (config.channel or config.toggle):
        return

    fetch = await config.messages()
    messages = [await n.get_message() for n in fetch if await n.get_message()]
    current_count = len(messages)

    if not number_of_msg or current_count == number_of_msg:
        return messages

    if current_count > number_of_msg:
        # Too many: delete the surplus records from the tail.
        for surplus in messages[number_of_msg:]:
            await self.safe_delete(surplus.id)
        return messages[:number_of_msg]

    if not config.channel:
        return

    # Too few: post placeholders until the requested count is reached.
    for _ in range(number_of_msg - current_count):
        created = await self.new_board_message(config.channel, config.type)
        if not created:
            return
        messages.append(created)
    return messages
async def get_top_players(self, players, board_type, sort_by, in_event, season_id=None):
    """Fetch up to 100 DB rows for *players*, ordered for the board.

    Returns None for an unrecognised board_type.
    """
    season_id = season_id or await self.bot.seasonconfig.get_season_id()

    if board_type == 'donation':
        column_1, column_2 = 'donations', 'received'
        if sort_by == 'donation':
            sort_by = 'donations'
    elif board_type == 'trophy':
        column_1, column_2 = 'trophies', 'trophies - start_trophies'
        sort_by = column_2 if sort_by == 'gain' else column_1
    else:
        return

    tags = [player.tag for player in players]

    # Interpolation is safe: the columns and sort key can only be one of
    # the fixed names assigned above.
    if in_event:
        query = f"""SELECT player_tag, {column_1}, {column_2}
                    FROM eventplayers
                    WHERE player_tag=ANY($1::TEXT[])
                    AND live=true
                    ORDER BY {sort_by} DESC NULLS LAST
                    LIMIT 100;
                 """
        return await self.bot.pool.fetch(query, tags)

    query = f"""SELECT player_tag, {column_1}, {column_2}
                FROM players
                WHERE player_tag=ANY($1::TEXT[])
                AND season_id=$2
                ORDER BY {sort_by} DESC NULLS LAST
                LIMIT 100;
             """
    return await self.bot.pool.fetch(query, tags, season_id)
async def update_board(self, channel_id):
    """Rebuild and edit all board messages in *channel_id* from fresh data.

    No-op when the board is unconfigured, toggled off, or its channel
    has been removed.
    """
    config = await self.bot.utils.board_config(channel_id)
    if not config:
        return
    if not config.toggle:
        return
    if not config.channel:
        return

    # Collect the clans feeding this board.
    if config.in_event:
        query = """SELECT DISTINCT clan_tag FROM clans WHERE guild_id=$1 AND in_event=$2"""
        fetch = await self.bot.pool.fetch(query, config.guild_id, config.in_event)
    else:
        query = "SELECT DISTINCT clan_tag FROM clans WHERE guild_id=$1"
        fetch = await self.bot.pool.fetch(query, config.guild_id)

    clans = await self.bot.coc.get_clans((n[0] for n in fetch)).flatten()
    players = []
    for n in clans:
        players.extend(p for p in n.itermembers)

    try:
        top_players = await self.get_top_players(players, config.type, config.sort_by, config.in_event)
    except Exception:
        # Narrowed from a bare except (which also trapped SystemExit /
        # KeyboardInterrupt); log.exception records the traceback too.
        log.exception(
            f"{clans} channelid: {channel_id}, guildid: {config.guild_id},"
            f" sort: {config.sort_by}, event: {config.in_event}, type: {config.type}"
        )
        return
    if top_players is None:
        # get_top_players returns None for an unknown board type; previously
        # this crashed in the set comprehension below.
        return

    players = {n.tag: n for n in players if n.tag in set(x['player_tag'] for x in top_players)}

    # One board message renders 20 players.
    message_count = math.ceil(len(top_players) / 20)
    messages = await self.get_board_messages(channel_id, number_of_msg=message_count)
    if not messages:
        return

    for i, message in enumerate(messages):
        player_data = top_players[i*20:(i+1)*20]
        table = CLYTable()
        for x, y in enumerate(player_data):
            index = i*20 + x
            if config.render == 2:
                # Compact render: rank, primary column, name.
                table.add_row([index,
                               y[1],
                               players.get(y['player_tag'], mock).name])
            else:
                # Full render: rank, both columns, name.
                table.add_row([index,
                               y[1],
                               y[2],
                               players.get(y['player_tag'], mock).name])

        fmt = get_render_type(config, table)()
        embed = discord.Embed(colour=self.get_colour(config.type, config.in_event),
                              description=fmt,
                              timestamp=datetime.utcnow())
        # Plain string — the original used an f-string with no placeholders.
        embed.set_author(name='Event in Progress!' if config.in_event
                              else config.title,
                         icon_url=config.icon_url or 'https://cdn.discordapp.com/'
                                                     'emojis/592028799768592405.png?v=1')
        embed.set_footer(text='Last Updated')
        await message.edit(embed=embed, content=None)
@staticmethod
def get_colour(board_type, in_event):
    """Choose the embed colour for a board, with distinct event variants."""
    if board_type == 'donation':
        return discord.Colour.gold() if in_event else discord.Colour.blue()
    # Trophy (and any other) boards.
    return discord.Colour.purple() if in_event else discord.Colour.green()
@commands.command(hidden=True)
@commands.is_owner()
async def forceboard(self, ctx, channel_id: int = None):
    """Owner-only: force an immediate board refresh for a channel.

    Defaults to the channel the command was invoked from.
    """
    target = channel_id or ctx.channel.id
    await self.update_board(target)
    await ctx.confirm()
def setup(bot):
    """Standard discord.py extension entry point: register the cog."""
    bot.add_cog(DonationBoard(bot))
| 39.055118 | 111 | 0.526563 | import asyncio
import asyncpg
import coc
import discord
import logging
import math
from collections import namedtuple
from datetime import datetime
from discord.ext import commands, tasks
from cogs.utils.db_objects import DatabaseMessage
from cogs.utils.formatters import CLYTable, get_render_type
from cogs.utils import checks
log = logging.getLogger(__name__)
MockPlayer = namedtuple('MockPlayer', 'clan name')
mock = MockPlayer('Unknown', 'Unknown')
class DonationBoard(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.clan_updates = []
self._to_be_deleted = set()
self.bot.coc.add_events(
self.on_clan_member_donation,
self.on_clan_member_received,
self.on_clan_member_trophies_change,
self.on_clan_member_join
)
self.bot.coc._clan_retry_interval = 60
self.bot.coc.start_updates('clan')
self._batch_lock = asyncio.Lock(loop=bot.loop)
self._data_batch = {}
self._clan_events = set()
self.bulk_insert_loop.add_exception_type(asyncpg.PostgresConnectionError)
self.bulk_insert_loop.start()
self.update_board_loops.add_exception_type(asyncpg.PostgresConnectionError, coc.ClashOfClansException)
self.update_board_loops.start()
def cog_unload(self):
self.bulk_insert_loop.cancel()
self.update_board_loops.cancel()
self.bot.coc.remove_events(
self.on_clan_member_donation,
self.on_clan_member_received,
self.on_clan_member_trophies_change,
self.on_clan_member_join
)
@tasks.loop(seconds=30.0)
async def bulk_insert_loop(self):
async with self._batch_lock:
await self.bulk_insert()
@tasks.loop(seconds=60.0)
async def update_board_loops(self):
async with self._batch_lock:
clan_tags = list(self._clan_events)
self._clan_events.clear()
query = """SELECT DISTINCT boards.channel_id
FROM boards
INNER JOIN clans
ON clans.guild_id = boards.guild_id
WHERE clans.clan_tag = ANY($1::TEXT[])
"""
fetch = await self.bot.pool.fetch(query, clan_tags)
for n in fetch:
try:
await self.update_board(n['channel_id'])
except:
pass
async def bulk_insert(self):
query = """UPDATE players SET donations = players.donations + x.donations,
received = players.received + x.received,
trophies = x.trophies
FROM(
SELECT x.player_tag, x.donations, x.received, x.trophies
FROM jsonb_to_recordset($1::jsonb)
AS x(player_tag TEXT,
donations INTEGER,
received INTEGER,
trophies INTEGER)
)
AS x
WHERE players.player_tag = x.player_tag
AND players.season_id=$2
"""
query2 = """UPDATE eventplayers SET donations = eventplayers.donations + x.donations,
received = eventplayers.received + x.received,
trophies = x.trophies
FROM(
SELECT x.player_tag, x.donations, x.received, x.trophies
FROM jsonb_to_recordset($1::jsonb)
AS x(player_tag TEXT,
donations INTEGER,
received INTEGER,
trophies INTEGER)
)
AS x
WHERE eventplayers.player_tag = x.player_tag
AND eventplayers.live = true
"""
if self._data_batch:
response = await self.bot.pool.execute(query, list(self._data_batch.values()),
await self.bot.seasonconfig.get_season_id())
log.debug(f'Registered donations/received to the database. Status Code {response}.')
response = await self.bot.pool.execute(query2, list(self._data_batch.values()))
log.debug(f'Registered donations/received to the events database. Status Code {response}.')
self._data_batch.clear()
@commands.Cog.listener()
async def on_guild_channel_delete(self, channel):
if not isinstance(channel, discord.TextChannel):
return
query = "DELETE FROM messages WHERE channel_id = $1;"
query2 = """UPDATE boards
SET channel_id = NULL,
toggle = False
WHERE channel_id = $1;
"""
await self.bot.pool.execute(query, channel.id)
await self.bot.pool.execute(query2, channel.id)
self.bot.utils.board_config.invalidate(self.bot.utils, channel.id)
@commands.Cog.listener()
async def on_raw_message_delete(self, payload):
config = await self.bot.utils.board_config(payload.channel_id)
if not config:
return
if config.channel_id != payload.channel_id:
return
if payload.message_id in self._to_be_deleted:
self._to_be_deleted.discard(payload.message_id)
return
self.bot.utils.get_message.invalidate(self.bot.utils, payload.message_id)
message = await self.safe_delete(message_id=payload.message_id, delete_message=False)
if message:
await self.new_board_message(self.bot.get_channel(payload.channel_id), config.type)
@commands.Cog.listener()
async def on_raw_bulk_message_delete(self, payload):
config = await self.bot.utils.board_config(payload.channel_id)
if not config:
return
if config.channel_id != payload.channel_id:
return
for n in payload.message_ids:
if n in self._to_be_deleted:
self._to_be_deleted.discard(n)
continue
self.bot.utils.get_message.invalidate(self, n)
message = await self.safe_delete(message_id=n, delete_message=False)
if message:
await self.new_board_message(self.bot.get_channel(payload.channel_id), config.type)
async def on_clan_member_donation(self, old_donations, new_donations, player, clan):
log.debug(f'Received on_clan_member_donation event for player {player} of clan {clan}')
if old_donations > new_donations:
donations = new_donations
else:
donations = new_donations - old_donations
async with self._batch_lock:
try:
self._data_batch[player.tag]['donations'] = donations
except KeyError:
self._data_batch[player.tag] = {
'player_tag': player.tag,
'donations': donations,
'received': 0,
'trophies': player.trophies
}
self._clan_events.add(clan.tag)
async def on_clan_member_received(self, old_received, new_received, player, clan):
log.debug(f'Received on_clan_member_received event for player {player} of clan {clan}')
if old_received > new_received:
received = new_received
else:
received = new_received - old_received
async with self._batch_lock:
try:
self._data_batch[player.tag]['received'] = received
except KeyError:
self._data_batch[player.tag] = {
'player_tag': player.tag,
'donations': 0,
'received': received,
'trophies': player.trophies
}
self._clan_events.add(clan.tag)
async def on_clan_member_trophies_change(self, _, new_trophies, player, clan):
log.debug(f'Received on_clan_member_trophy_change event for player {player} of clan {clan}.')
async with self._batch_lock:
try:
self._data_batch[player.tag]['trophies'] = new_trophies
except KeyError:
self._data_batch[player.tag] = {
'player_tag': player.tag,
'donations': 0,
'received': 0,
'trophies': new_trophies
}
self._clan_events.add(clan.tag)
async def on_clan_member_join(self, member, clan):
player = await self.bot.coc.get_player(member.tag)
player_query = """INSERT INTO players (
player_tag,
donations,
received,
trophies,
start_trophies,
season_id,
start_friend_in_need,
start_sharing_is_caring,
start_attacks,
start_defenses,
start_best_trophies,
start_update
)
VALUES ($1,$2,$3,$4,$4,$5,$6,$7,$8,$9,$10,True)
ON CONFLICT (player_tag, season_id)
DO NOTHING
"""
response = await self.bot.pool.execute(
player_query,
player.tag,
player.donations,
player.received,
player.trophies,
await self.bot.seasonconfig.get_season_id(),
player.achievements_dict['Friend in Need'].value,
player.achievements_dict['Sharing is caring'].value,
player.attack_wins,
player.defense_wins,
player.best_trophies
)
log.debug(f'New member {member} joined clan {clan}. Performed a query to insert them into players. '
f'Status Code: {response}')
query = """SELECT events.id
FROM events
INNER JOIN clans
ON clans.guild_id = events.guild_id
WHERE clans.clan_tag = $1
AND events.start <= now()
AND events.finish >= now()
"""
fetch = await self.bot.pool.fetch(query, clan.tag)
if not fetch:
return
event_query = """INSERT INTO eventplayers (
player_tag,
trophies,
event_id,
start_friend_in_need,
start_sharing_is_caring,
start_attacks,
start_defenses,
start_trophies,
start_best_trophies,
start_update,
live
)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, True, True)
ON CONFLICT (player_tag, event_id)
DO UPDATE
SET live=True
WHERE eventplayers.player_tag = $1
AND eventplayers.event_id = $2
"""
for n in fetch:
response = await self.bot.pool.execute(
event_query,
player.tag,
player.trophies,
n['id'],
player.achievements_dict['Friend in Need'].value,
player.achievements_dict['Sharing is caring'].value,
player.attack_wins,
player.defense_wins,
player.trophies,
player.best_trophies
)
log.debug(f'New member {member} joined clan {clan}. '
f'Performed a query to insert them into eventplayers. Status Code: {response}')
async def new_board_message(self, channel, board_type):
if not channel:
return
try:
new_msg = await channel.send('Placeholder')
except (discord.NotFound, discord.Forbidden):
return
query = "INSERT INTO messages (guild_id, message_id, channel_id) VALUES ($1, $2, $3)"
await self.bot.pool.execute(query, new_msg.guild.id, new_msg.id, new_msg.channel.id)
event_config = await self.bot.utils.event_config(channel.id)
if event_config:
await self.bot.background.remove_event_msg(event_config.id, channel, board_type)
await self.bot.background.new_event_message(event_config, channel.guild.id, channel.id, board_type)
return new_msg
async def safe_delete(self, message_id, delete_message=True):
query = "DELETE FROM messages WHERE message_id = $1 RETURNING id, guild_id, message_id, channel_id"
fetch = await self.bot.pool.fetchrow(query, message_id)
if not fetch:
return None
message = DatabaseMessage(bot=self.bot, record=fetch)
if not delete_message:
return message
self._to_be_deleted.add(message_id)
m = await message.get_message()
if not m:
return
await m.delete()
async def get_board_messages(self, channel_id, number_of_msg=None):
config = await self.bot.utils.board_config(channel_id)
if not (config.channel or config.toggle):
return
fetch = await config.messages()
messages = [await n.get_message() for n in fetch if await n.get_message()]
size_of = len(messages)
if not number_of_msg or size_of == number_of_msg:
return messages
if size_of > number_of_msg:
for n in messages[number_of_msg:]:
await self.safe_delete(n.id)
return messages[:number_of_msg]
if not config.channel:
return
for _ in range(number_of_msg - size_of):
m = await self.new_board_message(config.channel, config.type)
if not m:
return
messages.append(m)
return messages
async def get_top_players(self, players, board_type, sort_by, in_event, season_id=None):
season_id = season_id or await self.bot.seasonconfig.get_season_id()
if board_type == 'donation':
column_1 = 'donations'
column_2 = 'received'
sort_by = 'donations' if sort_by == 'donation' else sort_by
elif board_type == 'trophy':
column_1 = 'trophies'
column_2 = 'trophies - start_trophies'
sort_by = column_2 if sort_by == 'gain' else column_1
else:
return
if in_event:
query = f"""SELECT player_tag, {column_1}, {column_2}
FROM eventplayers
WHERE player_tag=ANY($1::TEXT[])
AND live=true
ORDER BY {sort_by} DESC NULLS LAST
LIMIT 100;
"""
fetch = await self.bot.pool.fetch(query, [n.tag for n in players])
else:
query = f"""SELECT player_tag, {column_1}, {column_2}
FROM players
WHERE player_tag=ANY($1::TEXT[])
AND season_id=$2
ORDER BY {sort_by} DESC NULLS LAST
LIMIT 100;
"""
fetch = await self.bot.pool.fetch(query, [n.tag for n in players], season_id)
return fetch
async def update_board(self, channel_id):
config = await self.bot.utils.board_config(channel_id)
if not config:
return
if not config.toggle:
return
if not config.channel:
return
if config.in_event:
query = """SELECT DISTINCT clan_tag FROM clans WHERE guild_id=$1 AND in_event=$2"""
fetch = await self.bot.pool.fetch(query, config.guild_id, config.in_event)
else:
query = "SELECT DISTINCT clan_tag FROM clans WHERE guild_id=$1"
fetch = await self.bot.pool.fetch(query, config.guild_id)
clans = await self.bot.coc.get_clans((n[0] for n in fetch)).flatten()
players = []
for n in clans:
players.extend(p for p in n.itermembers)
try:
top_players = await self.get_top_players(players, config.type, config.sort_by, config.in_event)
except:
log.error(
f"{clans} channelid: {channel_id}, guildid: {config.guild_id},"
f" sort: {config.sort_by}, event: {config.in_event}, type: {config.type}"
)
return
players = {n.tag: n for n in players if n.tag in set(x['player_tag'] for x in top_players)}
message_count = math.ceil(len(top_players) / 20)
messages = await self.get_board_messages(channel_id, number_of_msg=message_count)
if not messages:
return
for i, v in enumerate(messages):
player_data = top_players[i*20:(i+1)*20]
table = CLYTable()
for x, y in enumerate(player_data):
index = i*20 + x
if config.render == 2:
table.add_row([index,
y[1],
players.get(y['player_tag'], mock).name])
else:
table.add_row([index,
y[1],
y[2],
players.get(y['player_tag'], mock).name])
render = get_render_type(config, table)
fmt = render()
e = discord.Embed(colour=self.get_colour(config.type, config.in_event),
description=fmt,
timestamp=datetime.utcnow()
)
e.set_author(name=f'Event in Progress!' if config.in_event
else config.title,
icon_url=config.icon_url or 'https://cdn.discordapp.com/'
'emojis/592028799768592405.png?v=1')
e.set_footer(text='Last Updated')
await v.edit(embed=e, content=None)
@staticmethod
def get_colour(board_type, in_event):
if board_type == 'donation':
if in_event:
return discord.Colour.gold()
return discord.Colour.blue()
if in_event:
return discord.Colour.purple()
return discord.Colour.green()
@commands.command(hidden=True)
@commands.is_owner()
async def forceboard(self, ctx, channel_id: int = None):
await self.update_board(channel_id or ctx.channel.id)
await ctx.confirm()
def setup(bot):
bot.add_cog(DonationBoard(bot))
| true | true |
f713de87a046a2be2116ea80ff02e9b22a5d08de | 24,907 | py | Python | modules/s3/s3notify.py | sahana/eden-core | 6af086d0617e84483aa74317c897ecffbe232596 | [
"MIT"
] | 2 | 2018-04-06T08:18:21.000Z | 2021-09-17T11:45:00.000Z | modules/s3/s3notify.py | sahana/eden-core | 6af086d0617e84483aa74317c897ecffbe232596 | [
"MIT"
] | 2 | 2015-02-14T18:58:17.000Z | 2015-02-24T07:22:39.000Z | modules/s3/s3notify.py | sahana/eden-core | 6af086d0617e84483aa74317c897ecffbe232596 | [
"MIT"
] | 3 | 2015-02-11T10:31:26.000Z | 2021-09-17T11:46:29.000Z | # -*- coding: utf-8 -*-
""" S3 Notifications
@copyright: 2011-2021 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import datetime
import json
import os
import string
import sys
from io import StringIO
from urllib.parse import urlencode
from urllib import parse as urlparse
from urllib import request as urllib2
from urllib.request import urlopen
from urllib.error import HTTPError
from uuid import uuid4
from gluon import current, TABLE, THEAD, TBODY, TR, TD, TH, XML
from .s3datetime import s3_decode_iso_datetime, s3_encode_iso_datetime, s3_utc
from .s3utils import s3_str, s3_truncate, s3_unicode
# =============================================================================
class S3Notifications(object):
""" Framework to send notifications about subscribed events """
# -------------------------------------------------------------------------
@classmethod
def check_subscriptions(cls):
    """
    Scheduler entry point, creates notification tasks for all
    active subscriptions which (may) have updates.

    @return: a short status message for the scheduler log
    """
    debug = current.log.debug
    now = datetime.datetime.utcnow()
    debug("S3Notifications.check_subscriptions(now=%s)" % now)

    subscriptions = cls._subscriptions(now)
    if not subscriptions:
        message = "No notifications to schedule."
    else:
        run_async = current.s3task.run_async
        for subscription in subscriptions:
            # Lock the resource so a concurrent scheduler run cannot
            # queue it twice, then hand off to an async worker task.
            subscription.update_record(locked = True)
            run_async("notify_notify", args=[subscription.id])
        message = "%s notifications scheduled." % len(subscriptions)

    debug(message)
    return message
# -------------------------------------------------------------------------
@classmethod
def notify(cls, resource_id):
    """
    Asynchronous task to notify a subscriber about updates,
    runs a POST?format=msg request against the subscribed
    controller which extracts the data and renders and sends
    the notification message (see send()).

    @param resource_id: the pr_subscription_resource record ID
    @return: True if there was nothing to do, otherwise the
             response/error message from the lookup request
    """
    _debug = current.log.debug
    _debug("S3Notifications.notify(resource_id=%s)" % resource_id)

    db = current.db
    s3db = current.s3db

    stable = s3db.pr_subscription
    rtable = db.pr_subscription_resource
    ftable = s3db.pr_filter

    # Extract the subscription data
    join = stable.on(rtable.subscription_id == stable.id)
    left = ftable.on(ftable.id == stable.filter_id)
    # @todo: should not need rtable.resource here
    row = db(rtable.id == resource_id).select(stable.id,
                                              stable.pe_id,
                                              stable.frequency,
                                              stable.notify_on,
                                              stable.method,
                                              stable.email_format,
                                              stable.attachment,
                                              rtable.id,
                                              rtable.resource,
                                              rtable.url,
                                              rtable.last_check_time,
                                              ftable.query,
                                              join = join,
                                              left = left,
                                              ).first()
    if not row:
        return True

    s = getattr(row, "pr_subscription")
    r = getattr(row, "pr_subscription_resource")
    f = getattr(row, "pr_filter")

    # Create a temporary token to authorize the lookup request
    auth_token = str(uuid4())

    # Store the auth_token in the subscription record
    r.update_record(auth_token = auth_token)
    db.commit()

    # Construct the send-URL
    public_url = current.deployment_settings.get_base_public_url()
    lookup_url = "%s/%s/%s" % (public_url,
                               current.request.application,
                               r.url.lstrip("/"))

    # Break up the URL into its components
    purl = list(urlparse.urlparse(lookup_url))

    # Subscription parameters
    # Date (must ensure we pass to REST as tz-aware)
    last_check_time = s3_encode_iso_datetime(r.last_check_time)
    query = {"subscription": auth_token, "format": "msg"}
    if "upd" in s.notify_on:
        query["~.modified_on__ge"] = "%sZ" % last_check_time
    else:
        query["~.created_on__ge"] = "%sZ" % last_check_time

    # Filters
    if f.query:
        from .s3filter import S3FilterString
        resource = s3db.resource(r.resource)
        fstring = S3FilterString(resource, f.query)
        for k, v in fstring.get_vars.items():
            if v is not None:
                if k in query:
                    # Same key used by the subscription params: merge
                    # into a list rather than overwriting.
                    value = query[k]
                    if type(value) is list:
                        value.append(v)
                    else:
                        query[k] = [value, v]
                else:
                    query[k] = v
        query_nice = s3_unicode(fstring.represent())
    else:
        query_nice = None

    # Add subscription parameters and filters to the URL query, and
    # put the URL back together
    query = urlencode(query)
    if purl[4]:
        query = "&".join((purl[4], query))
    page_url = urlparse.urlunparse([purl[0], # scheme
                                    purl[1], # netloc
                                    purl[2], # path
                                    purl[3], # params
                                    query,   # query
                                    purl[5], # fragment
                                    ])

    # Serialize data for send (avoid second lookup in send)
    data = json.dumps({"pe_id": s.pe_id,
                       "notify_on": s.notify_on,
                       "method": s.method,
                       "email_format": s.email_format,
                       "attachment": s.attachment,
                       "resource": r.resource,
                       "last_check_time": last_check_time,
                       "filter_query": query_nice,
                       "page_url": lookup_url,
                       "item_url": None,
                       })

    # Send the request
    _debug("Requesting %s" % page_url)
    req = urllib2.Request(page_url, data=data.encode("utf-8"))
    req.add_header("Content-Type", "application/json")
    success = False
    try:
        response = json.loads(urlopen(req).read())
        message = response["message"]
        if response["status"] == "success":
            success = True
    except HTTPError as e:
        message = ("HTTP %s: %s" % (e.code, e.read()))
    except Exception:
        # Narrowed from a bare except: SystemExit/KeyboardInterrupt now
        # propagate; any other failure is reported as the task result.
        exc_info = sys.exc_info()[:2]
        message = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
    _debug(message)

    # Update time stamps and unlock, invalidate auth token
    intervals = s3db.pr_subscription_check_intervals
    interval = datetime.timedelta(minutes=intervals.get(s.frequency, 0))
    if success:
        last_check_time = datetime.datetime.utcnow()
        next_check_time = last_check_time + interval
        r.update_record(auth_token = None,
                        locked = False,
                        last_check_time = last_check_time,
                        next_check_time = next_check_time,
                        )
    else:
        # Leave last/next check times unchanged so the failed window
        # is retried on the next scheduler run.
        r.update_record(auth_token = None,
                        locked = False,
                        )
    db.commit()

    # Done
    return message
# -------------------------------------------------------------------------
@classmethod
def send(cls, r, resource):
"""
Method to retrieve updates for a subscription, render the
notification message and send it - responds to POST?format=msg
requests to the respective resource.
@param r: the S3Request
@param resource: the S3Resource
"""
_debug = current.log.debug
_debug("S3Notifications.send()")
json_message = current.xml.json_message
# Read subscription data
source = r.body
source.seek(0)
data = source.read()
subscription = json.loads(data)
#_debug("Notify PE #%s by %s on %s of %s since %s" % \
# (subscription["pe_id"],
# str(subscription["method"]),
# str(subscription["notify_on"]),
# subscription["resource"],
# subscription["last_check_time"],
# ))
# Check notification settings
notify_on = subscription["notify_on"]
methods = subscription["method"]
if not notify_on or not methods:
return json_message(message = "No notifications configured "
"for this subscription")
# Authorization (pe_id must not be None)
pe_id = subscription["pe_id"]
if not pe_id:
r.unauthorised()
# Fields to extract
fields = resource.list_fields(key="notify_fields")
if "created_on" not in fields:
fields.append("created_on")
# Extract the data
data = resource.select(fields,
represent = True,
raw_data = True)
rows = data["rows"]
# How many records do we have?
numrows = len(rows)
if not numrows:
return json_message(message = "No records found")
#_debug("%s rows:" % numrows)
# Prepare meta-data
get_config = resource.get_config
settings = current.deployment_settings
page_url = subscription["page_url"]
crud_strings = current.response.s3.crud_strings.get(resource.tablename)
if crud_strings:
resource_name = crud_strings.title_list
else:
resource_name = string.capwords(resource.name, "_")
last_check_time = s3_decode_iso_datetime(subscription["last_check_time"])
email_format = subscription["email_format"]
if not email_format:
email_format = settings.get_msg_notify_email_format()
filter_query = subscription.get("filter_query")
meta_data = {"systemname": settings.get_system_name(),
"systemname_short": settings.get_system_name_short(),
"resource": resource_name,
"page_url": page_url,
"notify_on": notify_on,
"last_check_time": last_check_time,
"filter_query": filter_query,
"total_rows": numrows,
}
# Render contents for the message template(s)
renderer = get_config("notify_renderer")
if not renderer:
renderer = settings.get_msg_notify_renderer()
if not renderer:
renderer = cls._render
contents = {}
if email_format == "html" and "EMAIL" in methods:
contents["html"] = renderer(resource, data, meta_data, "html")
contents["default"] = contents["html"]
if email_format != "html" or "EMAIL" not in methods or len(methods) > 1:
contents["text"] = renderer(resource, data, meta_data, "text")
contents["default"] = contents["text"]
# Subject line
subject = get_config("notify_subject")
if not subject:
subject = settings.get_msg_notify_subject()
if callable(subject):
subject = subject(resource, data, meta_data)
from string import Template
subject = Template(subject).safe_substitute(S = "%(systemname)s",
s = "%(systemname_short)s",
r = "%(resource)s")
subject = subject % meta_data
# Attachment
attachment = subscription.get("attachment", False)
document_ids = None
if attachment:
attachment_fnc = settings.get_msg_notify_attachment()
if attachment_fnc:
document_ids = attachment_fnc(resource, data, meta_data)
# **data for send_by_pe_id function in s3msg
send_data = {}
send_data_fnc = settings.get_msg_notify_send_data()
if callable(send_data_fnc):
send_data = send_data_fnc(resource, data, meta_data)
# Helper function to find message templates from a priority list
join = lambda *f: os.path.join(current.request.folder, *f)
def get_msg_template(path, filenames):
for fn in filenames:
filepath = join(path, fn)
if os.path.exists(filepath):
try:
return open(filepath, "rb")
except:
pass
return None
# Render and send the message(s)
templates = settings.get_template()
if templates != "default" and not isinstance(templates, (tuple, list)):
templates = (templates,)
prefix = resource.get_config("notify_template", "notify")
send = current.msg.send_by_pe_id
success = False
errors = []
for method in methods:
error = None
# Get the message template
msg_template = None
filenames = ["%s_%s.html" % (prefix, method.lower())]
if method == "EMAIL" and email_format:
filenames.insert(0, "%s_email_%s.html" % (prefix, email_format))
if templates != "default":
for template in templates[::-1]:
path = join("modules", "templates", template, "views", "msg")
msg_template = get_msg_template(path, filenames)
if msg_template is not None:
break
if msg_template is None:
path = join("views", "msg")
msg_template = get_msg_template(path, filenames)
if msg_template is None:
msg_template = StringIO(s3_str(current.T("New updates are available.")))
# Select contents format
if method == "EMAIL" and email_format == "html":
output = contents["html"]
else:
output = contents["text"]
# Render the message
try:
message = current.response.render(msg_template, output)
except:
exc_info = sys.exc_info()[:2]
error = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
errors.append(error)
continue
finally:
if hasattr(msg_template, "close"):
msg_template.close()
if not message:
continue
# Send the message
#_debug("Sending message per %s" % method)
#_debug(message)
try:
sent = send(pe_id,
# RFC 2822
subject = s3_truncate(subject, 78),
message = message,
contact_method = method,
system_generated = True,
document_ids = document_ids,
**send_data
)
except:
exc_info = sys.exc_info()[:2]
error = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
sent = False
if sent:
# Successful if at least one notification went out
success = True
else:
if not error:
error = current.session.error
if isinstance(error, list):
error = "/".join(error)
if error:
errors.append(error)
# Done
if errors:
message = ", ".join(errors)
else:
message = "Success"
return json_message(success = success,
statuscode = 200 if success else 403,
message = message)
# -------------------------------------------------------------------------
    @classmethod
    def _subscriptions(cls, now):
        """
            Helper method to find all subscriptions which need to be
            notified now (i.e. due resources that have seen updates
            since their last check).
            @param now: current datetime (UTC)
            @return: joined Rows pr_subscription/pr_subscription_resource
                     (rtable.id only), or None if no due subscriptions
                     could be found
            @todo: take notify_on into account when checking
        """
        db = current.db
        s3db = current.s3db
        stable = s3db.pr_subscription
        rtable = db.pr_subscription_resource
        # Find all resources with due subscriptions
        next_check = rtable.next_check_time
        # NOTE: "!= True", "== False" and "== None" are deliberate -
        # these build DAL query expressions, not Python comparisons
        locked_deleted = (rtable.locked != True) & \
                         (rtable.deleted == False)
        query = ((next_check == None) |
                 (next_check <= now)) & \
                locked_deleted
        tname = rtable.resource
        last_check = rtable.last_check_time
        mtime = last_check.min()
        # One row per resource type, carrying the earliest last-check time
        rows = db(query).select(tname,
                                mtime,
                                groupby = tname,
                                )
        if not rows:
            return None
        # Select those which have updates
        resources = set()
        radd = resources.add
        for row in rows:
            tablename = row[tname]
            table = s3db.table(tablename)
            if not table or not "modified_on" in table.fields:
                # Can't notify updates in resources without modified_on
                continue
            modified_on = table.modified_on
            msince = row[mtime]
            if msince is None:
                # Never checked before => any existing record counts
                query = (table.id > 0)
            else:
                query = (modified_on >= msince)
            # Most recent modification in this resource (if any)
            update = db(query).select(modified_on,
                                      orderby = ~(modified_on),
                                      limitby = (0, 1)
                                      ).first()
            if update:
                radd((tablename, update.modified_on))
        if not resources:
            return None
        # Get all active subscriptions to these resources which
        # may need to be notified now:
        join = rtable.on((rtable.subscription_id == stable.id) & \
                         locked_deleted)
        query = None
        # OR-combine one sub-query per updated resource
        for rname, modified_on in resources:
            q = (tname == rname) & \
                ((last_check == None) |
                 (last_check <= modified_on))
            if query is None:
                query = q
            else:
                query |= q
        query = (stable.frequency != "never") & \
                (stable.deleted == False) & \
                ((next_check == None) | \
                 (next_check <= now)) & \
                query
        return db(query).select(rtable.id,
                                join = join,
                                )
# -------------------------------------------------------------------------
@classmethod
def _render(cls, resource, data, meta_data, format=None):
"""
Method to pre-render the contents for the message template
@param resource: the S3Resource
@param data: the data returned from S3Resource.select
@param meta_data: the meta data for the notification
@param format: the contents format ("text" or "html")
"""
created_on_selector = resource.prefix_selector("created_on")
created_on_colname = None
notify_on = meta_data["notify_on"]
last_check_time = meta_data["last_check_time"]
rows = data["rows"]
rfields = data["rfields"]
output = {}
new, upd = [], []
if format == "html":
# Pre-formatted HTML
colnames = []
new_headers = TR()
mod_headers = TR()
for rfield in rfields:
if rfield.selector == created_on_selector:
created_on_colname = rfield.colname
elif rfield.ftype != "id":
colnames.append(rfield.colname)
label = rfield.label
new_headers.append(TH(label))
mod_headers.append(TH(label))
for row in rows:
append_record = upd.append
if created_on_colname:
try:
created_on = row["_row"][created_on_colname]
except (KeyError, AttributeError):
pass
else:
if s3_utc(created_on) >= last_check_time:
append_record = new.append
tr = TR([TD(XML(row[colname])) for colname in colnames])
append_record(tr)
if "new" in notify_on and len(new):
output["new"] = len(new)
output["new_records"] = TABLE(THEAD(new_headers), TBODY(new))
else:
output["new"] = None
if "upd" in notify_on and len(upd):
output["upd"] = len(upd)
output["upd_records"] = TABLE(THEAD(new_headers), TBODY(upd))
else:
output["upd"] = None
else:
# Standard text format
labels = []
append = labels.append
for rfield in rfields:
if rfield.selector == created_on_selector:
created_on_colname = rfield.colname
elif rfield.ftype != "id":
append((rfield.colname, rfield.label))
for row in rows:
append_record = upd.append
if created_on_colname:
try:
created_on = row["_row"][created_on_colname]
except (KeyError, AttributeError):
pass
else:
if s3_utc(created_on) >= last_check_time:
append_record = new.append
record = []
append_column = record.append
for colname, label in labels:
append_column((label, row[colname]))
append_record(record)
if "new" in notify_on and len(new):
output["new"] = len(new)
output["new_records"] = new
else:
output["new"] = None
if "upd" in notify_on and len(upd):
output["upd"] = len(upd)
output["upd_records"] = upd
else:
output["upd"] = None
output.update(meta_data)
return output
# END =========================================================================
| 37.680787 | 88 | 0.500462 |
import datetime
import json
import os
import string
import sys
from io import StringIO
from urllib.parse import urlencode
from urllib import parse as urlparse
from urllib import request as urllib2
from urllib.request import urlopen
from urllib.error import HTTPError
from uuid import uuid4
from gluon import current, TABLE, THEAD, TBODY, TR, TD, TH, XML
from .s3datetime import s3_decode_iso_datetime, s3_encode_iso_datetime, s3_utc
from .s3utils import s3_str, s3_truncate, s3_unicode
class S3Notifications(object):
@classmethod
def check_subscriptions(cls):
_debug = current.log.debug
now = datetime.datetime.utcnow()
_debug("S3Notifications.check_subscriptions(now=%s)" % now)
subscriptions = cls._subscriptions(now)
if subscriptions:
run_async = current.s3task.run_async
for row in subscriptions:
row.update_record(locked = True)
run_async("notify_notify", args=[row.id])
message = "%s notifications scheduled." % len(subscriptions)
else:
message = "No notifications to schedule."
_debug(message)
return message
@classmethod
def notify(cls, resource_id):
_debug = current.log.debug
_debug("S3Notifications.notify(resource_id=%s)" % resource_id)
db = current.db
s3db = current.s3db
stable = s3db.pr_subscription
rtable = db.pr_subscription_resource
ftable = s3db.pr_filter
join = stable.on(rtable.subscription_id == stable.id)
left = ftable.on(ftable.id == stable.filter_id)
row = db(rtable.id == resource_id).select(stable.id,
stable.pe_id,
stable.frequency,
stable.notify_on,
stable.method,
stable.email_format,
stable.attachment,
rtable.id,
rtable.resource,
rtable.url,
rtable.last_check_time,
ftable.query,
join = join,
left = left,
).first()
if not row:
return True
s = getattr(row, "pr_subscription")
r = getattr(row, "pr_subscription_resource")
f = getattr(row, "pr_filter")
auth_token = str(uuid4())
r.update_record(auth_token = auth_token)
db.commit()
public_url = current.deployment_settings.get_base_public_url()
lookup_url = "%s/%s/%s" % (public_url,
current.request.application,
r.url.lstrip("/"))
purl = list(urlparse.urlparse(lookup_url))
last_check_time = s3_encode_iso_datetime(r.last_check_time)
query = {"subscription": auth_token, "format": "msg"}
if "upd" in s.notify_on:
query["~.modified_on__ge"] = "%sZ" % last_check_time
else:
query["~.created_on__ge"] = "%sZ" % last_check_time
if f.query:
from .s3filter import S3FilterString
resource = s3db.resource(r.resource)
fstring = S3FilterString(resource, f.query)
for k, v in fstring.get_vars.items():
if v is not None:
if k in query:
value = query[k]
if type(value) is list:
value.append(v)
else:
query[k] = [value, v]
else:
query[k] = v
query_nice = s3_unicode(fstring.represent())
else:
query_nice = None
query = urlencode(query)
if purl[4]:
query = "&".join((purl[4], query))
page_url = urlparse.urlunparse([purl[0],
purl[1],
purl[2],
purl[3],
query,
purl[5],
])
data = json.dumps({"pe_id": s.pe_id,
"notify_on": s.notify_on,
"method": s.method,
"email_format": s.email_format,
"attachment": s.attachment,
"resource": r.resource,
"last_check_time": last_check_time,
"filter_query": query_nice,
"page_url": lookup_url,
"item_url": None,
})
_debug("Requesting %s" % page_url)
req = urllib2.Request(page_url, data=data.encode("utf-8"))
req.add_header("Content-Type", "application/json")
success = False
try:
response = json.loads(urlopen(req).read())
message = response["message"]
if response["status"] == "success":
success = True
except HTTPError as e:
message = ("HTTP %s: %s" % (e.code, e.read()))
except:
exc_info = sys.exc_info()[:2]
message = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
_debug(message)
intervals = s3db.pr_subscription_check_intervals
interval = datetime.timedelta(minutes=intervals.get(s.frequency, 0))
if success:
last_check_time = datetime.datetime.utcnow()
next_check_time = last_check_time + interval
r.update_record(auth_token = None,
locked = False,
last_check_time = last_check_time,
next_check_time = next_check_time,
)
else:
r.update_record(auth_token = None,
locked = False,
)
db.commit()
return message
@classmethod
def send(cls, r, resource):
_debug = current.log.debug
_debug("S3Notifications.send()")
json_message = current.xml.json_message
source = r.body
source.seek(0)
data = source.read()
subscription = json.loads(data)
notify_on = subscription["notify_on"]
methods = subscription["method"]
if not notify_on or not methods:
return json_message(message = "No notifications configured "
"for this subscription")
pe_id = subscription["pe_id"]
if not pe_id:
r.unauthorised()
fields = resource.list_fields(key="notify_fields")
if "created_on" not in fields:
fields.append("created_on")
data = resource.select(fields,
represent = True,
raw_data = True)
rows = data["rows"]
numrows = len(rows)
if not numrows:
return json_message(message = "No records found")
get_config = resource.get_config
settings = current.deployment_settings
page_url = subscription["page_url"]
crud_strings = current.response.s3.crud_strings.get(resource.tablename)
if crud_strings:
resource_name = crud_strings.title_list
else:
resource_name = string.capwords(resource.name, "_")
last_check_time = s3_decode_iso_datetime(subscription["last_check_time"])
email_format = subscription["email_format"]
if not email_format:
email_format = settings.get_msg_notify_email_format()
filter_query = subscription.get("filter_query")
meta_data = {"systemname": settings.get_system_name(),
"systemname_short": settings.get_system_name_short(),
"resource": resource_name,
"page_url": page_url,
"notify_on": notify_on,
"last_check_time": last_check_time,
"filter_query": filter_query,
"total_rows": numrows,
}
renderer = get_config("notify_renderer")
if not renderer:
renderer = settings.get_msg_notify_renderer()
if not renderer:
renderer = cls._render
contents = {}
if email_format == "html" and "EMAIL" in methods:
contents["html"] = renderer(resource, data, meta_data, "html")
contents["default"] = contents["html"]
if email_format != "html" or "EMAIL" not in methods or len(methods) > 1:
contents["text"] = renderer(resource, data, meta_data, "text")
contents["default"] = contents["text"]
subject = get_config("notify_subject")
if not subject:
subject = settings.get_msg_notify_subject()
if callable(subject):
subject = subject(resource, data, meta_data)
from string import Template
subject = Template(subject).safe_substitute(S = "%(systemname)s",
s = "%(systemname_short)s",
r = "%(resource)s")
subject = subject % meta_data
attachment = subscription.get("attachment", False)
document_ids = None
if attachment:
attachment_fnc = settings.get_msg_notify_attachment()
if attachment_fnc:
document_ids = attachment_fnc(resource, data, meta_data)
send_data = {}
send_data_fnc = settings.get_msg_notify_send_data()
if callable(send_data_fnc):
send_data = send_data_fnc(resource, data, meta_data)
join = lambda *f: os.path.join(current.request.folder, *f)
def get_msg_template(path, filenames):
for fn in filenames:
filepath = join(path, fn)
if os.path.exists(filepath):
try:
return open(filepath, "rb")
except:
pass
return None
templates = settings.get_template()
if templates != "default" and not isinstance(templates, (tuple, list)):
templates = (templates,)
prefix = resource.get_config("notify_template", "notify")
send = current.msg.send_by_pe_id
success = False
errors = []
for method in methods:
error = None
msg_template = None
filenames = ["%s_%s.html" % (prefix, method.lower())]
if method == "EMAIL" and email_format:
filenames.insert(0, "%s_email_%s.html" % (prefix, email_format))
if templates != "default":
for template in templates[::-1]:
path = join("modules", "templates", template, "views", "msg")
msg_template = get_msg_template(path, filenames)
if msg_template is not None:
break
if msg_template is None:
path = join("views", "msg")
msg_template = get_msg_template(path, filenames)
if msg_template is None:
msg_template = StringIO(s3_str(current.T("New updates are available.")))
if method == "EMAIL" and email_format == "html":
output = contents["html"]
else:
output = contents["text"]
try:
message = current.response.render(msg_template, output)
except:
exc_info = sys.exc_info()[:2]
error = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
errors.append(error)
continue
finally:
if hasattr(msg_template, "close"):
msg_template.close()
if not message:
continue
try:
sent = send(pe_id,
subject = s3_truncate(subject, 78),
message = message,
contact_method = method,
system_generated = True,
document_ids = document_ids,
**send_data
)
except:
exc_info = sys.exc_info()[:2]
error = ("%s: %s" % (exc_info[0].__name__, exc_info[1]))
sent = False
if sent:
success = True
else:
if not error:
error = current.session.error
if isinstance(error, list):
error = "/".join(error)
if error:
errors.append(error)
if errors:
message = ", ".join(errors)
else:
message = "Success"
return json_message(success = success,
statuscode = 200 if success else 403,
message = message)
@classmethod
def _subscriptions(cls, now):
db = current.db
s3db = current.s3db
stable = s3db.pr_subscription
rtable = db.pr_subscription_resource
next_check = rtable.next_check_time
locked_deleted = (rtable.locked != True) & \
(rtable.deleted == False)
query = ((next_check == None) |
(next_check <= now)) & \
locked_deleted
tname = rtable.resource
last_check = rtable.last_check_time
mtime = last_check.min()
rows = db(query).select(tname,
mtime,
groupby = tname,
)
if not rows:
return None
resources = set()
radd = resources.add
for row in rows:
tablename = row[tname]
table = s3db.table(tablename)
if not table or not "modified_on" in table.fields:
continue
modified_on = table.modified_on
msince = row[mtime]
if msince is None:
query = (table.id > 0)
else:
query = (modified_on >= msince)
update = db(query).select(modified_on,
orderby = ~(modified_on),
limitby = (0, 1)
).first()
if update:
radd((tablename, update.modified_on))
if not resources:
return None
# Get all active subscriptions to these resources which
# may need to be notified now:
join = rtable.on((rtable.subscription_id == stable.id) & \
locked_deleted)
query = None
for rname, modified_on in resources:
q = (tname == rname) & \
((last_check == None) |
(last_check <= modified_on))
if query is None:
query = q
else:
query |= q
query = (stable.frequency != "never") & \
(stable.deleted == False) & \
((next_check == None) | \
(next_check <= now)) & \
query
return db(query).select(rtable.id,
join = join,
)
# -------------------------------------------------------------------------
@classmethod
def _render(cls, resource, data, meta_data, format=None):
created_on_selector = resource.prefix_selector("created_on")
created_on_colname = None
notify_on = meta_data["notify_on"]
last_check_time = meta_data["last_check_time"]
rows = data["rows"]
rfields = data["rfields"]
output = {}
new, upd = [], []
if format == "html":
# Pre-formatted HTML
colnames = []
new_headers = TR()
mod_headers = TR()
for rfield in rfields:
if rfield.selector == created_on_selector:
created_on_colname = rfield.colname
elif rfield.ftype != "id":
colnames.append(rfield.colname)
label = rfield.label
new_headers.append(TH(label))
mod_headers.append(TH(label))
for row in rows:
append_record = upd.append
if created_on_colname:
try:
created_on = row["_row"][created_on_colname]
except (KeyError, AttributeError):
pass
else:
if s3_utc(created_on) >= last_check_time:
append_record = new.append
tr = TR([TD(XML(row[colname])) for colname in colnames])
append_record(tr)
if "new" in notify_on and len(new):
output["new"] = len(new)
output["new_records"] = TABLE(THEAD(new_headers), TBODY(new))
else:
output["new"] = None
if "upd" in notify_on and len(upd):
output["upd"] = len(upd)
output["upd_records"] = TABLE(THEAD(new_headers), TBODY(upd))
else:
output["upd"] = None
else:
# Standard text format
labels = []
append = labels.append
for rfield in rfields:
if rfield.selector == created_on_selector:
created_on_colname = rfield.colname
elif rfield.ftype != "id":
append((rfield.colname, rfield.label))
for row in rows:
append_record = upd.append
if created_on_colname:
try:
created_on = row["_row"][created_on_colname]
except (KeyError, AttributeError):
pass
else:
if s3_utc(created_on) >= last_check_time:
append_record = new.append
record = []
append_column = record.append
for colname, label in labels:
append_column((label, row[colname]))
append_record(record)
if "new" in notify_on and len(new):
output["new"] = len(new)
output["new_records"] = new
else:
output["new"] = None
if "upd" in notify_on and len(upd):
output["upd"] = len(upd)
output["upd_records"] = upd
else:
output["upd"] = None
output.update(meta_data)
return output
# END =========================================================================
| true | true |
f713de958e85322d46d14b9bb7f2720bd65ba18e | 577 | py | Python | tests/test_int.py | stephend017/pyparcel | addef8f5da9fce8d0204fb264f7735bd607c9619 | [
"MIT"
] | null | null | null | tests/test_int.py | stephend017/pyparcel | addef8f5da9fce8d0204fb264f7735bd607c9619 | [
"MIT"
] | null | null | null | tests/test_int.py | stephend017/pyparcel | addef8f5da9fce8d0204fb264f7735bd607c9619 | [
"MIT"
] | null | null | null | import struct
import unittest
from typing import List
import pyparcel
# Signed 32-bit sample values: both extremes, zero, and a spread of
# small/medium magnitudes on either side of zero.
DATA: List[int] = [
    -1 << 31,  # INT32_MIN: lowest value struct format "i" can hold
    -1000,
    -57,
    -26,
    -20,
    -5,
    -2,
    -1,
    0,
    1,
    2,
    5,
    20,
    57,
    1000,
    (1 << 31) - 1,  # INT32_MAX
]
class MyTestCase(unittest.TestCase):
    """Round-trip tests for pyparcel against the reference struct module."""

    def test_pack(self):
        # Packing must produce the same bytes as struct's native "i" format.
        for value in DATA:
            self.assertEqual(struct.pack("i", value), pyparcel.pack(value))

    def test_pack_unpack(self):
        # pack followed by unpack must be the identity on every sample.
        for value in DATA:
            packed = pyparcel.pack(value)
            self.assertEqual(value, pyparcel.unpack(packed, int()))
if __name__ == "__main__":
    # Allow running this module directly: python test_int.py
    unittest.main()
| 14.794872 | 73 | 0.535529 | import struct
import unittest
from typing import List
import pyparcel
DATA: List[int] = [
-1 << 31,
-1000,
-57,
-26,
-20,
-5,
-2,
-1,
0,
1,
2,
5,
20,
57,
1000,
(1 << 31) - 1,
]
class MyTestCase(unittest.TestCase):
def test_pack(self):
for i in DATA:
self.assertEqual(pyparcel.pack(i), struct.pack("i", i))
def test_pack_unpack(self):
for i in DATA:
self.assertEqual(i, pyparcel.unpack(pyparcel.pack(i), int()))
if __name__ == "__main__":
unittest.main()
| true | true |
f713df1edf0e8bc7dd726bc88abbe1a02611bb76 | 2,984 | py | Python | adamp/sgdp.py | MOONJOOYOUNG/AdamP | 64a63106a2ac62bcbe90627f2a83ec1b488f3416 | [
"MIT"
] | 1 | 2021-07-09T21:00:57.000Z | 2021-07-09T21:00:57.000Z | adamp/sgdp.py | MOONJOOYOUNG/AdamP | 64a63106a2ac62bcbe90627f2a83ec1b488f3416 | [
"MIT"
] | null | null | null | adamp/sgdp.py | MOONJOOYOUNG/AdamP | 64a63106a2ac62bcbe90627f2a83ec1b488f3416 | [
"MIT"
] | null | null | null | """
AdamP
Copyright (c) 2020-present NAVER Corp.
MIT license
"""
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer, required
import math
class SGDP(Optimizer):
    """
    SGDP: SGD with momentum, projection, and scaled weight decay.

    Variant of SGD+momentum from "AdamP: Slowing Down the Slowdown for
    Momentum Optimizers on Scale-invariant Weights" (Heo et al.): when the
    gradient is nearly orthogonal to the weight tensor (scale-invariant
    weights, e.g. those followed by normalization layers), the radial
    component of the update is projected out and the effective weight
    decay is reduced by `wd_ratio`.

    Args:
        params: iterable of parameters or parameter groups
        lr: learning rate (required)
        momentum: momentum factor (default: 0)
        dampening: dampening for momentum (default: 0)
        weight_decay: decoupled weight-decay factor (default: 0)
        nesterov: enables Nesterov momentum (default: False)
        eps: small constant to avoid division by zero (default: 1e-8)
        delta: cosine-similarity threshold below which the projection
            is applied (default: 0.1)
        wd_ratio: weight-decay reduction ratio used when the projection
            triggers (default: 0.1)
    """
    def __init__(self, params, lr=required, momentum=0, dampening=0,
                 weight_decay=0, nesterov=False, eps=1e-8, delta=0.1, wd_ratio=0.1):
        defaults = dict(lr=lr, momentum=momentum, dampening=dampening, weight_decay=weight_decay,
                        nesterov=nesterov, eps=eps, delta=delta, wd_ratio=wd_ratio)
        super(SGDP, self).__init__(params, defaults)

    def _channel_view(self, x):
        # Flatten to one row per output channel (dim 0).
        return x.view(x.size(0), -1)

    def _layer_view(self, x):
        # Flatten the whole tensor into a single row.
        return x.view(1, -1)

    def _cosine_similarity(self, x, y, eps, view_func):
        """Row-wise absolute cosine similarity of x and y under view_func."""
        x = view_func(x)
        y = view_func(y)
        x_norm = x.norm(dim=1).add_(eps)
        y_norm = y.norm(dim=1).add_(eps)
        dot = (x * y).sum(dim=1)
        return dot.abs() / x_norm / y_norm

    def _projection(self, p, grad, perturb, delta, wd_ratio, eps):
        """
        Project `perturb` onto the tangent space of `p` when `grad` is
        (nearly) orthogonal to `p` - tried per-channel first, then
        per-layer.

        Returns:
            (perturb, wd): the possibly-projected perturbation, and the
            weight-decay multiplier (1, or `wd_ratio` if projected).
        """
        wd = 1
        expand_size = [-1] + [1] * (len(p.shape) - 1)
        for view_func in [self._channel_view, self._layer_view]:
            cosine_sim = self._cosine_similarity(grad, p.data, eps, view_func)
            if cosine_sim.max() < delta / math.sqrt(view_func(p.data).size(1)):
                # Remove the component of perturb parallel to p
                p_n = p.data / view_func(p.data).norm(dim=1).view(expand_size).add_(eps)
                perturb -= p_n * view_func(p_n * perturb).sum(dim=1).view(expand_size)
                wd = wd_ratio
                return perturb, wd
        return perturb, wd

    def step(self, closure=None):
        """
        Perform a single optimization step.

        Args:
            closure: optional callable that re-evaluates the model and
                returns the loss.
        """
        loss = None
        if closure is not None:
            loss = closure()
        for group in self.param_groups:
            weight_decay = group['weight_decay']
            momentum = group['momentum']
            dampening = group['dampening']
            nesterov = group['nesterov']
            for p in group['params']:
                if p.grad is None:
                    continue
                grad = p.grad.data
                state = self.state[p]
                # State initialization
                if len(state) == 0:
                    state['momentum'] = torch.zeros_like(p.data)
                # SGD momentum: buf = momentum * buf + (1 - dampening) * grad
                # (keyword `alpha` form - the positional add_(scalar, tensor)
                # overload is deprecated and removed in recent PyTorch)
                buf = state['momentum']
                buf.mul_(momentum).add_(grad, alpha=1 - dampening)
                if nesterov:
                    d_p = grad + momentum * buf
                else:
                    d_p = buf
                # Projection (only meaningful for tensors with >1 dim)
                wd_ratio = 1
                if len(p.shape) > 1:
                    d_p, wd_ratio = self._projection(p, grad, d_p, group['delta'], group['wd_ratio'], group['eps'])
                # Decoupled weight decay, scaled down when projected
                if weight_decay != 0:
                    p.data.mul_(1 - group['lr'] * group['weight_decay'] * wd_ratio / (1 - momentum))
                # Parameter update: p <- p - lr * d_p
                p.data.add_(d_p, alpha=-group['lr'])
        return loss
| 32.086022 | 115 | 0.529491 |
import torch
import torch.nn as nn
from torch.optim.optimizer import Optimizer, required
import math
class SGDP(Optimizer):
def __init__(self, params, lr=required, momentum=0, dampening=0,
weight_decay=0, nesterov=False, eps=1e-8, delta=0.1, wd_ratio=0.1):
defaults = dict(lr=lr, momentum=momentum, dampening=dampening, weight_decay=weight_decay,
nesterov=nesterov, eps=eps, delta=delta, wd_ratio=wd_ratio)
super(SGDP, self).__init__(params, defaults)
def _channel_view(self, x):
return x.view(x.size(0), -1)
def _layer_view(self, x):
return x.view(1, -1)
def _cosine_similarity(self, x, y, eps, view_func):
x = view_func(x)
y = view_func(y)
x_norm = x.norm(dim=1).add_(eps)
y_norm = y.norm(dim=1).add_(eps)
dot = (x * y).sum(dim=1)
return dot.abs() / x_norm / y_norm
def _projection(self, p, grad, perturb, delta, wd_ratio, eps):
wd = 1
expand_size = [-1] + [1] * (len(p.shape) - 1)
for view_func in [self._channel_view, self._layer_view]:
cosine_sim = self._cosine_similarity(grad, p.data, eps, view_func)
if cosine_sim.max() < delta / math.sqrt(view_func(p.data).size(1)):
p_n = p.data / view_func(p.data).norm(dim=1).view(expand_size).add_(eps)
perturb -= p_n * view_func(p_n * perturb).sum(dim=1).view(expand_size)
wd = wd_ratio
return perturb, wd
return perturb, wd
def step(self, closure=None):
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
weight_decay = group['weight_decay']
momentum = group['momentum']
dampening = group['dampening']
nesterov = group['nesterov']
for p in group['params']:
if p.grad is None:
continue
grad = p.grad.data
state = self.state[p]
if len(state) == 0:
state['momentum'] = torch.zeros_like(p.data)
buf = state['momentum']
buf.mul_(momentum).add_(1 - dampening, grad)
if nesterov:
d_p = grad + momentum * buf
else:
d_p = buf
wd_ratio = 1
if len(p.shape) > 1:
d_p, wd_ratio = self._projection(p, grad, d_p, group['delta'], group['wd_ratio'], group['eps'])
if weight_decay != 0:
p.data.mul_(1 - group['lr'] * group['weight_decay'] * wd_ratio / (1-momentum))
p.data.add_(-group['lr'], d_p)
return loss
| true | true |
f713dfb9ff4a0320ae7a2df342bfd77d79da60df | 1,422 | py | Python | api/api.py | PranavPusarla/WeightTracker | 16a4241b44535555d924ec39d1969b6e13c4d0a9 | [
"MIT"
] | null | null | null | api/api.py | PranavPusarla/WeightTracker | 16a4241b44535555d924ec39d1969b6e13c4d0a9 | [
"MIT"
] | null | null | null | api/api.py | PranavPusarla/WeightTracker | 16a4241b44535555d924ec39d1969b6e13c4d0a9 | [
"MIT"
] | null | null | null | from flask import Flask, jsonify, send_file, url_for
import data_functions
from datetime import datetime
app = Flask(__name__) #__name__ is a special variable in python that creates an instance of the web app
@app.route("/", methods=['GET'])
def hello():
    """Root endpoint: plain-text liveness greeting."""
    return "Hello World"
@app.route("/hello")
def helloo():
    """Secondary greeting endpoint (distinct from the root route)."""
    return "Hello Not World"
@app.route("/total_loss/<first_name>/<last_name>", methods=['GET'])
def total_loss(first_name, last_name):
    """Return the named user's total weight loss as a plain-text number."""
    loss = data_functions.total_loss(first_name, last_name)
    return str(loss)
@app.route("/total_weights_graph/<first_name>/<last_name>", methods=['GET'])
def total_weights(first_name, last_name):
    """Regenerate the all-time weights graph for the user and return its URL.

    data_functions writes the image into the static folder; the response
    body is the absolute URL of that image.  (Removed a dead local that
    held a machine-specific absolute path.)
    """
    data_functions.get_total_weights(first_name, last_name)
    # NOTE(review): host is hard-coded to the local dev server; consider
    # url_for(..., _external=True) or request.host_url for deployment.
    return "http://127.0.0.1:5000" + url_for('static', filename="total_weights_graph.png")
@app.route("/weekly_weights_graph/<first_name>/<last_name>", methods=['GET'])
def week_weights(first_name, last_name):
    """Regenerate the weekly weights graph for the user and return its URL."""
    data_functions.get_week_weights(first_name, last_name)
    graph_path = url_for('static', filename="week_weights_graph.png")
    return "http://127.0.0.1:5000" + graph_path
@app.route("/add_weight/<first_name>/<last_name>/<weight>", methods=['GET', 'POST'])
def add_weight(first_name, last_name, weight):
    """Record a new weight entry for the user, timestamped with 'now'.

    Returns a JSON acknowledgement.  The original bare ``return`` made the
    view return None, which Flask rejects with a 500 error.
    """
    data_functions.add_weight(first_name, last_name, int(weight), datetime.now().timestamp())
    return jsonify(success=True)
if __name__ == '__main__':
    # Development server only; use a production WSGI server for deployment
    app.run(debug=True)
| 37.421053 | 103 | 0.734177 | from flask import Flask, jsonify, send_file, url_for
import data_functions
from datetime import datetime
app = Flask(__name__)
@app.route("/", methods =['GET'])
def hello():
return ("Hello World")
@app.route("/hello")
def helloo():
return "Hello Not World"
@app.route("/total_loss/<first_name>/<last_name>", methods = ['GET'])
def total_loss(first_name, last_name):
return str(data_functions.total_loss(first_name, last_name))
@app.route("/total_weights_graph/<first_name>/<last_name>", methods = ['GET'])
def total_weights(first_name, last_name):
data_functions.get_total_weights(first_name, last_name)
filename = '/Users/Pranav/PycharmProjects/WeightTracker'
return "http://127.0.0.1:5000" + url_for('static', filename="total_weights_graph.png")
@app.route("/weekly_weights_graph/<first_name>/<last_name>", methods = ['GET'])
def week_weights(first_name, last_name):
data_functions.get_week_weights(first_name, last_name)
return "http://127.0.0.1:5000" + url_for('static', filename="week_weights_graph.png")
@app.route("/add_weight/<first_name>/<last_name>/<weight>", methods = ['GET','POST'])
def add_weight(first_name, last_name, weight):
data_functions.add_weight(first_name, last_name, int(weight), datetime.now().timestamp())
return
if __name__ == '__main__':
app.run(debug=True)
| true | true |
f713e15ef2d53b7068d6ee22edb42b05f24f1d54 | 337 | py | Python | torch_nlp_utils/common/checks.py | Nemexur/torch_data_utils | c3949f29ffb0b206ddee15dd8b83a34654ff11f3 | [
"Apache-2.0"
] | 2 | 2020-06-14T15:00:25.000Z | 2020-06-15T06:03:28.000Z | torch_nlp_utils/common/checks.py | Nemexur/torch-nlp-utils | c3949f29ffb0b206ddee15dd8b83a34654ff11f3 | [
"Apache-2.0"
] | null | null | null | torch_nlp_utils/common/checks.py | Nemexur/torch-nlp-utils | c3949f29ffb0b206ddee15dd8b83a34654ff11f3 | [
"Apache-2.0"
] | 1 | 2020-06-15T06:03:12.000Z | 2020-06-15T06:03:12.000Z | class ConfigurationError(Exception):
"""
The exception raised by any object when it's misconfigured
(e.g. missing properties, invalid properties, unknown properties).
"""
def __init__(self, message):
super().__init__()
self.message = message
def __str__(self):
return repr(self.message)
| 25.923077 | 70 | 0.658754 | class ConfigurationError(Exception):
def __init__(self, message):
super().__init__()
self.message = message
def __str__(self):
return repr(self.message)
| true | true |
f713e2f59198bac6d20cc010ae1e11de776dd8f8 | 2,655 | py | Python | databuilder/setup.py | abhishek-ch/amundsen | 0988547cd4e551488001f8327dd5db67198d9060 | [
"Apache-2.0"
] | null | null | null | databuilder/setup.py | abhishek-ch/amundsen | 0988547cd4e551488001f8327dd5db67198d9060 | [
"Apache-2.0"
] | null | null | null | databuilder/setup.py | abhishek-ch/amundsen | 0988547cd4e551488001f8327dd5db67198d9060 | [
"Apache-2.0"
] | 1 | 2021-07-19T11:30:55.000Z | 2021-07-19T11:30:55.000Z | # Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from setuptools import find_packages, setup
__version__ = '4.3.1'
requirements = [
"neo4j-driver>=1.7.2,<4.0",
"pytz>=2018.4",
"statsd>=3.2.1",
"retrying>=1.3.3",
"requests>=2.23.0,<3.0",
"elasticsearch>=6.2.0,<7.0",
"pyhocon>=0.3.42",
"unidecode",
"Jinja2>=2.10.0,<2.12",
"pandas>=0.21.0,<1.2.0",
"amundsen-rds>=0.0.4"
]
kafka = ['confluent-kafka==1.0.0']
cassandra = ['cassandra-driver==3.20.1']
glue = ['boto3==1.10.1']
snowflake = [
'snowflake-connector-python',
'snowflake-sqlalchemy'
]
athena = ['PyAthena[SQLAlchemy]>=1.0.0, <2.0.0']
# Python API client for google
# License: Apache Software License
# Upstream url: https://github.com/googleapis/google-api-python-client
bigquery = [
'google-api-python-client>=1.6.0, <2.0.0dev',
'google-auth-httplib2>=0.0.1',
'google-auth>=1.0.0, <2.0.0dev'
]
jsonpath = ['jsonpath_rw==1.4.0']
db2 = [
'ibm_db==3.0.1',
'ibm-db-sa-py3==0.3.1-1'
]
dremio = [
'pyodbc==4.0.30'
]
druid = [
'pydruid'
]
spark = [
'pyspark == 3.0.1'
]
neptune = [
'amundsen-gremlin>=0.0.9',
'Flask==1.0.2',
'gremlinpython==3.4.3',
'requests-aws4auth==0.9',
'typing-extensions==3.7.4',
'overrides==2.5',
'boto3==1.10.1'
]
feast = [
'feast==0.8.0'
]
atlas = [
'pyatlasclient==1.1.2'
]
rds = [
'sqlalchemy>=1.3.6,<1.4',
'mysqlclient>=1.3.6,<3'
]
all_deps = requirements + kafka + cassandra + glue + snowflake + athena + \
bigquery + jsonpath + db2 + dremio + druid + spark + feast + neptune + rds
setup(
name='amundsen-databuilder',
version=__version__,
description='Amundsen Data builder',
url='https://www.github.com/amundsen-io/amundsendatabuilder',
maintainer='Amundsen TSC',
maintainer_email='amundsen-tsc@lists.lfai.foundation',
packages=find_packages(exclude=['tests*']),
dependency_links=[],
install_requires=requirements,
python_requires='>=3.6',
extras_require={
'all': all_deps,
'kafka': kafka, # To use with Kafka source extractor
'cassandra': cassandra,
'glue': glue,
'snowflake': snowflake,
'athena': athena,
'bigquery': bigquery,
'jsonpath': jsonpath,
'db2': db2,
'dremio': dremio,
'druid': druid,
'neptune': neptune,
'delta': spark,
'feast': feast,
'atlas': atlas,
'rds': rds
},
classifiers=[
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
| 21.24 | 78 | 0.583804 |
from setuptools import find_packages, setup
__version__ = '4.3.1'
requirements = [
"neo4j-driver>=1.7.2,<4.0",
"pytz>=2018.4",
"statsd>=3.2.1",
"retrying>=1.3.3",
"requests>=2.23.0,<3.0",
"elasticsearch>=6.2.0,<7.0",
"pyhocon>=0.3.42",
"unidecode",
"Jinja2>=2.10.0,<2.12",
"pandas>=0.21.0,<1.2.0",
"amundsen-rds>=0.0.4"
]
kafka = ['confluent-kafka==1.0.0']
cassandra = ['cassandra-driver==3.20.1']
glue = ['boto3==1.10.1']
snowflake = [
'snowflake-connector-python',
'snowflake-sqlalchemy'
]
athena = ['PyAthena[SQLAlchemy]>=1.0.0, <2.0.0']
bigquery = [
'google-api-python-client>=1.6.0, <2.0.0dev',
'google-auth-httplib2>=0.0.1',
'google-auth>=1.0.0, <2.0.0dev'
]
jsonpath = ['jsonpath_rw==1.4.0']
db2 = [
'ibm_db==3.0.1',
'ibm-db-sa-py3==0.3.1-1'
]
dremio = [
'pyodbc==4.0.30'
]
druid = [
'pydruid'
]
spark = [
'pyspark == 3.0.1'
]
neptune = [
'amundsen-gremlin>=0.0.9',
'Flask==1.0.2',
'gremlinpython==3.4.3',
'requests-aws4auth==0.9',
'typing-extensions==3.7.4',
'overrides==2.5',
'boto3==1.10.1'
]
feast = [
'feast==0.8.0'
]
atlas = [
'pyatlasclient==1.1.2'
]
rds = [
'sqlalchemy>=1.3.6,<1.4',
'mysqlclient>=1.3.6,<3'
]
all_deps = requirements + kafka + cassandra + glue + snowflake + athena + \
bigquery + jsonpath + db2 + dremio + druid + spark + feast + neptune + rds
setup(
name='amundsen-databuilder',
version=__version__,
description='Amundsen Data builder',
url='https://www.github.com/amundsen-io/amundsendatabuilder',
maintainer='Amundsen TSC',
maintainer_email='amundsen-tsc@lists.lfai.foundation',
packages=find_packages(exclude=['tests*']),
dependency_links=[],
install_requires=requirements,
python_requires='>=3.6',
extras_require={
'all': all_deps,
'kafka': kafka,
'cassandra': cassandra,
'glue': glue,
'snowflake': snowflake,
'athena': athena,
'bigquery': bigquery,
'jsonpath': jsonpath,
'db2': db2,
'dremio': dremio,
'druid': druid,
'neptune': neptune,
'delta': spark,
'feast': feast,
'atlas': atlas,
'rds': rds
},
classifiers=[
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
| true | true |
f713e34b34f9d2fa0ac1d2261cc79b95c3f4b50f | 27,503 | py | Python | performer/fast_attention/jax/fast_attention.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | [
"Apache-2.0"
] | 7 | 2021-06-15T05:54:29.000Z | 2022-02-21T06:57:06.000Z | performer/fast_attention/jax/fast_attention.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | [
"Apache-2.0"
] | 12 | 2021-08-25T16:15:31.000Z | 2022-02-10T05:10:37.000Z | performer/fast_attention/jax/fast_attention.py | deepneuralmachine/google-research | d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231 | [
"Apache-2.0"
] | 5 | 2021-11-25T07:40:17.000Z | 2022-03-22T11:13:39.000Z | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Core Fast Attention Module for Flax.
Implementation of the approximate fast softmax and generalized
attention mechanism leveraging structured random feature maps [RFM] techniques
and low rank decomposition of the attention matrix.
"""
# pylint: disable=invalid-name, missing-function-docstring, line-too-long
import abc
from collections.abc import Iterable # pylint: disable=g-importing-member
import functools
from absl import logging
import gin
import jax
from jax import lax
from jax import random
import jax.numpy as jnp
import numpy as onp
# Nonlinear mappings encoding different attention kernels.
gin.external_configurable(jnp.cos, 'jcos')
gin.external_configurable(jnp.sin, 'jsin')
gin.external_configurable(jnp.tanh, 'jtanh')
gin.external_configurable(jax.nn.sigmoid, 'jsigmoid')
gin.external_configurable(
lambda x: jax.nn.gelu(x, approximate=False), 'jgelu'
) # Needs to be exact, although might be slower. See https://github.com/google/jax/issues/4428.
gin.external_configurable(lambda x: x * x * (x > 0.0), 'jrequ')
gin.external_configurable(jnp.exp, 'jexp')
gin.external_configurable(lambda x: x, 'jidentity')
gin.external_configurable(
lambda x: (jnp.exp(x)) * (x <= 0.0) + (x + 1.0) * (x > 0.0), 'jshiftedelu'
) # Nonlinearity used in "Transformers are RNNs: Fast Autoregressive Transformers with Linear Attention" (https://arxiv.org/abs/2006.16236).
def nonnegative_softmax_kernel_feature_creator(data,
                                               projection_matrix,
                                               attention_dims_t,
                                               batch_dims_t,
                                               precision,
                                               is_query,
                                               normalize_data=True,
                                               eps=0.0001):
  """Maps inputs to positive random features approximating softmax attention.

  Projects (optionally normalized) inputs through a random matrix and
  exponentiates, subtracting per-example maxima for numerical stability
  (the FAVOR+ positive feature map).

  Args:
    data: queries or keys to featurize.
    projection_matrix: random matrix used to compute features.
    attention_dims_t: tuple of attention-dimension indices.
    batch_dims_t: tuple of batch-dimension indices.
    precision: precision parameter for the matmul.
    is_query: True when `data` holds queries; changes the axes over which the
      stabilizing max is taken.
    normalize_data: whether to rescale data by 1/sqrt(sqrt(d)).
    eps: small additive constant keeping features strictly positive.

  Returns:
    Positive random features, with the projection dimension appended last.
  """
  if normalize_data:
    # exp(q k^T / sqrt(d)) == exp(q_norm k_norm^T) where
    # w_norm = w / sqrt(sqrt(d)) for w in {q, k}.
    scale = 1.0 / (jnp.sqrt(jnp.sqrt(data.shape[-1])))
  else:
    scale = 1.0
  ratio = 1.0 / jnp.sqrt(projection_matrix.shape[0])
  # Broadcast the projection matrix across all batch dimensions.
  thick_shape = data.shape[0:len(batch_dims_t)] + projection_matrix.shape
  thick_projection = jnp.zeros(thick_shape) + projection_matrix
  projected = lax.dot_general(
      scale * data,
      thick_projection,
      (((data.ndim - 1,), (thick_projection.ndim - 1,)),
       (batch_dims_t, batch_dims_t)),
      precision=precision)
  # ||x||^2 / 2 term of the softmax-kernel feature map.
  norm_term = jnp.square(data)
  norm_term = jnp.sum(norm_term, axis=data.ndim - 1)
  norm_term = (norm_term / 2.0) * scale * scale
  norm_term = jnp.expand_dims(norm_term, axis=data.ndim - 1)
  last_dims_t = (len(projected.shape) - 1,)
  if is_query:
    # Queries: stabilize with a max over the feature axis only.
    features = ratio * (
        jnp.exp(projected - norm_term -
                jnp.max(projected, axis=last_dims_t, keepdims=True)) + eps)
  else:
    # Keys: stabilize with a max over the feature AND attention axes.
    features = ratio * (
        jnp.exp(projected - norm_term - jnp.max(
            projected, axis=last_dims_t + attention_dims_t, keepdims=True)) +
        eps)
  return features
def sincos_softmax_kernel_feature_creator(data,
                                          projection_matrix,
                                          attention_dims_t,
                                          batch_dims_t,
                                          precision,
                                          normalize_data=True):
  """Maps inputs to sin/cos random features approximating softmax attention.

  Uses the classical trigonometric random-feature map for the Gaussian
  kernel, with an exp(||x||^2 / 2) correction factor.

  Args:
    data: queries or keys to featurize.
    projection_matrix: random matrix used to compute features.
    attention_dims_t: tuple of attention-dimension indices.
    batch_dims_t: tuple of batch-dimension indices.
    precision: precision parameter for the matmul.
    normalize_data: whether to rescale data by 1/sqrt(sqrt(d)).

  Returns:
    Random features with cos and sin parts concatenated along the last axis.
  """
  # exp(qk^T/sqrt{d}) = exp(|q|^2/2sqrt{d}) * exp(|k|^2/2sqrt{d}) *
  # exp(-(|q*c-k*c|^2)/2) with c = 1/sqrt{sqrt{d}} when normalizing.
  scale = 1.0 / (jnp.sqrt(jnp.sqrt(data.shape[-1]))) if normalize_data else 1.0
  ratio = 1.0 / jnp.sqrt(projection_matrix.shape[0])
  # Broadcast the projection matrix across all batch dimensions.
  thick_shape = data.shape[0:len(batch_dims_t)] + projection_matrix.shape
  thick_projection = jnp.zeros(thick_shape) + projection_matrix
  projected = lax.dot_general(
      scale * data,
      thick_projection,
      (((data.ndim - 1,), (thick_projection.ndim - 1,)),
       (batch_dims_t, batch_dims_t)),
      precision=precision)
  features = jnp.concatenate(
      (ratio * jnp.cos(projected), ratio * jnp.sin(projected)), axis=-1)
  # exp(||x||^2 / 2) correction, computed in log space.
  log_corr = jnp.square(data)
  log_corr = jnp.sum(log_corr, axis=data.ndim - 1)
  log_corr = (log_corr / 2.0) * scale * scale
  log_corr = jnp.expand_dims(log_corr, axis=data.ndim - 1)
  # Subtract the max over attention dims before exponentiating, for stability.
  log_corr -= jnp.max(log_corr, attention_dims_t, keepdims=True)
  return features * jnp.exp(log_corr)
def generalized_kernel_feature_creator(data, projection_matrix, batch_dims_t,
                                       precision, kernel_fn, kernel_epsilon,
                                       normalize_data):
  """Maps inputs to features for fast generalized attention.

  Applies `kernel_fn` either directly to the (optionally normalized) data
  (deterministic variant, `projection_matrix is None`) or to its random
  projection.

  Args:
    data: input for which features are computed.
    projection_matrix: matrix used to compute features, or None.
    batch_dims_t: tuple of batch-dimension indices.
    precision: precision parameter for the matmul.
    kernel_fn: elementwise kernel nonlinearity.
    kernel_epsilon: additive positive term for numerical stability.
    normalize_data: whether to rescale data by 1/sqrt(sqrt(d)).

  Returns:
    Features for fast generalized attention.
  """
  scale = 1.0 / (jnp.sqrt(jnp.sqrt(data.shape[-1]))) if normalize_data else 1.0
  if projection_matrix is None:
    # Deterministic variant: no random projection at all.
    return kernel_fn(scale * data) + kernel_epsilon
  # Broadcast the projection matrix across all batch dimensions.
  thick_shape = data.shape[0:len(batch_dims_t)] + projection_matrix.shape
  thick_projection = jnp.zeros(thick_shape) + projection_matrix
  projected = lax.dot_general(
      scale * data,
      thick_projection,
      (((data.ndim - 1,), (thick_projection.ndim - 1,)),
       (batch_dims_t, batch_dims_t)),
      precision=precision)
  return kernel_fn(projected) + kernel_epsilon
@gin.configurable
def make_fast_softmax_attention(qkv_dim,
                                renormalize_attention=True,
                                numerical_stabilizer=0.000001,
                                nb_features=256,
                                ortho_features=True,
                                ortho_scaling=0.0,
                                redraw_features=True,
                                unidirectional=False,
                                nonnegative_features=True,
                                lax_scan_unroll=1):
  """Builds a fast softmax-attention function (FAVOR / FAVOR+ style)."""
  logging.info(
      'Fast softmax attention: %s features and orthogonal=%s, renormalize=%s',
      nb_features, ortho_features, renormalize_attention)
  # Choose how the random projection is sampled.
  if ortho_features:
    matrix_creator = functools.partial(
        GaussianOrthogonalRandomMatrix, nb_features, qkv_dim,
        scaling=ortho_scaling)
  else:
    matrix_creator = functools.partial(
        GaussianUnstructuredRandomMatrix, nb_features, qkv_dim)
  if nonnegative_features:
    def kernel_feature_creator(data, projection_matrix, attention_dims_t,
                               batch_dims_t, precision, is_query,
                               normalize_data=True):
      # Positive (exp-based) features: the FAVOR+ estimator.
      return nonnegative_softmax_kernel_feature_creator(
          data, projection_matrix, attention_dims_t, batch_dims_t, precision,
          is_query, normalize_data, numerical_stabilizer)
  else:
    def kernel_feature_creator(data, projection_matrix, attention_dims_t,
                               batch_dims_t, precision, is_query,
                               normalize_data=True):
      # Trigonometric features; `is_query` is irrelevant for this variant.
      del is_query
      return sincos_softmax_kernel_feature_creator(
          data, projection_matrix, attention_dims_t, batch_dims_t, precision,
          normalize_data)
  return FastAttentionviaLowRankDecomposition(
      matrix_creator,
      kernel_feature_creator,
      renormalize_attention=renormalize_attention,
      numerical_stabilizer=numerical_stabilizer,
      redraw_features=redraw_features,
      unidirectional=unidirectional,
      lax_scan_unroll=lax_scan_unroll).dot_product_attention
@gin.configurable
def make_fast_generalized_attention(qkv_dim,
                                    renormalize_attention=True,
                                    numerical_stabilizer=0.0,
                                    nb_features=256,
                                    features_type='deterministic',
                                    kernel_fn=jax.nn.relu,
                                    kernel_epsilon=0.001,
                                    redraw_features=False,
                                    unidirectional=False,
                                    lax_scan_unroll=1):
  """Builds a fast generalized-attention function.

  Generalized attention replaces the softmax kernel with an arbitrary
  nonlinearity `kernel_fn`, optionally composed with a random projection.
  """
  logging.info('Fast generalized attention.: %s features and renormalize=%s',
               nb_features, renormalize_attention)
  # Select how (and whether) the random projection is built.
  if features_type == 'deterministic':
    matrix_creator = None
  elif features_type == 'ortho':
    matrix_creator = functools.partial(
        GaussianOrthogonalRandomMatrix, nb_features, qkv_dim, scaling=False)
  elif features_type == 'iid':
    matrix_creator = functools.partial(
        GaussianUnstructuredRandomMatrix, nb_features, qkv_dim)
  else:
    raise ValueError('Unknown feature value type')

  def kernel_feature_creator(data,
                             projection_matrix,
                             attention_dims_t,
                             batch_dims_t,
                             precision,
                             is_query,
                             normalize_data=False):
    # Generalized features are identical for queries and keys and do not
    # depend on the attention dimensions.
    del attention_dims_t
    del is_query
    return generalized_kernel_feature_creator(data, projection_matrix,
                                              batch_dims_t, precision,
                                              kernel_fn, kernel_epsilon,
                                              normalize_data)

  return FastAttentionviaLowRankDecomposition(
      matrix_creator,
      kernel_feature_creator,
      renormalize_attention=renormalize_attention,
      numerical_stabilizer=numerical_stabilizer,
      redraw_features=redraw_features,
      unidirectional=unidirectional,
      lax_scan_unroll=lax_scan_unroll).dot_product_attention
class RandomMatrix(abc.ABC):
  r"""Abstract class providing a method for constructing 2D random arrays.

  Class is responsible for constructing 2D random arrays.

  NOTE(fix): the original declared ``__metaclass__ = abc.ABCMeta``, which is
  Python-2 syntax and has no effect under Python 3, so `get_2d_array` was not
  actually enforced as abstract. Inheriting from `abc.ABC` restores the
  intended contract; all subclasses in this file implement the method.
  """

  @abc.abstractmethod
  def get_2d_array(self):
    """Returns a 2D array sampled by the concrete subclass."""
    raise NotImplementedError('Abstract method')
class GaussianUnstructuredRandomMatrix(RandomMatrix):
  """Unstructured i.i.d. Gaussian matrix of shape [nb_rows, nb_columns]."""

  def __init__(self, nb_rows, nb_columns, key):
    self.nb_rows = nb_rows        # number of random features
    self.nb_columns = nb_columns  # dimensionality of the projected data
    self.key = key                # PRNG key used for sampling

  def get_2d_array(self):
    """Samples every entry independently from N(0, 1)."""
    shape = (self.nb_rows, self.nb_columns)
    return random.normal(self.key, shape)
class GaussianOrthogonalRandomMatrix(RandomMatrix):
  r"""Class providing a method to create Gaussian orthogonal matrix.

  Rows are orthogonal within each (nb_columns x nb_columns) block (obtained
  via QR decomposition of a Gaussian block) and then rescaled: with
  scaling=0 row norms match those of an i.i.d. Gaussian matrix; with
  scaling=1 each row norm is deterministically sqrt(nb_columns).
  """

  def __init__(self, nb_rows, nb_columns, key, scaling=0):
    self.nb_rows = nb_rows
    self.nb_columns = nb_columns
    self.key = key
    self.scaling = scaling  # 0: random row norms, 1: deterministic norms

  def get_2d_array(self):
    """Builds orthogonal blocks via QR decompositions and stacks them."""
    nb_full_blocks = int(self.nb_rows / self.nb_columns)
    block_list = []
    rng = self.key
    for _ in range(nb_full_blocks):
      rng, rng_input = jax.random.split(rng)
      unstructured_block = random.normal(rng_input,
                                         (self.nb_columns, self.nb_columns))
      q, _ = jnp.linalg.qr(unstructured_block)
      q = jnp.transpose(q)
      block_list.append(q)
    # Handle the remainder when nb_rows is not a multiple of nb_columns.
    remaining_rows = self.nb_rows - nb_full_blocks * self.nb_columns
    if remaining_rows > 0:
      rng, rng_input = jax.random.split(rng)
      unstructured_block = random.normal(rng_input,
                                         (self.nb_columns, self.nb_columns))
      q, _ = jnp.linalg.qr(unstructured_block)
      q = jnp.transpose(q)
      block_list.append(q[0:remaining_rows])
    final_matrix = jnp.vstack(block_list)
    if self.scaling == 0:
      # Match the row-norm distribution of an i.i.d. Gaussian matrix.
      multiplier = jnp.linalg.norm(
          random.normal(self.key, (self.nb_rows, self.nb_columns)), axis=1)
    elif self.scaling == 1:
      multiplier = jnp.sqrt(float(self.nb_columns)) * jnp.ones((self.nb_rows))
    else:
      # Fix: the original formatted `self._scaling`, an attribute that does
      # not exist (the attribute is `self.scaling`), so invalid scaling
      # values raised AttributeError instead of the intended ValueError.
      raise ValueError('Scaling must be one of {0, 1}. Was %s' % self.scaling)
    return jnp.matmul(jnp.diag(multiplier), final_matrix)
class FastAttention(abc.ABC):
  r"""Abstract class providing a method for fast attention.

  Class is responsible for providing a method <dot_product_attention> for fast
  approximate attention.

  NOTE(fix): the original declared ``__metaclass__ = abc.ABCMeta``, which is
  Python-2 syntax and is ignored by Python 3, so the abstract contract was
  never enforced. Inheriting from `abc.ABC` restores it; the concrete
  subclass in this file implements `dot_product_attention` and is unaffected.
  """

  @abc.abstractmethod
  def dot_product_attention(self,
                            query,
                            key,
                            value,
                            dtype=jnp.float32,
                            bias=None,
                            axis=None,
                            broadcast_dropout=True,
                            dropout_rng=None,
                            dropout_rate=0.,
                            deterministic=False,
                            precision=None):
    """Computes dot-product attention given query, key, and value.

    This is the core function for applying fast approximate dot-product
    attention. It calculates the attention weights given query and key and
    combines the values using the attention weights. This function supports
    multi-dimensional inputs.

    Args:
      query: queries for calculating attention with shape of [batch_size, dim1,
        dim2, ..., dimN, num_heads, mem_channels].
      key: keys for calculating attention with shape of [batch_size, dim1, dim2,
        ..., dimN, num_heads, mem_channels].
      value: values to be used in attention with shape of [batch_size, dim1,
        dim2,..., dimN, num_heads, value_channels].
      dtype: the dtype of the computation (default: float32)
      bias: bias for the attention weights. This can be used for incorporating
        autoregressive mask, padding mask, proximity bias.
      axis: axises over which the attention is applied.
      broadcast_dropout: bool: use a broadcasted dropout along batch dims.
      dropout_rng: JAX PRNGKey: to be used for dropout.
      dropout_rate: dropout rate.
      deterministic: bool, deterministic or not (to apply dropout).
      precision: numerical precision of the computation see `jax.lax.Precision`
        for details.

    Returns:
      Output of shape [bs, dim1, dim2, ..., dimN,, num_heads, value_channels].
    """
    raise NotImplementedError('Abstract method')
def _numerator(z_slice_shape, precision, unroll=1):
  """Builds the prefix-sum numerator for unidirectional (causal) attention.

  Returns a function computing, per position i, q_i^T * (sum_{j<=i} k_j v_j^T)
  via `lax.scan`, with a custom VJP that re-traverses the scan in reverse so
  the per-step prefix tensors never need to be stored.

  Args:
    z_slice_shape: shape of one running outer-product accumulator slice.
    precision: einsum/matmul precision.
    unroll: scan unroll factor (see lax_scan_unroll elsewhere in this file).
  """
  def fwd(qs, ks, vs):
    """Forward scan over the attention axis (leading axis of qs/ks/vs)."""
    def body(p, qkv):
      (q, k, v) = qkv
      # Accumulate the running sum of k v^T outer products.
      p += jnp.einsum('...m,...d->...md', k, v, precision=precision)
      X_slice = jnp.einsum('...m,...md->...d', q, p, precision=precision)
      return p, X_slice
    init_value = jnp.zeros(z_slice_shape)
    # `p` is the final accumulator, saved as a residual for the backward pass.
    p, W = lax.scan(body, init_value, (qs, ks, vs), unroll=unroll)
    return W, (p, qs, ks, vs)
  def bwd(pqkv, W_ct):
    """Backward pass: replays the scan in reverse, undoing the prefix sum."""
    def body(carry, qkv_xct):
      p, p_ct = carry
      q, k, v, x_ct = qkv_xct
      q_ct = jnp.einsum('...d,...md->...m', x_ct, p, precision=precision)
      p_ct += jnp.einsum('...d,...m->...md', x_ct, q, precision=precision)
      k_ct = jnp.einsum('...md,...d->...m', p_ct, v, precision=precision)
      v_ct = jnp.einsum('...md,...m->...d', p_ct, k, precision=precision)
      # Roll the forward accumulator back one step.
      p -= jnp.einsum('...m,...d->...md', k, v, precision=precision)
      return (p, p_ct), (q_ct, k_ct, v_ct)
    p, qs, ks, vs = pqkv
    _, (qs_ct, ks_ct, vs_ct) = lax.scan(
        body, (p, jnp.zeros_like(p)), (qs, ks, vs, W_ct),
        reverse=True,
        unroll=unroll)
    return qs_ct, ks_ct, vs_ct
  @jax.custom_vjp
  def _numerator_impl(qs, ks, vs):
    W, _ = fwd(qs, ks, vs)
    return W
  _numerator_impl.defvjp(fwd, bwd)
  return _numerator_impl
def _denominator(t_slice_shape, precision, unroll=1):
  """Builds the prefix-sum denominator for unidirectional (causal) attention.

  Returns a function computing, per position i, q_i^T * (sum_{j<=i} k_j) via
  `lax.scan`, with a custom VJP mirroring the forward scan in reverse.

  Args:
    t_slice_shape: shape of one running key-sum accumulator slice.
    precision: einsum/matmul precision.
    unroll: scan unroll factor.
  """
  def fwd(qs, ks):
    """Forward scan accumulating the running sum of key features."""
    def body(p, qk):
      q, k = qk
      p += k
      x = jnp.einsum('...m,...m->...', q, p, precision=precision)
      return p, x
    p = jnp.zeros(t_slice_shape)
    # Final accumulator `p` is saved as a residual for the backward pass.
    p, R = lax.scan(body, p, (qs, ks), unroll=unroll)
    return R, (qs, ks, p)
  def bwd(qkp, R_ct):
    """Backward pass: replays the scan in reverse, undoing the prefix sum."""
    def body(carry, qkx):
      p, p_ct = carry
      q, k, x_ct = qkx
      q_ct = jnp.einsum('...,...m->...m', x_ct, p, precision=precision)
      p_ct += jnp.einsum('...,...m->...m', x_ct, q, precision=precision)
      k_ct = p_ct
      # Roll the accumulator back one step.
      p -= k
      return (p, p_ct), (q_ct, k_ct)
    qs, ks, p = qkp
    _, (qs_ct, ks_ct) = lax.scan(
        body, (p, jnp.zeros_like(p)), (qs, ks, R_ct),
        reverse=True,
        unroll=unroll)
    return (qs_ct, ks_ct)
  @jax.custom_vjp
  def _denominator_impl(qs, ks):
    R, _ = fwd(qs, ks)
    return R
  _denominator_impl.defvjp(fwd, bwd)
  return _denominator_impl
class FastAttentionviaLowRankDecomposition(FastAttention):
  r"""Class providing a method for fast attention via low rank decomposition.

  Class is responsible for providing a method <dot_product_attention> for fast
  dot-product attention with the use of low rank decomposition (e.g. with
  random feature maps).
  """

  def __init__(self,
               matrix_creator,
               kernel_feature_creator,
               renormalize_attention,
               numerical_stabilizer,
               redraw_features,
               unidirectional,
               lax_scan_unroll=1):  # For optimal GPU performance, set to 16.
    """Creates the fast-attention object.

    Args:
      matrix_creator: factory (partial) building the random projection matrix,
        or None for deterministic feature maps.
      kernel_feature_creator: function mapping queries/keys to kernel features.
      renormalize_attention: whether to divide by the partition function.
      numerical_stabilizer: small constant guarding the reciprocal of the
        partition function.
      redraw_features: whether to resample the projection on every call.
      unidirectional: if True, use causal (prefix-sum) attention.
      lax_scan_unroll: unroll factor for the causal lax.scan.
    """
    rng = random.PRNGKey(0)
    self.matrix_creator = matrix_creator
    self.projection_matrix = self.draw_weights(rng)
    self.kernel_feature_creator = kernel_feature_creator
    self.renormalize_attention = renormalize_attention
    self.numerical_stabilizer = numerical_stabilizer
    self.redraw_features = redraw_features
    self.unidirectional = unidirectional
    self.lax_scan_unroll = lax_scan_unroll

  def draw_weights(self, key):
    """Samples a projection matrix, or returns None when deterministic."""
    if self.matrix_creator is None:
      return None
    matrixrng, _ = random.split(key)
    projection_matrix = self.matrix_creator(key=matrixrng).get_2d_array()
    return projection_matrix

  def dot_product_attention(self,
                            query,
                            key,
                            value,
                            dtype=jnp.float32,
                            bias=None,
                            axis=None,
                            broadcast_dropout=True,
                            dropout_rng=None,
                            dropout_rate=0.,
                            deterministic=False,
                            precision=None):
    """Computes fast approximate dot-product attention (see base class).

    NOTE: the dropout/bias/dtype arguments are accepted for interface parity
    with standard attention but are not used in this implementation.
    """
    assert key.shape[:-1] == value.shape[:-1]
    assert (query.shape[0:1] == key.shape[0:1] and
            query.shape[-1] == key.shape[-1])
    if axis is None:
      axis = tuple(range(1, key.ndim - 2))
    if not isinstance(axis, Iterable):
      axis = (axis,)
    assert key.ndim == query.ndim
    assert key.ndim == value.ndim
    for ax in axis:
      if not (query.ndim >= 3 and 1 <= ax < query.ndim - 2):
        raise ValueError('Attention axis must be between the batch '
                         'axis and the last-two axes.')
    n = key.ndim
    # Constructing projection tensor.
    if self.redraw_features:
      # TODO(kchoro): Get rid of the constant below.
      # Derives a per-call seed from the query contents so redrawing is
      # deterministic given the inputs.
      query_seed = lax.convert_element_type(
          jnp.ceil(jnp.sum(query) * 10000000.0), jnp.int32)
      rng = random.PRNGKey(query_seed)
      self.projection_matrix = self.draw_weights(rng)
    # batch_dims is <bs, <non-attention dims>, num_heads>
    batch_dims = tuple(onp.delete(range(n), axis + (n - 1,)))
    # q & k -> (bs, <non-attention dims>, num_heads, <attention dims>, channels)
    qk_perm = batch_dims + axis + (n - 1,)
    k_extra_perm = axis + batch_dims + (n - 1,)
    key_extra = key.transpose(k_extra_perm)
    key = key.transpose(qk_perm)
    query = query.transpose(qk_perm)
    # v -> (bs, <non-attention dims>, num_heads, <attention dims>, channels)
    v_perm = batch_dims + axis + (n - 1,)
    value = value.transpose(v_perm)
    batch_dims_t = tuple(range(len(batch_dims)))
    attention_dims_t = tuple(
        range(len(batch_dims),
              len(batch_dims) + len(axis)))
    # Constructing tensors Q^{'} and K^{'}.
    query_prime = self.kernel_feature_creator(query, self.projection_matrix,
                                              attention_dims_t, batch_dims_t,
                                              precision, True)
    key_prime = self.kernel_feature_creator(key, self.projection_matrix,
                                            attention_dims_t, batch_dims_t,
                                            precision, False)
    if self.unidirectional:
      # Causal case: use the custom prefix-sum scans over the attention axis.
      index = attention_dims_t[0]
      z_slice_shape = key_prime.shape[0:len(batch_dims_t)] + (
          key_prime.shape[-1],) + (value.shape[-1],)
      numerator_fn = _numerator(z_slice_shape, precision, self.lax_scan_unroll)
      W = numerator_fn(
          jnp.moveaxis(query_prime, index, 0),
          jnp.moveaxis(key_prime, index, 0), jnp.moveaxis(value, index, 0))
      # Constructing W = (Q^{'}(K^{'})^{T})_{masked}V
      W = jnp.moveaxis(W, 0, index)
      if not self.renormalize_attention:
        # Unidirectional, not-normalized attention.
        perm_inv = _invert_perm(qk_perm)
        result = W.transpose(perm_inv)
        return result
      else:
        # Unidirectional, normalized attention.
        thick_all_ones = jnp.zeros(key.shape[0:-1]) + jnp.ones(
            key_extra.shape[0:len(axis)])
        index = attention_dims_t[0]
        t_slice_shape = key_prime.shape[0:len(batch_dims_t)] + (
            key_prime.shape[-1],)
        denominator_fn = _denominator(t_slice_shape, precision,
                                      self.lax_scan_unroll)
        R = denominator_fn(
            jnp.moveaxis(query_prime, index, 0),
            jnp.moveaxis(key_prime, index, 0))
        R = jnp.moveaxis(R, 0, index)
    else:
      contract_query = tuple(
          range(len(batch_dims) + len(axis),
                len(batch_dims) + len(axis) + 1))
      contract_z = tuple(range(len(batch_dims), len(batch_dims) + 1))
      # Constructing Z = (K^{'})^{T}V
      # Z (bs, <non-attention dims>, num_heads, channels_m, channels_v)
      Z = lax.dot_general(
          key_prime,
          value,
          ((attention_dims_t, attention_dims_t), (batch_dims_t, batch_dims_t)),
          precision=precision)
      # Constructing W = Q^{'}Z = Q^{'}(K^{'})^{T}V
      # q (bs, <non-attention dims>, num_heads, <attention dims>, channels_m)
      # Z (bs, <non-attention dims>, num_heads, channels_m, channels_v)
      # W (bs, <non-attention dims>, num_heads, <attention dims>, channels_v)
      W = lax.dot_general(
          query_prime,
          Z, ((contract_query, contract_z), (batch_dims_t, batch_dims_t)),
          precision=precision)
      if not self.renormalize_attention:
        # Bidirectional, not-normalized attention.
        perm_inv = _invert_perm(qk_perm)
        result = W.transpose(perm_inv)
        return result
      else:
        # Bidirectional, normalized attention.
        thick_all_ones = jnp.zeros(key.shape[0:-1]) + jnp.ones(
            key_extra.shape[0:len(axis)])
        contract_key = tuple(
            range(len(batch_dims),
                  len(batch_dims) + len(axis)))
        contract_thick_all_ones = tuple(
            range(thick_all_ones.ndim - len(axis), thick_all_ones.ndim))
        # Construct T = (K^{'})^{T} 1_L
        # k (bs, <non-attention dims>, num_heads, <attention dims>, channels)
        T = lax.dot_general(
            key_prime,
            thick_all_ones, ((contract_key, contract_thick_all_ones),
                             (batch_dims_t, batch_dims_t)),
            precision=precision)
        # Construct partition function: R = Q^{'} T = Q^{'}(K^{'})^{T} 1_L
        # q_p (bs, <non-attention dims>, num_heads, <attention dims>, channs_m)
        # T (bs, <non-attention dims>, num_heads, channels_m)
        R = lax.dot_general(
            query_prime,
            T, (((query_prime.ndim - 1,), (T.ndim - 1,)),
                (batch_dims_t, range(0,
                                     len(T.shape) - 1))),
            precision=precision)
    # Guard against near-zero partition values before taking the reciprocal.
    R = R + 2 * self.numerical_stabilizer * (
        jnp.abs(R) <= self.numerical_stabilizer)
    R = jnp.reciprocal(R)
    R = jnp.expand_dims(R, len(R.shape))
    # W (bs, <non-attention dims>, num_heads, <attention dims>, channels_v)
    # R (bs, <non-attention dims>, num_heads, <attention dims>, extra_channel)
    result = W * R
    # back to (bs, dim1, dim2, ..., dimN, num_heads, channels)
    perm_inv = _invert_perm(qk_perm)
    result = result.transpose(perm_inv)
    return result
def _invert_perm(perm):
perm_inv = [0] * len(perm)
for i, j in enumerate(perm):
perm_inv[j] = i
return tuple(perm_inv)
| 38.198611 | 141 | 0.612406 |
import abc
from collections.abc import Iterable
import functools
from absl import logging
import gin
import jax
from jax import lax
from jax import random
import jax.numpy as jnp
import numpy as onp
gin.external_configurable(jnp.cos, 'jcos')
gin.external_configurable(jnp.sin, 'jsin')
gin.external_configurable(jnp.tanh, 'jtanh')
gin.external_configurable(jax.nn.sigmoid, 'jsigmoid')
gin.external_configurable(
lambda x: jax.nn.gelu(x, approximate=False), 'jgelu'
)
gin.external_configurable(lambda x: x * x * (x > 0.0), 'jrequ')
gin.external_configurable(jnp.exp, 'jexp')
gin.external_configurable(lambda x: x, 'jidentity')
gin.external_configurable(
lambda x: (jnp.exp(x)) * (x <= 0.0) + (x + 1.0) * (x > 0.0), 'jshiftedelu'
)
def nonnegative_softmax_kernel_feature_creator(data,
projection_matrix,
attention_dims_t,
batch_dims_t,
precision,
is_query,
normalize_data=True,
eps=0.0001):
if normalize_data:
data_normalizer = 1.0 / (jnp.sqrt(jnp.sqrt(data.shape[-1])))
else:
data_normalizer = 1.0
ratio = 1.0 / jnp.sqrt(projection_matrix.shape[0])
data_mod_shape = data.shape[0:len(batch_dims_t)] + projection_matrix.shape
data_thick_random_matrix = jnp.zeros(data_mod_shape) + projection_matrix
data_dash = lax.dot_general(
data_normalizer * data,
data_thick_random_matrix,
(((data.ndim - 1,), (data_thick_random_matrix.ndim - 1,)),
(batch_dims_t, batch_dims_t)),
precision=precision)
diag_data = jnp.square(data)
diag_data = jnp.sum(diag_data, axis=data.ndim - 1)
diag_data = (diag_data / 2.0) * data_normalizer * data_normalizer
diag_data = jnp.expand_dims(diag_data, axis=data.ndim - 1)
last_dims_t = (len(data_dash.shape) - 1,)
if is_query:
data_dash = ratio * (
jnp.exp(data_dash - diag_data -
jnp.max(data_dash, axis=last_dims_t, keepdims=True)) + eps)
else:
data_dash = ratio * (
jnp.exp(data_dash - diag_data - jnp.max(
data_dash, axis=last_dims_t + attention_dims_t, keepdims=True)) +
eps)
return data_dash
def sincos_softmax_kernel_feature_creator(data,
projection_matrix,
attention_dims_t,
batch_dims_t,
precision,
normalize_data=True):
if normalize_data:
data_normalizer = 1.0 / (jnp.sqrt(jnp.sqrt(data.shape[-1])))
else:
data_normalizer = 1.0
ratio = 1.0 / jnp.sqrt(projection_matrix.shape[0])
data_mod_shape = data.shape[0:len(batch_dims_t)] + projection_matrix.shape
data_thick_random_matrix = jnp.zeros(data_mod_shape) + projection_matrix
data_dash = lax.dot_general(
data_normalizer * data,
data_thick_random_matrix,
(((data.ndim - 1,), (data_thick_random_matrix.ndim - 1,)),
(batch_dims_t, batch_dims_t)),
precision=precision)
data_dash_cos = ratio * jnp.cos(data_dash)
data_dash_sin = ratio * jnp.sin(data_dash)
data_dash = jnp.concatenate((data_dash_cos, data_dash_sin), axis=-1)
diag_data = jnp.square(data)
diag_data = jnp.sum(diag_data, axis=data.ndim - 1)
diag_data = (diag_data / 2.0) * data_normalizer * data_normalizer
diag_data = jnp.expand_dims(diag_data, axis=data.ndim - 1)
# Additional renormalization for numerical stability
data_renormalizer = jnp.max(diag_data, attention_dims_t, keepdims=True)
diag_data -= data_renormalizer
diag_data = jnp.exp(diag_data)
data_prime = data_dash * diag_data
return data_prime
def generalized_kernel_feature_creator(data, projection_matrix, batch_dims_t,
precision, kernel_fn, kernel_epsilon,
normalize_data):
if normalize_data:
data_normalizer = 1.0 / (jnp.sqrt(jnp.sqrt(data.shape[-1])))
else:
data_normalizer = 1.0
if projection_matrix is None:
return kernel_fn(data_normalizer * data) + kernel_epsilon
else:
data_mod_shape = data.shape[0:len(batch_dims_t)] + projection_matrix.shape
data_thick_random_matrix = jnp.zeros(data_mod_shape) + projection_matrix
data_dash = lax.dot_general(
data_normalizer * data,
data_thick_random_matrix,
(((data.ndim - 1,), (data_thick_random_matrix.ndim - 1,)),
(batch_dims_t, batch_dims_t)),
precision=precision)
data_prime = kernel_fn(data_dash) + kernel_epsilon
return data_prime
@gin.configurable
def make_fast_softmax_attention(qkv_dim,
renormalize_attention=True,
numerical_stabilizer=0.000001,
nb_features=256,
ortho_features=True,
ortho_scaling=0.0,
redraw_features=True,
unidirectional=False,
nonnegative_features=True,
lax_scan_unroll=1):
logging.info(
'Fast softmax attention: %s features and orthogonal=%s, renormalize=%s',
nb_features, ortho_features, renormalize_attention)
if ortho_features:
matrix_creator = functools.partial(
GaussianOrthogonalRandomMatrix,
nb_features,
qkv_dim,
scaling=ortho_scaling)
else:
matrix_creator = functools.partial(GaussianUnstructuredRandomMatrix,
nb_features, qkv_dim)
if nonnegative_features:
def kernel_feature_creator(data,
projection_matrix,
attention_dims_t,
batch_dims_t,
precision,
is_query,
normalize_data=True):
return nonnegative_softmax_kernel_feature_creator(
data, projection_matrix, attention_dims_t, batch_dims_t, precision,
is_query, normalize_data, numerical_stabilizer)
else:
def kernel_feature_creator(data,
projection_matrix,
attention_dims_t,
batch_dims_t,
precision,
is_query,
normalize_data=True):
del is_query
return sincos_softmax_kernel_feature_creator(data, projection_matrix,
attention_dims_t,
batch_dims_t, precision,
normalize_data)
attention_fn = FastAttentionviaLowRankDecomposition(
matrix_creator,
kernel_feature_creator,
renormalize_attention=renormalize_attention,
numerical_stabilizer=numerical_stabilizer,
redraw_features=redraw_features,
unidirectional=unidirectional,
lax_scan_unroll=lax_scan_unroll).dot_product_attention
return attention_fn
@gin.configurable
def make_fast_generalized_attention(qkv_dim,
                                    renormalize_attention=True,
                                    numerical_stabilizer=0.0,
                                    nb_features=256,
                                    features_type='deterministic',
                                    kernel_fn=jax.nn.relu,
                                    kernel_epsilon=0.001,
                                    redraw_features=False,
                                    unidirectional=False,
                                    lax_scan_unroll=1):
  """Builds a fast generalized-kernel attention function.

  ``features_type`` selects how the random projection is constructed:
  'ortho' (block-orthogonal Gaussian), 'iid' (unstructured Gaussian) or
  'deterministic' (no projection at all). Any other value raises ValueError.

  Returns:
    A ``dot_product_attention``-compatible callable.
  """
  logging.info('Fast generalized attention.: %s features and renormalize=%s',
               nb_features, renormalize_attention)
  if features_type == 'deterministic':
    matrix_creator = None
  elif features_type == 'ortho':
    matrix_creator = functools.partial(
        GaussianOrthogonalRandomMatrix, nb_features, qkv_dim, scaling=False)
  elif features_type == 'iid':
    matrix_creator = functools.partial(
        GaussianUnstructuredRandomMatrix, nb_features, qkv_dim)
  else:
    raise ValueError('Unknown feature value type')

  def kernel_feature_creator(data,
                             projection_matrix,
                             attention_dims_t,
                             batch_dims_t,
                             precision,
                             is_query,
                             normalize_data=False):
    # Generalized features are agnostic to the attention axes and to whether
    # the input is a query or a key.
    del attention_dims_t, is_query
    return generalized_kernel_feature_creator(data, projection_matrix,
                                              batch_dims_t, precision,
                                              kernel_fn, kernel_epsilon,
                                              normalize_data)

  return FastAttentionviaLowRankDecomposition(
      matrix_creator,
      kernel_feature_creator,
      renormalize_attention=renormalize_attention,
      numerical_stabilizer=numerical_stabilizer,
      redraw_features=redraw_features,
      unidirectional=unidirectional,
      lax_scan_unroll=lax_scan_unroll).dot_product_attention
class RandomMatrix(metaclass=abc.ABCMeta):
  """Abstract base class for objects that produce a 2D random array.

  NOTE: the original declared ``__metaclass__ = abc.ABCMeta`` inside the
  class body, which is the Python 2 idiom and a no-op in Python 3 -- the
  ``@abc.abstractmethod`` decorator was therefore never enforced. Passing
  the metaclass in the class header makes direct instantiation raise
  TypeError, as intended.
  """

  @abc.abstractmethod
  def get_2d_array(self):
    """Return the 2D random array produced by this generator."""
    raise NotImplementedError('Abstract method')
class GaussianUnstructuredRandomMatrix(RandomMatrix):
  """Random matrix whose entries are i.i.d. standard Gaussian samples."""

  def __init__(self, nb_rows, nb_columns, key):
    # ``key`` is a jax PRNG key used to draw the entries.
    self.key = key
    self.nb_rows = nb_rows
    self.nb_columns = nb_columns

  def get_2d_array(self):
    """Return an (nb_rows, nb_columns) array of standard normal samples."""
    shape = (self.nb_rows, self.nb_columns)
    return random.normal(self.key, shape)
class GaussianOrthogonalRandomMatrix(RandomMatrix):
  """Gaussian random matrix whose rows are built from orthogonal blocks.

  Each (nb_columns x nb_columns) block is the transposed Q factor of the QR
  decomposition of an i.i.d. Gaussian block; blocks are stacked (and the
  last one truncated) to reach ``nb_rows`` rows. Rows are then rescaled
  according to ``scaling``:
    * 0 -- each row is scaled by the norm of an independent Gaussian row,
    * 1 -- each row is scaled by sqrt(nb_columns).
  """

  def __init__(self, nb_rows, nb_columns, key, scaling=0):
    self.nb_rows = nb_rows
    self.nb_columns = nb_columns
    self.key = key
    self.scaling = scaling

  def get_2d_array(self):
    """Return the (nb_rows, nb_columns) structured random array."""
    # Integer floor division; the original used int(a / b), which goes
    # through a float and is unnecessary here.
    nb_full_blocks = self.nb_rows // self.nb_columns
    block_list = []
    rng = self.key
    for _ in range(nb_full_blocks):
      rng, rng_input = jax.random.split(rng)
      unstructured_block = random.normal(rng_input,
                                         (self.nb_columns, self.nb_columns))
      q, _ = jnp.linalg.qr(unstructured_block)
      q = jnp.transpose(q)
      block_list.append(q)
    remaining_rows = self.nb_rows - nb_full_blocks * self.nb_columns
    if remaining_rows > 0:
      rng, rng_input = jax.random.split(rng)
      unstructured_block = random.normal(rng_input,
                                         (self.nb_columns, self.nb_columns))
      q, _ = jnp.linalg.qr(unstructured_block)
      q = jnp.transpose(q)
      block_list.append(q[0:remaining_rows])
    final_matrix = jnp.vstack(block_list)

    if self.scaling == 0:
      multiplier = jnp.linalg.norm(
          random.normal(self.key, (self.nb_rows, self.nb_columns)), axis=1)
    elif self.scaling == 1:
      multiplier = jnp.sqrt(float(self.nb_columns)) * jnp.ones((self.nb_rows))
    else:
      # BUG FIX: this previously interpolated ``self._scaling``, an attribute
      # that does not exist, so invalid scaling values raised AttributeError
      # instead of the intended ValueError.
      raise ValueError('Scaling must be one of {0, 1}. Was %s' % self.scaling)

    return jnp.matmul(jnp.diag(multiplier), final_matrix)
class FastAttention(metaclass=abc.ABCMeta):
  """Abstract interface for fast (linear-time) attention mechanisms.

  NOTE: the original declared ``__metaclass__ = abc.ABCMeta`` inside the
  class body, which is a no-op in Python 3, so the abstract method was
  never enforced. The metaclass is now passed in the class header so
  instantiating this base class raises TypeError.
  """

  @abc.abstractmethod
  def dot_product_attention(self,
                            query,
                            key,
                            value,
                            dtype=jnp.float32,
                            bias=None,
                            axis=None,
                            broadcast_dropout=True,
                            dropout_rng=None,
                            dropout_rate=0.,
                            deterministic=False,
                            precision=None):
    """Computes attention output for the given query/key/value tensors."""
    raise NotImplementedError('Abstract method')
def _numerator(z_slice_shape, precision, unroll=1):
  """Returns a function computing the causal attention numerator.

  The returned callable consumes time-major tensors ``qs``, ``ks``, ``vs``
  and computes, per position i, ``q_i @ (sum_{j<=i} k_j v_j^T)`` via a
  prefix-sum ``lax.scan``. A custom VJP is attached so the backward pass
  reuses the same scan (run in reverse) instead of storing all prefix sums.

  Args:
    z_slice_shape: shape of the running accumulator ``p`` (outer products
      of key features and values for one time slice).
    precision: jax matmul precision forwarded to every einsum.
    unroll: unroll factor for lax.scan.
  """

  def fwd(qs, ks, vs):
    def body(p, qkv):
      (q, k, v) = qkv
      # p accumulates sum_j k_j v_j^T up to the current position.
      p += jnp.einsum('...m,...d->...md', k, v, precision=precision)
      X_slice = jnp.einsum('...m,...md->...d', q, p, precision=precision)
      return p, X_slice

    init_value = jnp.zeros(z_slice_shape)
    p, W = lax.scan(body, init_value, (qs, ks, vs), unroll=unroll)
    # Residuals: final accumulator plus the inputs, consumed by ``bwd``.
    return W, (p, qs, ks, vs)

  def bwd(pqkv, W_ct):
    def body(carry, qkv_xct):
      p, p_ct = carry
      q, k, v, x_ct = qkv_xct
      q_ct = jnp.einsum('...d,...md->...m', x_ct, p, precision=precision)
      p_ct += jnp.einsum('...d,...m->...md', x_ct, q, precision=precision)
      k_ct = jnp.einsum('...md,...d->...m', p_ct, v, precision=precision)
      v_ct = jnp.einsum('...md,...m->...d', p_ct, k, precision=precision)
      # Roll the accumulator back one step so ``p`` matches the forward
      # value at the previous position when scanning in reverse.
      p -= jnp.einsum('...m,...d->...md', k, v, precision=precision)
      return (p, p_ct), (q_ct, k_ct, v_ct)

    p, qs, ks, vs = pqkv
    _, (qs_ct, ks_ct, vs_ct) = lax.scan(
        body, (p, jnp.zeros_like(p)), (qs, ks, vs, W_ct),
        reverse=True,
        unroll=unroll)
    return qs_ct, ks_ct, vs_ct

  @jax.custom_vjp
  def _numerator_impl(qs, ks, vs):
    W, _ = fwd(qs, ks, vs)
    return W

  _numerator_impl.defvjp(fwd, bwd)

  return _numerator_impl
def _denominator(t_slice_shape, precision, unroll=1):
def fwd(qs, ks):
def body(p, qk):
q, k = qk
p += k
x = jnp.einsum('...m,...m->...', q, p, precision=precision)
return p, x
p = jnp.zeros(t_slice_shape)
p, R = lax.scan(body, p, (qs, ks), unroll=unroll)
return R, (qs, ks, p)
def bwd(qkp, R_ct):
def body(carry, qkx):
p, p_ct = carry
q, k, x_ct = qkx
q_ct = jnp.einsum('...,...m->...m', x_ct, p, precision=precision)
p_ct += jnp.einsum('...,...m->...m', x_ct, q, precision=precision)
k_ct = p_ct
p -= k
return (p, p_ct), (q_ct, k_ct)
qs, ks, p = qkp
_, (qs_ct, ks_ct) = lax.scan(
body, (p, jnp.zeros_like(p)), (qs, ks, R_ct),
reverse=True,
unroll=unroll)
return (qs_ct, ks_ct)
@jax.custom_vjp
def _denominator_impl(qs, ks):
R, _ = fwd(qs, ks)
return R
_denominator_impl.defvjp(fwd, bwd)
return _denominator_impl
class FastAttentionviaLowRankDecomposition(FastAttention):
  """Fast attention via the low-rank decomposition Q'(K')^T V.

  Queries and keys are mapped through ``kernel_feature_creator`` to feature
  tensors Q' and K'; attention is then computed without materializing the
  full attention matrix, either with prefix-sum scans (unidirectional) or
  two ``lax.dot_general`` contractions (bidirectional). Optionally the
  result is renormalized by the row sums of the implicit attention matrix.
  """

  def __init__(self,
               matrix_creator,
               kernel_feature_creator,
               renormalize_attention,
               numerical_stabilizer,
               redraw_features,
               unidirectional,
               lax_scan_unroll=1):  # For optimal GPU performance, set to 16.
    # Projection matrix is drawn once here with a fixed seed; it is redrawn
    # per call in dot_product_attention when ``redraw_features`` is True.
    rng = random.PRNGKey(0)
    self.matrix_creator = matrix_creator
    self.projection_matrix = self.draw_weights(rng)
    self.kernel_feature_creator = kernel_feature_creator
    self.renormalize_attention = renormalize_attention
    self.numerical_stabilizer = numerical_stabilizer
    self.redraw_features = redraw_features
    self.unidirectional = unidirectional
    self.lax_scan_unroll = lax_scan_unroll

  def draw_weights(self, key):
    """Draws a projection matrix, or returns None for deterministic mode."""
    if self.matrix_creator is None:
      return None
    matrixrng, _ = random.split(key)
    projection_matrix = self.matrix_creator(key=matrixrng).get_2d_array()
    return projection_matrix

  def dot_product_attention(self,
                            query,
                            key,
                            value,
                            dtype=jnp.float32,
                            bias=None,
                            axis=None,
                            broadcast_dropout=True,
                            dropout_rng=None,
                            dropout_rate=0.,
                            deterministic=False,
                            precision=None):
    """Computes fast attention output for query/key/value tensors.

    NOTE(review): ``dtype``, ``bias`` and the dropout-related arguments are
    accepted for interface compatibility but are not used in this
    implementation.
    """
    assert key.shape[:-1] == value.shape[:-1]
    assert (query.shape[0:1] == key.shape[0:1] and
            query.shape[-1] == key.shape[-1])
    if axis is None:
      axis = tuple(range(1, key.ndim - 2))
    if not isinstance(axis, Iterable):
      axis = (axis,)
    assert key.ndim == query.ndim
    assert key.ndim == value.ndim
    for ax in axis:
      if not (query.ndim >= 3 and 1 <= ax < query.ndim - 2):
        raise ValueError('Attention axis must be between the batch '
                         'axis and the last-two axes.')
    n = key.ndim

    # Constructing projection tensor.
    if self.redraw_features:
      # TODO(kchoro): Get rid of the constant below.
      # Derives a data-dependent seed from the query so the redraw is
      # deterministic for a given input.
      query_seed = lax.convert_element_type(
          jnp.ceil(jnp.sum(query) * 10000000.0), jnp.int32)
      rng = random.PRNGKey(query_seed)
      self.projection_matrix = self.draw_weights(rng)

    # batch_dims is <bs, <non-attention dims>, num_heads>
    batch_dims = tuple(onp.delete(range(n), axis + (n - 1,)))
    # q & k -> (bs, <non-attention dims>, num_heads, <attention dims>, channels)
    qk_perm = batch_dims + axis + (n - 1,)
    k_extra_perm = axis + batch_dims + (n - 1,)
    key_extra = key.transpose(k_extra_perm)
    key = key.transpose(qk_perm)
    query = query.transpose(qk_perm)
    # v -> (bs, <non-attention dims>, num_heads, <attention dims>, channels)
    v_perm = batch_dims + axis + (n - 1,)
    value = value.transpose(v_perm)
    batch_dims_t = tuple(range(len(batch_dims)))
    attention_dims_t = tuple(
        range(len(batch_dims),
              len(batch_dims) + len(axis)))

    # Constructing tensors Q^{'} and K^{'}.
    query_prime = self.kernel_feature_creator(query, self.projection_matrix,
                                              attention_dims_t, batch_dims_t,
                                              precision, True)
    key_prime = self.kernel_feature_creator(key, self.projection_matrix,
                                            attention_dims_t, batch_dims_t,
                                            precision, False)

    if self.unidirectional:
      index = attention_dims_t[0]
      z_slice_shape = key_prime.shape[0:len(batch_dims_t)] + (
          key_prime.shape[-1],) + (value.shape[-1],)

      numerator_fn = _numerator(z_slice_shape, precision, self.lax_scan_unroll)
      W = numerator_fn(
          jnp.moveaxis(query_prime, index, 0),
          jnp.moveaxis(key_prime, index, 0), jnp.moveaxis(value, index, 0))

      # Constructing W = (Q^{'}(K^{'})^{T})_{masked}V
      W = jnp.moveaxis(W, 0, index)

      if not self.renormalize_attention:
        # Unidirectional, not-normalized attention.
        perm_inv = _invert_perm(qk_perm)
        result = W.transpose(perm_inv)
        return result
      else:
        # Unidirectional, normalized attention.
        thick_all_ones = jnp.zeros(key.shape[0:-1]) + jnp.ones(
            key_extra.shape[0:len(axis)])

        index = attention_dims_t[0]
        t_slice_shape = key_prime.shape[0:len(batch_dims_t)] + (
            key_prime.shape[-1],)
        denominator_fn = _denominator(t_slice_shape, precision,
                                      self.lax_scan_unroll)
        R = denominator_fn(
            jnp.moveaxis(query_prime, index, 0),
            jnp.moveaxis(key_prime, index, 0))

        R = jnp.moveaxis(R, 0, index)
    else:
      contract_query = tuple(
          range(len(batch_dims) + len(axis),
                len(batch_dims) + len(axis) + 1))
      contract_z = tuple(range(len(batch_dims), len(batch_dims) + 1))
      # Constructing Z = (K^{'})^{T}V
      Z = lax.dot_general(
          key_prime,
          value,
          ((attention_dims_t, attention_dims_t), (batch_dims_t, batch_dims_t)),
          precision=precision)
      # q (bs, <non-attention dims>, num_heads, <attention dims>, channels_m)
      # Z (bs, <non-attention dims>, num_heads, channels_m, channels_v)
      # W (bs, <non-attention dims>, num_heads, <attention dims>, channels_v)
      W = lax.dot_general(
          query_prime,
          Z, ((contract_query, contract_z), (batch_dims_t, batch_dims_t)),
          precision=precision)
      if not self.renormalize_attention:
        # Bidirectional, not-normalized attention.
        perm_inv = _invert_perm(qk_perm)
        result = W.transpose(perm_inv)
        return result
      else:
        # Bidirectional, normalized attention.
        thick_all_ones = jnp.zeros(key.shape[0:-1]) + jnp.ones(
            key_extra.shape[0:len(axis)])
        contract_key = tuple(
            range(len(batch_dims),
                  len(batch_dims) + len(axis)))
        contract_thick_all_ones = tuple(
            range(thick_all_ones.ndim - len(axis), thick_all_ones.ndim))
        # Construct T = (K^{'})^{T} 1_L
        T = lax.dot_general(
            key_prime,
            thick_all_ones, ((contract_key, contract_thick_all_ones),
                             (batch_dims_t, batch_dims_t)),
            precision=precision)

        # q_p (bs, <non-attention dims>, num_heads, <attention dims>, channs_m)
        # T (bs, <non-attention dims>, num_heads, channels_m)
        R = lax.dot_general(
            query_prime,
            T, (((query_prime.ndim - 1,), (T.ndim - 1,)),
                (batch_dims_t, range(0,
                                     len(T.shape) - 1))),
            precision=precision)

    # Nudge near-zero denominators away from zero before taking reciprocal.
    R = R + 2 * self.numerical_stabilizer * (
        jnp.abs(R) <= self.numerical_stabilizer)
    R = jnp.reciprocal(R)
    R = jnp.expand_dims(R, len(R.shape))
    # W (bs, <non-attention dims>, num_heads, <attention dims>, channels_v)
    # R (bs, <non-attention dims>, num_heads, <attention dims>, extra_channel)
    result = W * R
    # back to (bs, dim1, dim2, ..., dimN, num_heads, channels)
    perm_inv = _invert_perm(qk_perm)
    result = result.transpose(perm_inv)
    return result
def _invert_perm(perm):
perm_inv = [0] * len(perm)
for i, j in enumerate(perm):
perm_inv[j] = i
return tuple(perm_inv)
| true | true |
f713e45808fac231d5822afc0ad72188540cb0ee | 788 | py | Python | mars/services/web/tests/extra_handler.py | ConanoutlooklvTBS/mars | 7030566fd9e9fc02b6b4064ef7bd86f6c24a2f60 | [
"Apache-2.0"
] | 2,413 | 2018-12-06T09:37:11.000Z | 2022-03-30T15:47:39.000Z | mars/services/web/tests/extra_handler.py | ConanoutlooklvTBS/mars | 7030566fd9e9fc02b6b4064ef7bd86f6c24a2f60 | [
"Apache-2.0"
] | 1,335 | 2018-12-07T03:06:18.000Z | 2022-03-31T11:45:57.000Z | mars/services/web/tests/extra_handler.py | ConanoutlooklvTBS/mars | 7030566fd9e9fc02b6b4064ef7bd86f6c24a2f60 | [
"Apache-2.0"
] | 329 | 2018-12-07T03:12:41.000Z | 2022-03-29T21:49:57.000Z | # Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..core import MarsRequestHandler
class ExtraTestHandler(MarsRequestHandler):
    """Minimal request handler used for testing extra-handler registration.

    GET responds with the literal body ``Test``.
    """

    def get(self):
        self.write('Test')
# URL-pattern -> handler routing table exposed by this module; presumably
# consumed by the web service when loading extra handlers -- TODO confirm
# against the service's handler-discovery code.
web_handlers = {
    '/api/extra_test': ExtraTestHandler
}
| 30.307692 | 74 | 0.748731 |
from ..core import MarsRequestHandler
class ExtraTestHandler(MarsRequestHandler):
def get(self):
self.write('Test')
web_handlers = {
'/api/extra_test': ExtraTestHandler
}
| true | true |
f713e54ac61b29b80119ca3f3e51c995911ef04d | 1,294 | bzl | Python | dotnet/private/actions/resx_net.bzl | nolen777/rules_mono | b49c210478c2240fcc7be655c9fc37d751610fb1 | [
"Apache-2.0"
] | null | null | null | dotnet/private/actions/resx_net.bzl | nolen777/rules_mono | b49c210478c2240fcc7be655c9fc37d751610fb1 | [
"Apache-2.0"
] | null | null | null | dotnet/private/actions/resx_net.bzl | nolen777/rules_mono | b49c210478c2240fcc7be655c9fc37d751610fb1 | [
"Apache-2.0"
] | null | null | null | load(
"@rules_mono//dotnet/private:providers.bzl",
"DotnetLibrary",
)
def _make_runner_arglist(dotnet, source, output):
    """Builds the resgen command line: /useSourcePath, the input(s), then the output."""
    runner_args = dotnet.actions.args()
    runner_args.add("/useSourcePath")
    if type(source) != "Target":
        # A plain file path was passed directly.
        runner_args.add(source)
    else:
        # A Target was passed; expand to its files.
        runner_args.add_all(source.files)
    runner_args.add(output)
    return runner_args
def emit_resx_net(
        dotnet,
        name = "",
        src = None,
        identifier = None,
        out = None,
        customresgen = None):
    """Compiles a resource file into a .resources file via resgen.

    Args:
        dotnet: the dotnet context providing actions, resgen and label info.
        name: base name for the output (used when ``out`` is not given).
        src: input resource file or Target.
        identifier: logical resource identifier forwarded to new_resource.
        out: explicit output path; overrides ``name``-derived path.
        customresgen: unused; kept for interface compatibility.

    Returns:
        The resource provider created by ``dotnet.new_resource``.
    """
    if name == "" and out == None:
        fail("either name or out must be set")

    if not out:
        result = dotnet.declare_file(dotnet, path = name + ".resources")
    else:
        result = dotnet.declare_file(dotnet, path = out)

    args = _make_runner_arglist(dotnet, src, result)

    inputs = src.files if type(src) == "Target" else [src]
    dotnet.actions.run(
        inputs = inputs,
        outputs = [result],
        executable = dotnet.resgen,
        arguments = [args],
        mnemonic = "NetResxCompile",
        progress_message = (
            # BUG FIX: message previously read "Compiling resoources<pkg>:<name>"
            # (typo plus missing separator before the label).
            "Compiling resources " + dotnet.label.package + ":" + dotnet.label.name
        ),
    )

    return dotnet.new_resource(
        dotnet = dotnet,
        name = name,
        result = result,
        identifier = identifier,
    )
| 23.107143 | 83 | 0.572643 | load(
"@rules_mono//dotnet/private:providers.bzl",
"DotnetLibrary",
)
def _make_runner_arglist(dotnet, source, output):
args = dotnet.actions.args()
args.add("/useSourcePath")
if type(source) == "Target":
args.add_all(source.files)
else:
args.add(source)
args.add(output)
return args
def emit_resx_net(
dotnet,
name = "",
src = None,
identifier = None,
out = None,
customresgen = None):
if name == "" and out == None:
fail("either name or out must be set")
if not out:
result = dotnet.declare_file(dotnet, path = name + ".resources")
else:
result = dotnet.declare_file(dotnet, path = out)
args = _make_runner_arglist(dotnet, src, result)
inputs = src.files if type(src) == "Target" else [src]
dotnet.actions.run(
inputs = inputs,
outputs = [result],
executable = dotnet.resgen,
arguments = [args],
mnemonic = "NetResxCompile",
progress_message = (
"Compiling resoources" + dotnet.label.package + ":" + dotnet.label.name
),
)
return dotnet.new_resource(
dotnet = dotnet,
name = name,
result = result,
identifier = identifier,
)
| true | true |
f713e55caf6b99004ecc3241794afbf437dab6fc | 1,098 | py | Python | python/main.py | jamesGadoury/robot-search | d7ec99aef71acb113240437321d3b2868c67cf30 | [
"MIT"
] | 1 | 2020-12-28T15:30:48.000Z | 2020-12-28T15:30:48.000Z | python/main.py | jamesGadoury/robot-search | d7ec99aef71acb113240437321d3b2868c67cf30 | [
"MIT"
] | null | null | null | python/main.py | jamesGadoury/robot-search | d7ec99aef71acb113240437321d3b2868c67cf30 | [
"MIT"
] | null | null | null | import pathgraph
import robotsearch
import unittest
class TestGraphMethods(unittest.TestCase):
def test_create_undirected_graph(self):
self.assertTrue(isinstance(pathgraph.graph_by_type("undirected"), pathgraph.UndirectedGraph))
def test_create_directed_graph(self):
self.assertTrue(isinstance(pathgraph.graph_by_type("directed"), pathgraph.DirectedGraph))
def test_add_duplicate_edge_undirected(self):
graph = pathgraph.graph_by_type("undirected")
destination = pathgraph.DestinationNode("B", 1)
self.assertTrue(graph.add_edge(fromKey="A", destination=destination))
self.assertFalse(graph.add_edge(fromKey="A", destination=destination))
def test_add_duplicate_edge_directed(self):
graph=pathgraph.graph_by_type("directed")
destination = pathgraph.DestinationNode("B", 1)
self.assertTrue(graph.add_edge(fromKey="A", destination=destination))
self.assertFalse(graph.add_edge(fromKey="A", destination=destination))
def main():
    """Entry point: discover and run the unittest cases in this module."""
    unittest.main()
if __name__ == "__main__":
main() | 37.862069 | 101 | 0.73224 | import pathgraph
import robotsearch
import unittest
class TestGraphMethods(unittest.TestCase):
def test_create_undirected_graph(self):
self.assertTrue(isinstance(pathgraph.graph_by_type("undirected"), pathgraph.UndirectedGraph))
def test_create_directed_graph(self):
self.assertTrue(isinstance(pathgraph.graph_by_type("directed"), pathgraph.DirectedGraph))
def test_add_duplicate_edge_undirected(self):
graph = pathgraph.graph_by_type("undirected")
destination = pathgraph.DestinationNode("B", 1)
self.assertTrue(graph.add_edge(fromKey="A", destination=destination))
self.assertFalse(graph.add_edge(fromKey="A", destination=destination))
def test_add_duplicate_edge_directed(self):
graph=pathgraph.graph_by_type("directed")
destination = pathgraph.DestinationNode("B", 1)
self.assertTrue(graph.add_edge(fromKey="A", destination=destination))
self.assertFalse(graph.add_edge(fromKey="A", destination=destination))
def main():
unittest.main()
if __name__ == "__main__":
main() | true | true |
f713e5f6c485bc439394846d31e1724342ae0b37 | 13,276 | py | Python | qtp_diversity/tests/test_validate.py | antgonza/qtp-diversity | 0c2ec84711decf798ea6ffdb3e97dc9582ba4035 | [
"BSD-3-Clause"
] | null | null | null | qtp_diversity/tests/test_validate.py | antgonza/qtp-diversity | 0c2ec84711decf798ea6ffdb3e97dc9582ba4035 | [
"BSD-3-Clause"
] | null | null | null | qtp_diversity/tests/test_validate.py | antgonza/qtp-diversity | 0c2ec84711decf798ea6ffdb3e97dc9582ba4035 | [
"BSD-3-Clause"
] | null | null | null | # -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import main
from tempfile import mkdtemp, mkstemp
from os.path import exists, isdir, join
from os import remove, close
from shutil import rmtree
from json import dumps
from skbio.stats.distance import randdm
from skbio import OrdinationResults
from qiita_client import ArtifactInfo
from qiita_client.testing import PluginTestCase
import pandas as pd
import numpy as np
from qtp_diversity import plugin
from qtp_diversity.validate import (
_validate_distance_matrix, _validate_ordination_results,
_validate_alpha_vector, _validate_feature_data_taxonomy, validate)
class ValidateTests(PluginTestCase):
    """Tests for the qtp-diversity ``validate`` command.

    Exercises the per-artifact-type validators (distance matrix, ordination
    results, alpha vector, FeatureData[Taxonomy]) and the top-level
    ``validate`` dispatcher, including its error paths.
    """

    def setUp(self):
        """Create a scratch output dir, sample metadata, and register plugin."""
        self.out_dir = mkdtemp()
        self._clean_up_files = [self.out_dir]
        self.metadata = {
            '1.SKM4.640180': {'col': "doesn't really matters"},
            '1.SKB8.640193': {'col': "doesn't really matters"},
            '1.SKD8.640184': {'col': "doesn't really matters"},
            '1.SKM9.640192': {'col': "doesn't really matters"},
            '1.SKB7.640196': {'col': "doesn't really matters"}}

        plugin('https://localhost:8383', 'register', 'ignored')

    def tearDown(self):
        """Remove every file or directory registered for cleanup."""
        for fp in self._clean_up_files:
            if exists(fp):
                if isdir(fp):
                    rmtree(fp)
                else:
                    remove(fp)

    def _create_distance_matrix(self, sample_ids):
        """Write a random distance matrix over ``sample_ids``; return its path."""
        dm = randdm(len(sample_ids), sample_ids)
        fd, fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        dm.write(fp)
        return fp

    def _create_ordination_results(self, sample_ids):
        """Write a fixed 4-axis PCoA OrdinationResults file; return its path."""
        eigvals = [0.51236726, 0.30071909, 0.26791207, 0.20898868]
        proportion_explained = [0.2675738328, 0.157044696, 0.1399118638,
                                0.1091402725]
        axis_labels = ['PC1', 'PC2', 'PC3', 'PC4']
        samples = [[-2.584, 1.739, 3.828, -1.944],
                   [-2.710, -1.859, -8.648, 1.180],
                   [2.350, 9.625, -3.457, -3.208],
                   [2.614, -1.114, 1.476, 2.908],
                   [2.850, -1.925, 6.232, 1.381]]
        ord_res = OrdinationResults(
            short_method_name='PCoA',
            long_method_name='Principal Coordinate Analysis',
            eigvals=pd.Series(eigvals, index=axis_labels),
            samples=pd.DataFrame(np.asarray(samples), index=sample_ids,
                                 columns=axis_labels),
            proportion_explained=pd.Series(proportion_explained,
                                           index=axis_labels))
        fd, fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        ord_res.write(fp)
        return fp

    def _create_alpha_vector(self, sample_ids):
        """Write a random observed_otus alpha vector file; return its path."""
        fd, fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        with open(fp, 'w') as f:
            f.write("\tobserved_otus\n")
            for s_id in sample_ids:
                f.write("%s\t%d\n" % (s_id, np.random.randint(1, 200)))

        return fp

    def _create_job(self, a_type, files, analysis):
        """Create a running Validate job via the API; return (job_id, params)."""
        parameters = {'template': None,
                      'files': dumps(files),
                      'artifact_type': a_type,
                      'analysis': analysis}
        data = {'command': dumps(['Diversity types', '0.1.1', 'Validate']),
                'parameters': dumps(parameters),
                'status': 'running'}
        job_id = self.qclient.post(
            '/apitest/processing_job/', data=data)['job']
        return job_id, parameters

    def test_validate_distance_matrix(self):
        """_validate_distance_matrix accepts known samples, rejects unknown."""
        # Create a distance matrix
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', '1.SKB7.640196']
        dm_fp = self._create_distance_matrix(sample_ids)

        # Test success
        obs_success, obs_ainfo, obs_error = _validate_distance_matrix(
            {'plain_text': [dm_fp]}, self.metadata, self.out_dir)
        self.assertTrue(obs_success)
        exp_ainfo = [ArtifactInfo(None, "distance_matrix",
                                  [(dm_fp, 'plain_text')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")

        # Test failure
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', 'NotASample']
        dm_fp = self._create_distance_matrix(sample_ids)
        obs_success, obs_ainfo, obs_error = _validate_distance_matrix(
            {'plain_text': [dm_fp]}, self.metadata, self.out_dir)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        self.assertEqual(obs_error, "The distance matrix contain samples not "
                                    "present in the metadata")

    def test_validate_ordination_results(self):
        """_validate_ordination_results accepts known samples, rejects unknown."""
        # Create the ordination results
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', '1.SKB7.640196']
        ord_res_fp = self._create_ordination_results(sample_ids)

        # Test success
        obs_success, obs_ainfo, obs_error = _validate_ordination_results(
            {'plain_text': [ord_res_fp]}, self.metadata, self.out_dir)
        self.assertTrue(obs_success)
        exp_ainfo = [ArtifactInfo(None, "ordination_results",
                                  [(ord_res_fp, 'plain_text')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")

        # Test failure
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', 'NotASample']
        ord_res_fp = self._create_ordination_results(sample_ids)
        obs_success, obs_ainfo, obs_error = _validate_ordination_results(
            {'plain_text': [ord_res_fp]}, self.metadata, self.out_dir)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        self.assertEqual(obs_error, "The ordination results contain samples "
                                    "not present in the metadata")

    def test_validate_alpha_vector(self):
        """_validate_alpha_vector checks sample ids and basic file format."""
        # Create the alpha vector
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192']
        alpha_vector_fp = self._create_alpha_vector(sample_ids)

        # Test success
        obs_success, obs_ainfo, obs_error = _validate_alpha_vector(
            {'plain_text': [alpha_vector_fp]}, self.metadata, self.out_dir)
        self.assertEqual(obs_error, "")
        self.assertTrue(obs_success)
        exp_ainfo = [ArtifactInfo(None, "alpha_vector",
                                  [(alpha_vector_fp, 'plain_text')])]
        self.assertEqual(obs_ainfo, exp_ainfo)

        # Test failure wrong ids
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      'NotASample']
        alpha_vector_fp = self._create_alpha_vector(sample_ids)
        obs_success, obs_ainfo, obs_error = _validate_alpha_vector(
            {'plain_text': [alpha_vector_fp]}, self.metadata, self.out_dir)
        self.assertEqual(obs_error, "The alpha vector contains samples not "
                                    "present in the metadata")
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)

        # Test failure wrong format
        fd, alpha_vector_fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        with open(alpha_vector_fp, 'w') as f:
            f.write("\tobserved_otus\nsample 1\n")
        obs_success, obs_ainfo, obs_error = _validate_alpha_vector(
            {'plain_text': [alpha_vector_fp]}, self.metadata, self.out_dir)
        self.assertEqual(obs_error, "The alpha vector format is incorrect")
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)

    def test_validate(self):
        """Top-level validate dispatches per type and adds HTML summaries."""
        # Test artifact type error
        job_id, params = self._create_job(
            'NotAType', {'plan_text': 'Will fail before checking this'}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        self.assertEqual(
            obs_error, "Unknown artifact type NotAType. Supported types: "
                       "FeatureData[Taxonomy], alpha_vector, distance_matrix, "
                       "ordination_results")

        # Test missing metadata error - to be fair, I don't know how this error
        # can happen in the live system, but better be safe than sorry
        job_id, params = self._create_job(
            'distance_matrix', {'plan_text': 'Will fail before checking this'},
            None)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        self.assertEqual(
            obs_error, "Missing metadata information")

        # Test distance matrix success
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', '1.SKB7.640196']
        dm_fp = self._create_distance_matrix(sample_ids)
        job_id, params = self._create_job(
            'distance_matrix', {'plain_text': [dm_fp]}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertTrue(obs_success)
        html_fp = join(self.out_dir, 'index.html')
        exp_ainfo = [ArtifactInfo(None, "distance_matrix",
                                  [(dm_fp, 'plain_text'),
                                   (html_fp, 'html_summary')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")

        # Test ordination results success
        ord_res_fp = self._create_ordination_results(sample_ids)
        job_id, params = self._create_job(
            'ordination_results', {'plain_text': [ord_res_fp]}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertTrue(obs_success)
        html_fp = join(self.out_dir, 'index.html')
        esf_fp = join(self.out_dir, 'emperor_support_files')
        exp_ainfo = [ArtifactInfo(None, "ordination_results",
                                  [(ord_res_fp, 'plain_text'),
                                   (html_fp, 'html_summary'),
                                   (esf_fp, 'html_summary_dir')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")

        # Test alpha vector success
        alpha_vector_fp = self._create_alpha_vector(sample_ids)
        job_id, params = self._create_job(
            'alpha_vector', {'plain_text': [alpha_vector_fp]}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertTrue(obs_success)
        html_fp = join(self.out_dir, 'index.html')
        sf_fp = join(self.out_dir, 'support_files')
        exp_ainfo = [ArtifactInfo(None, "alpha_vector",
                                  [(alpha_vector_fp, 'plain_text'),
                                   (html_fp, 'html_summary'),
                                   (sf_fp, 'html_summary_dir')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")

    def test_validate_FeatureData_Taxonomy(self):
        """_validate_feature_data_taxonomy checks the taxonomy file header."""
        # Create the feature data
        fd, taxonomy_fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        with open(taxonomy_fp, 'w') as f:
            f.write("Feature ID\tTaxonomy\tConfidence\n")
            f.write("TACGGAGGA\tk__Bacteria;p__Bacteroidetes;c__Bacteroidia\t"
                    "0.9998743\n")
            f.write("TACGTAGGG\tk__Bacteria;p__Firmicutes;c__Clostridia\t"
                    "0.9999999\n")

        # Test success
        obs_success, obs_ainfo, obs_error = _validate_feature_data_taxonomy(
            {'plain_text': [taxonomy_fp]}, None, self.out_dir)
        self.assertEqual(obs_error, "")
        self.assertTrue(obs_success)
        exp_ainfo = [ArtifactInfo(None, "FeatureData[Taxonomy]",
                                  [(taxonomy_fp, 'plain_text')])]
        self.assertEqual(obs_ainfo, exp_ainfo)

        # Test failure wrong format
        fd, taxonomy_fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        with open(taxonomy_fp, 'w') as f:
            f.write("Feature ID\tIt's gonna fail!\tConfidence\n")
            f.write("TACGGAGGA\tk__Bacteria;p__Bacteroidetes;c__Bacteroidia\t"
                    "0.9998743\n")
            f.write("TACGTAGGG\tk__Bacteria;p__Firmicutes;c__Clostridia\t"
                    "0.9999999\n")
        obs_success, obs_ainfo, obs_error = _validate_feature_data_taxonomy(
            {'plain_text': [taxonomy_fp]}, None, self.out_dir)
        self.assertIn("The file header seems wrong", obs_error)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
if __name__ == '__main__':
main()
| 43.960265 | 79 | 0.597017 |
from unittest import main
from tempfile import mkdtemp, mkstemp
from os.path import exists, isdir, join
from os import remove, close
from shutil import rmtree
from json import dumps
from skbio.stats.distance import randdm
from skbio import OrdinationResults
from qiita_client import ArtifactInfo
from qiita_client.testing import PluginTestCase
import pandas as pd
import numpy as np
from qtp_diversity import plugin
from qtp_diversity.validate import (
_validate_distance_matrix, _validate_ordination_results,
_validate_alpha_vector, _validate_feature_data_taxonomy, validate)
class ValidateTests(PluginTestCase):
def setUp(self):
self.out_dir = mkdtemp()
self._clean_up_files = [self.out_dir]
self.metadata = {
'1.SKM4.640180': {'col': "doesn't really matters"},
'1.SKB8.640193': {'col': "doesn't really matters"},
'1.SKD8.640184': {'col': "doesn't really matters"},
'1.SKM9.640192': {'col': "doesn't really matters"},
'1.SKB7.640196': {'col': "doesn't really matters"}}
plugin('https://localhost:8383', 'register', 'ignored')
def tearDown(self):
for fp in self._clean_up_files:
if exists(fp):
if isdir(fp):
rmtree(fp)
else:
remove(fp)
def _create_distance_matrix(self, sample_ids):
dm = randdm(len(sample_ids), sample_ids)
fd, fp = mkstemp(suffix='.txt', dir=self.out_dir)
close(fd)
dm.write(fp)
return fp
def _create_ordination_results(self, sample_ids):
eigvals = [0.51236726, 0.30071909, 0.26791207, 0.20898868]
proportion_explained = [0.2675738328, 0.157044696, 0.1399118638,
0.1091402725]
axis_labels = ['PC1', 'PC2', 'PC3', 'PC4']
samples = [[-2.584, 1.739, 3.828, -1.944],
[-2.710, -1.859, -8.648, 1.180],
[2.350, 9.625, -3.457, -3.208],
[2.614, -1.114, 1.476, 2.908],
[2.850, -1.925, 6.232, 1.381]]
ord_res = OrdinationResults(
short_method_name='PCoA',
long_method_name='Principal Coordinate Analysis',
eigvals=pd.Series(eigvals, index=axis_labels),
samples=pd.DataFrame(np.asarray(samples), index=sample_ids,
columns=axis_labels),
proportion_explained=pd.Series(proportion_explained,
index=axis_labels))
fd, fp = mkstemp(suffix='.txt', dir=self.out_dir)
close(fd)
ord_res.write(fp)
return fp
def _create_alpha_vector(self, sample_ids):
fd, fp = mkstemp(suffix='.txt', dir=self.out_dir)
close(fd)
with open(fp, 'w') as f:
f.write("\tobserved_otus\n")
for s_id in sample_ids:
f.write("%s\t%d\n" % (s_id, np.random.randint(1, 200)))
return fp
def _create_job(self, a_type, files, analysis):
parameters = {'template': None,
'files': dumps(files),
'artifact_type': a_type,
'analysis': analysis}
data = {'command': dumps(['Diversity types', '0.1.1', 'Validate']),
'parameters': dumps(parameters),
'status': 'running'}
job_id = self.qclient.post(
'/apitest/processing_job/', data=data)['job']
return job_id, parameters
def test_validate_distance_matrix(self):
# Create a distance matrix
sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
'1.SKM9.640192', '1.SKB7.640196']
dm_fp = self._create_distance_matrix(sample_ids)
# Test success
obs_success, obs_ainfo, obs_error = _validate_distance_matrix(
{'plain_text': [dm_fp]}, self.metadata, self.out_dir)
self.assertTrue(obs_success)
exp_ainfo = [ArtifactInfo(None, "distance_matrix",
[(dm_fp, 'plain_text')])]
self.assertEqual(obs_ainfo, exp_ainfo)
self.assertEqual(obs_error, "")
# Test failure
sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
'1.SKM9.640192', 'NotASample']
dm_fp = self._create_distance_matrix(sample_ids)
obs_success, obs_ainfo, obs_error = _validate_distance_matrix(
{'plain_text': [dm_fp]}, self.metadata, self.out_dir)
self.assertFalse(obs_success)
self.assertIsNone(obs_ainfo)
self.assertEqual(obs_error, "The distance matrix contain samples not "
"present in the metadata")
    def test_validate_ordination_results(self):
        """Ordination-results validation passes iff all samples are in the metadata."""
        # Create the ordination results
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', '1.SKB7.640196']
        ord_res_fp = self._create_ordination_results(sample_ids)
        # Test success: all sample ids exist in self.metadata
        obs_success, obs_ainfo, obs_error = _validate_ordination_results(
            {'plain_text': [ord_res_fp]}, self.metadata, self.out_dir)
        self.assertTrue(obs_success)
        exp_ainfo = [ArtifactInfo(None, "ordination_results",
                                  [(ord_res_fp, 'plain_text')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")
        # Test failure: 'NotASample' is not present in the study metadata
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', 'NotASample']
        ord_res_fp = self._create_ordination_results(sample_ids)
        obs_success, obs_ainfo, obs_error = _validate_ordination_results(
            {'plain_text': [ord_res_fp]}, self.metadata, self.out_dir)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        self.assertEqual(obs_error, "The ordination results contain samples "
                                    "not present in the metadata")
    def test_validate_alpha_vector(self):
        """Alpha-vector validation checks both the sample ids and the file format."""
        # Create the alpha vector
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192']
        alpha_vector_fp = self._create_alpha_vector(sample_ids)
        # Test success: all sample ids exist in self.metadata
        obs_success, obs_ainfo, obs_error = _validate_alpha_vector(
            {'plain_text': [alpha_vector_fp]}, self.metadata, self.out_dir)
        self.assertEqual(obs_error, "")
        self.assertTrue(obs_success)
        exp_ainfo = [ArtifactInfo(None, "alpha_vector",
                                  [(alpha_vector_fp, 'plain_text')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        # Test failure: 'NotASample' is not present in the study metadata
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      'NotASample']
        alpha_vector_fp = self._create_alpha_vector(sample_ids)
        obs_success, obs_ainfo, obs_error = _validate_alpha_vector(
            {'plain_text': [alpha_vector_fp]}, self.metadata, self.out_dir)
        self.assertEqual(obs_error, "The alpha vector contains samples not "
                                    "present in the metadata")
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        # Test failure: malformed file (data row without a value column)
        fd, alpha_vector_fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        with open(alpha_vector_fp, 'w') as f:
            f.write("\tobserved_otus\nsample 1\n")
        obs_success, obs_ainfo, obs_error = _validate_alpha_vector(
            {'plain_text': [alpha_vector_fp]}, self.metadata, self.out_dir)
        self.assertEqual(obs_error, "The alpha vector format is incorrect")
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
    def test_validate(self):
        """End-to-end `validate` dispatch for all supported artifact types."""
        # Test artifact type error
        job_id, params = self._create_job(
            'NotAType', {'plan_text': 'Will fail before checking this'}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        self.assertEqual(
            obs_error, "Unknown artifact type NotAType. Supported types: "
            "FeatureData[Taxonomy], alpha_vector, distance_matrix, "
            "ordination_results")
        # Test missing metadata error: the job's analysis is None, so no
        # metadata can be retrieved for it
        job_id, params = self._create_job(
            'distance_matrix', {'plan_text': 'Will fail before checking this'},
            None)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
        self.assertEqual(
            obs_error, "Missing metadata information")
        # Test success for a distance_matrix artifact: an HTML summary is
        # added to the output file list
        sample_ids = ['1.SKM4.640180', '1.SKB8.640193', '1.SKD8.640184',
                      '1.SKM9.640192', '1.SKB7.640196']
        dm_fp = self._create_distance_matrix(sample_ids)
        job_id, params = self._create_job(
            'distance_matrix', {'plain_text': [dm_fp]}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertTrue(obs_success)
        html_fp = join(self.out_dir, 'index.html')
        exp_ainfo = [ArtifactInfo(None, "distance_matrix",
                                  [(dm_fp, 'plain_text'),
                                   (html_fp, 'html_summary')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")
        # Test success for an ordination_results artifact: the Emperor support
        # files directory is also attached
        ord_res_fp = self._create_ordination_results(sample_ids)
        job_id, params = self._create_job(
            'ordination_results', {'plain_text': [ord_res_fp]}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertTrue(obs_success)
        html_fp = join(self.out_dir, 'index.html')
        esf_fp = join(self.out_dir, 'emperor_support_files')
        exp_ainfo = [ArtifactInfo(None, "ordination_results",
                                  [(ord_res_fp, 'plain_text'),
                                   (html_fp, 'html_summary'),
                                   (esf_fp, 'html_summary_dir')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")
        # Test success for an alpha_vector artifact: a support files directory
        # is also attached
        alpha_vector_fp = self._create_alpha_vector(sample_ids)
        job_id, params = self._create_job(
            'alpha_vector', {'plain_text': [alpha_vector_fp]}, 1)
        obs_success, obs_ainfo, obs_error = validate(
            self.qclient, job_id, params, self.out_dir)
        self.assertTrue(obs_success)
        html_fp = join(self.out_dir, 'index.html')
        sf_fp = join(self.out_dir, 'support_files')
        exp_ainfo = [ArtifactInfo(None, "alpha_vector",
                                  [(alpha_vector_fp, 'plain_text'),
                                   (html_fp, 'html_summary'),
                                   (sf_fp, 'html_summary_dir')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        self.assertEqual(obs_error, "")
    def test_validate_FeatureData_Taxonomy(self):
        """Taxonomy validation checks the expected three-column header."""
        # Valid file: header is "Feature ID\tTaxonomy\tConfidence"
        fd, taxonomy_fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        with open(taxonomy_fp, 'w') as f:
            f.write("Feature ID\tTaxonomy\tConfidence\n")
            f.write("TACGGAGGA\tk__Bacteria;p__Bacteroidetes;c__Bacteroidia\t"
                    "0.9998743\n")
            f.write("TACGTAGGG\tk__Bacteria;p__Firmicutes;c__Clostridia\t"
                    "0.9999999\n")
        obs_success, obs_ainfo, obs_error = _validate_feature_data_taxonomy(
            {'plain_text': [taxonomy_fp]}, None, self.out_dir)
        self.assertEqual(obs_error, "")
        self.assertTrue(obs_success)
        exp_ainfo = [ArtifactInfo(None, "FeatureData[Taxonomy]",
                                  [(taxonomy_fp, 'plain_text')])]
        self.assertEqual(obs_ainfo, exp_ainfo)
        # Invalid file: second header column is not "Taxonomy"
        fd, taxonomy_fp = mkstemp(suffix='.txt', dir=self.out_dir)
        close(fd)
        with open(taxonomy_fp, 'w') as f:
            f.write("Feature ID\tIt's gonna fail!\tConfidence\n")
            f.write("TACGGAGGA\tk__Bacteria;p__Bacteroidetes;c__Bacteroidia\t"
                    "0.9998743\n")
            f.write("TACGTAGGG\tk__Bacteria;p__Firmicutes;c__Clostridia\t"
                    "0.9999999\n")
        obs_success, obs_ainfo, obs_error = _validate_feature_data_taxonomy(
            {'plain_text': [taxonomy_fp]}, None, self.out_dir)
        self.assertIn("The file header seems wrong", obs_error)
        self.assertFalse(obs_success)
        self.assertIsNone(obs_ainfo)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    main()
| true | true |
f713e63d648cc648889f7311c7603febef324301 | 1,113 | py | Python | __scraping__/bit.do - requests/main.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 140 | 2017-02-21T22:49:04.000Z | 2022-03-22T17:51:58.000Z | __scraping__/bit.do - requests/main.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 5 | 2017-12-02T19:55:00.000Z | 2021-09-22T23:18:39.000Z | __scraping__/bit.do - requests/main.py | whitmans-max/python-examples | 881a8f23f0eebc76816a0078e19951893f0daaaa | [
"MIT"
] | 79 | 2017-01-25T10:53:33.000Z | 2022-03-11T16:13:57.000Z |
# date: 2019.04.21
# https://stackoverflow.com/a/55778640/1832058
# Example script: create a short URL through bit.do's AJAX endpoint.
import requests
# NOTE(review): this Session is configured with browser-like headers but is
# never used below -- requests.post() is called directly instead.
s = requests.Session()
s.headers.update({
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'pl,en-US;q=0.7,en;q=0.3',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
})
# Earlier experiment kept for reference: fetching the landing page first.
#r = s.get('http://bit.do/')
#print(r.status_code)
#print(r.cookies)
# ------------------------------------
headers={
    'X-Requested-With': 'XMLHttpRequest',  # need it: endpoint expects an AJAX request
    #'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:68.0) Gecko/20100101 Firefox/68.0',
    #'Cookie': 'permasession=1555801674|ole2ky65f9', #
}
data = {
    'action': 'shorten',
    'url': 'https://onet.pl',
    'url2': ' site2 ',  # need spaces around the value
    'url_hash': None,
    'url_stats_is_private': 0,
    'permasession': '1555801674|ole2ky65f9',  # need it
}
r = requests.post('http://bit.do/mod_perl/url-shortener.pl', headers=headers, data=data)
print(r.status_code)
print(r.json())
import datetime
# The permasession value embeds a Unix timestamp; decode it for inspection
# (the result is discarded when run as a script).
datetime.datetime.fromtimestamp(1555801674)
# Shorten a URL via bit.do's AJAX endpoint (comment-free variant of the
# script above this dump row).
import requests
# NOTE(review): this Session is configured but never used; the POST below
# goes through requests.post() directly.
s = requests.Session()
s.headers.update({
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'pl,en-US;q=0.7,en;q=0.3',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
})
headers={
    'X-Requested-With': 'XMLHttpRequest',
}
data = {
    'action': 'shorten',
    'url': 'https://onet.pl',
    'url2': ' site2 ',
    'url_hash': None,
    'url_stats_is_private': 0,
    'permasession': '1555801674|ole2ky65f9',
}
r = requests.post('http://bit.do/mod_perl/url-shortener.pl', headers=headers, data=data)
print(r.status_code)
print(r.json())
import datetime
# Decode the Unix timestamp embedded in the permasession value.
datetime.datetime.fromtimestamp(1555801674)
| true | true |
f713e76a06efdb4f9ded490c7a6cb8a5cf4c458f | 84,590 | py | Python | astropy/coordinates/sky_coordinate.py | methane/astropy | 1a065d5ce403e226799cfb3d606fda33be0a6c08 | [
"BSD-3-Clause"
] | null | null | null | astropy/coordinates/sky_coordinate.py | methane/astropy | 1a065d5ce403e226799cfb3d606fda33be0a6c08 | [
"BSD-3-Clause"
] | null | null | null | astropy/coordinates/sky_coordinate.py | methane/astropy | 1a065d5ce403e226799cfb3d606fda33be0a6c08 | [
"BSD-3-Clause"
] | null | null | null | import re
import copy
import warnings
import operator
import numpy as np
from astropy import _erfa as erfa
from astropy.utils.compat.misc import override__dir__
from astropy import units as u
from astropy.constants import c as speed_of_light
from astropy.utils.data_info import MixinInfo
from astropy.utils import ShapedLikeNDArray
from astropy.time import Time
from astropy.utils.exceptions import AstropyUserWarning
from .distances import Distance
from .angles import Angle
from .baseframe import (BaseCoordinateFrame, frame_transform_graph,
GenericFrame)
from .builtin_frames import ICRS, SkyOffsetFrame
from .representation import (SphericalRepresentation,
UnitSphericalRepresentation, SphericalDifferential)
from .sky_coordinate_parsers import (_get_frame_class, _get_frame_without_data,
_parse_coordinate_data)
__all__ = ['SkyCoord', 'SkyCoordInfo']
class SkyCoordInfo(MixinInfo):
    """
    Container for meta information like name, description, format. This is
    required when the object is used as a mixin column within a table, but can
    be used as a general way to store meta information.
    """
    attrs_from_parent = set(['unit'])  # Unit is read-only
    _supports_indexing = False
    @staticmethod
    def default_format(val):
        # Format a SkyCoord as comma-separated component values (one entry
        # per representation component, using each component's `.value`).
        repr_data = val.info._repr_data
        formats = ['{0.' + compname + '.value:}' for compname
                   in repr_data.components]
        return ','.join(formats).format(repr_data)
    @property
    def unit(self):
        # Composite unit string with one entry per representation component,
        # e.g. 'deg,deg,None'; 'None' marks a component without a unit.
        repr_data = self._repr_data
        unit = ','.join(str(getattr(repr_data, comp).unit) or 'None'
                        for comp in repr_data.components)
        return unit
    @property
    def _repr_data(self):
        # Representation data of the parent SkyCoord in frame units.  When the
        # declared representation is spherical but the stored data carries no
        # distance (UnitSphericalRepresentation), keep the unit-spherical
        # class so no artificial distance component is introduced.
        if self._parent is None:
            return None
        sc = self._parent
        if (issubclass(sc.representation_type, SphericalRepresentation)
                and isinstance(sc.data, UnitSphericalRepresentation)):
            repr_data = sc.represent_as(sc.data.__class__, in_frame_units=True)
        else:
            repr_data = sc.represent_as(sc.representation_type,
                                        in_frame_units=True)
        return repr_data
    def _represent_as_dict(self):
        """Represent the parent SkyCoord as a dict of columns/attributes.

        Used when serializing a SkyCoord mixin column, e.g. while writing a
        table to file.
        """
        obj = self._parent
        attrs = (list(obj.representation_component_names) +
                 list(frame_transform_graph.frame_attributes.keys()))
        # Don't output distance if it is all unitless 1.0
        if 'distance' in attrs and np.all(obj.distance == 1.0):
            attrs.remove('distance')
        out = super()._represent_as_dict(attrs)
        out['representation_type'] = obj.representation_type.get_name()
        out['frame'] = obj.frame.name
        # Note that obj.info.unit is a fake composite unit (e.g. 'deg,deg,None'
        # or None,None,m) and is not stored. The individual attributes have
        # units.
        return out
    def new_like(self, skycoords, length, metadata_conflicts='warn', name=None):
        """
        Return a new SkyCoord instance which is consistent with the input
        SkyCoord objects ``skycoords`` and has ``length`` rows.  Being
        "consistent" is defined as being able to set an item from one to each of
        the rest without any exception being raised.

        This is intended for creating a new SkyCoord instance whose elements can
        be set in-place for table operations like join or vstack.  This is used
        when a SkyCoord object is used as a mixin column in an astropy Table.

        The data values are not predictable and it is expected that the consumer
        of the object will fill in all values.

        Parameters
        ----------
        skycoords : list
            List of input SkyCoord objects
        length : int
            Length of the output skycoord object
        metadata_conflicts : str ('warn'|'error'|'silent')
            How to handle metadata conflicts
        name : str
            Output name (sets output skycoord.info.name)

        Returns
        -------
        skycoord : SkyCoord (or subclass)
            Instance of this class consistent with ``skycoords``
        """
        # Get merged info attributes like shape, dtype, format, description, etc.
        attrs = self.merge_cols_attributes(skycoords, metadata_conflicts, name,
                                           ('meta', 'description'))
        skycoord0 = skycoords[0]
        # Make a new SkyCoord object with the desired length and attributes
        # by using the _apply / __getitem__ machinery to effectively return
        # skycoord0[[0, 0, ..., 0, 0]]. This will have all the right frame
        # attributes with the right shape.
        indexes = np.zeros(length, dtype=np.int64)
        out = skycoord0[indexes]
        # Use __setitem__ machinery to check for consistency of all skycoords
        for skycoord in skycoords[1:]:
            try:
                out[0] = skycoord[0]
            except Exception as err:
                raise ValueError(f'input skycoords are inconsistent: {err}')
        # Set (merged) info attributes
        for attr in ('name', 'meta', 'description'):
            if attr in attrs:
                setattr(out.info, attr, attrs[attr])
        return out
class SkyCoord(ShapedLikeNDArray):
"""High-level object providing a flexible interface for celestial coordinate
representation, manipulation, and transformation between systems.
The `SkyCoord` class accepts a wide variety of inputs for initialization. At
a minimum these must provide one or more celestial coordinate values with
unambiguous units. Inputs may be scalars or lists/tuples/arrays, yielding
scalar or array coordinates (can be checked via ``SkyCoord.isscalar``).
Typically one also specifies the coordinate frame, though this is not
required. The general pattern for spherical representations is::
SkyCoord(COORD, [FRAME], keyword_args ...)
SkyCoord(LON, LAT, [FRAME], keyword_args ...)
SkyCoord(LON, LAT, [DISTANCE], frame=FRAME, unit=UNIT, keyword_args ...)
SkyCoord([FRAME], <lon_attr>=LON, <lat_attr>=LAT, keyword_args ...)
It is also possible to input coordinate values in other representations
such as cartesian or cylindrical. In this case one includes the keyword
argument ``representation_type='cartesian'`` (for example) along with data
in ``x``, ``y``, and ``z``.
See also: http://docs.astropy.org/en/stable/coordinates/
Examples
--------
The examples below illustrate common ways of initializing a `SkyCoord`
object. For a complete description of the allowed syntax see the
full coordinates documentation. First some imports::
>>> from astropy.coordinates import SkyCoord # High-level coordinates
>>> from astropy.coordinates import ICRS, Galactic, FK4, FK5 # Low-level frames
>>> from astropy.coordinates import Angle, Latitude, Longitude # Angles
>>> import astropy.units as u
The coordinate values and frame specification can now be provided using
positional and keyword arguments::
>>> c = SkyCoord(10, 20, unit="deg") # defaults to ICRS frame
>>> c = SkyCoord([1, 2, 3], [-30, 45, 8], frame="icrs", unit="deg") # 3 coords
>>> coords = ["1:12:43.2 +31:12:43", "1 12 43.2 +31 12 43"]
>>> c = SkyCoord(coords, frame=FK4, unit=(u.hourangle, u.deg), obstime="J1992.21")
>>> c = SkyCoord("1h12m43.2s +1d12m43s", frame=Galactic) # Units from string
>>> c = SkyCoord(frame="galactic", l="1h12m43.2s", b="+1d12m43s")
>>> ra = Longitude([1, 2, 3], unit=u.deg) # Could also use Angle
>>> dec = np.array([4.5, 5.2, 6.3]) * u.deg # Astropy Quantity
>>> c = SkyCoord(ra, dec, frame='icrs')
>>> c = SkyCoord(frame=ICRS, ra=ra, dec=dec, obstime='2001-01-02T12:34:56')
>>> c = FK4(1 * u.deg, 2 * u.deg) # Uses defaults for obstime, equinox
>>> c = SkyCoord(c, obstime='J2010.11', equinox='B1965') # Override defaults
>>> c = SkyCoord(w=0, u=1, v=2, unit='kpc', frame='galactic',
... representation_type='cartesian')
>>> c = SkyCoord([ICRS(ra=1*u.deg, dec=2*u.deg), ICRS(ra=3*u.deg, dec=4*u.deg)])
Velocity components (proper motions or radial velocities) can also be
provided in a similar manner::
>>> c = SkyCoord(ra=1*u.deg, dec=2*u.deg, radial_velocity=10*u.km/u.s)
>>> c = SkyCoord(ra=1*u.deg, dec=2*u.deg, pm_ra_cosdec=2*u.mas/u.yr, pm_dec=1*u.mas/u.yr)
As shown, the frame can be a `~astropy.coordinates.BaseCoordinateFrame`
class or the corresponding string alias. The frame classes that are built in
to astropy are `ICRS`, `FK5`, `FK4`, `FK4NoETerms`, and `Galactic`.
The string aliases are simply lower-case versions of the class name, and
allow for creating a `SkyCoord` object and transforming frames without
explicitly importing the frame classes.
Parameters
----------
frame : `~astropy.coordinates.BaseCoordinateFrame` class or string, optional
        Type of coordinate frame this `SkyCoord` should represent. Defaults
        to ICRS if not given or given as None.
unit : `~astropy.units.Unit`, string, or tuple of :class:`~astropy.units.Unit` or str, optional
Units for supplied ``LON`` and ``LAT`` values, respectively. If
only one unit is supplied then it applies to both ``LON`` and
``LAT``.
obstime : valid `~astropy.time.Time` initializer, optional
Time(s) of observation.
equinox : valid `~astropy.time.Time` initializer, optional
Coordinate frame equinox.
representation_type : str or Representation class
Specifies the representation, e.g. 'spherical', 'cartesian', or
'cylindrical'. This affects the positional args and other keyword args
which must correspond to the given representation.
copy : bool, optional
If `True` (default), a copy of any coordinate data is made. This
argument can only be passed in as a keyword argument.
**keyword_args
Other keyword arguments as applicable for user-defined coordinate frames.
Common options include:
ra, dec : valid `~astropy.coordinates.Angle` initializer, optional
RA and Dec for frames where ``ra`` and ``dec`` are keys in the
frame's ``representation_component_names``, including `ICRS`,
`FK5`, `FK4`, and `FK4NoETerms`.
pm_ra_cosdec, pm_dec : `~astropy.units.Quantity`, optional
Proper motion components, in angle per time units.
l, b : valid `~astropy.coordinates.Angle` initializer, optional
Galactic ``l`` and ``b`` for for frames where ``l`` and ``b`` are
keys in the frame's ``representation_component_names``, including
the `Galactic` frame.
pm_l_cosb, pm_b : `~astropy.units.Quantity`, optional
Proper motion components in the `Galactic` frame, in angle per time
units.
x, y, z : float or `~astropy.units.Quantity`, optional
Cartesian coordinates values
u, v, w : float or `~astropy.units.Quantity`, optional
Cartesian coordinates values for the Galactic frame.
radial_velocity : `~astropy.units.Quantity`, optional
The component of the velocity along the line-of-sight (i.e., the
radial direction), in velocity units.
"""
# Declare that SkyCoord can be used as a Table column by defining the
# info property.
info = SkyCoordInfo()
    def __init__(self, *args, copy=True, **kwargs):
        # The accepted argument forms are documented in the class docstring.
        # these are frame attributes set on this SkyCoord but *not* a part of
        # the frame object this SkyCoord contains
        self._extra_frameattr_names = set()
        # If all that is passed in is a frame instance that already has data,
        # we should bypass all of the parsing and logic below. This is here
        # to make this the fastest way to create a SkyCoord instance. Many of
        # the classmethods implemented for performance enhancements will use
        # this as the initialization path
        if (len(args) == 1 and len(kwargs) == 0
                and isinstance(args[0], (BaseCoordinateFrame, SkyCoord))):
            coords = args[0]
            if isinstance(coords, SkyCoord):
                self._extra_frameattr_names = coords._extra_frameattr_names
                self.info = coords.info
                # Copy over any extra frame attributes
                for attr_name in self._extra_frameattr_names:
                    # Setting it will also validate it.
                    setattr(self, attr_name, getattr(coords, attr_name))
                coords = coords.frame
            if not coords.has_data:
                raise ValueError('Cannot initialize from a coordinate frame '
                                 'instance without coordinate data')
            if copy:
                self._sky_coord_frame = coords.copy()
            else:
                self._sky_coord_frame = coords
        else:
            # Get the frame instance without coordinate data but with all frame
            # attributes set - these could either have been passed in with the
            # frame as an instance, or passed in as kwargs here
            frame_cls, frame_kwargs = _get_frame_without_data(args, kwargs)
            # Parse the args and kwargs to assemble a sanitized and validated
            # kwargs dict for initializing attributes for this object and for
            # creating the internal self._sky_coord_frame object
            args = list(args)  # Make it mutable
            skycoord_kwargs, components, info = _parse_coordinate_data(
                frame_cls(**frame_kwargs), args, kwargs)
            # In the above two parsing functions, these kwargs were identified
            # as valid frame attributes for *some* frame, but not the frame that
            # this SkyCoord will have. We keep these attributes as special
            # skycoord frame attributes:
            for attr in skycoord_kwargs:
                # Setting it will also validate it.
                setattr(self, attr, skycoord_kwargs[attr])
            if info is not None:
                self.info = info
            # Finally make the internal coordinate object.
            frame_kwargs.update(components)
            self._sky_coord_frame = frame_cls(copy=copy, **frame_kwargs)
        # Regardless of which path was taken above, a SkyCoord must wrap a
        # frame that actually holds coordinate data.
        if not self._sky_coord_frame.has_data:
            raise ValueError('Cannot create a SkyCoord without data')
    @property
    def frame(self):
        """The coordinate frame instance (with data) wrapped by this SkyCoord."""
        return self._sky_coord_frame
    @property
    def representation_type(self):
        """The representation class used for the underlying frame data."""
        return self.frame.representation_type
    @representation_type.setter
    def representation_type(self, value):
        self.frame.representation_type = value
    # TODO: remove these in future
    @property
    def representation(self):
        """Alias of the frame's ``representation``; slated for removal (see TODO)."""
        return self.frame.representation
    @representation.setter
    def representation(self, value):
        self.frame.representation = value
    @property
    def shape(self):
        """Shape of the underlying frame data (as for `~numpy.ndarray.shape`)."""
        return self.frame.shape
def __eq__(self, value):
"""Equality operator for SkyCoord
This implements strict equality and requires that the frames are
equivalent, extra frame attributes are equivalent, and that the
representation data are exactly equal.
"""
# Make sure that any extra frame attribute names are equivalent.
for attr in self._extra_frameattr_names | value._extra_frameattr_names:
if not self.frame._frameattr_equiv(getattr(self, attr),
getattr(value, attr)):
raise ValueError(f"cannot compare: extra frame attribute "
f"'{attr}' is not equivalent "
f"(perhaps compare the frames directly to avoid "
f"this exception)")
return self._sky_coord_frame == value._sky_coord_frame
def __ne__(self, value):
return np.logical_not(self == value)
    def _apply(self, method, *args, **kwargs):
        """Create a new instance, applying a method to the underlying data.

        In typical usage, the method is any of the shape-changing methods for
        `~numpy.ndarray` (``reshape``, ``swapaxes``, etc.), as well as those
        picking particular elements (``__getitem__``, ``take``, etc.), which
        are all defined in `~astropy.utils.shapes.ShapedLikeNDArray`. It will be
        applied to the underlying arrays in the representation (e.g., ``x``,
        ``y``, and ``z`` for `~astropy.coordinates.CartesianRepresentation`),
        as well as to any frame attributes that have a shape, with the results
        used to create a new instance.

        Internally, it is also used to apply functions to the above parts
        (in particular, `~numpy.broadcast_to`).

        Parameters
        ----------
        method : str or callable
            If str, it is the name of a method that is applied to the internal
            ``components``. If callable, the function is applied.
        args : tuple
            Any positional arguments for ``method``.
        kwargs : dict
            Any keyword arguments for ``method``.
        """
        def apply_method(value):
            # ShapedLikeNDArray values apply the method to themselves;
            # anything else gets the callable or the named method directly.
            if isinstance(value, ShapedLikeNDArray):
                return value._apply(method, *args, **kwargs)
            else:
                if callable(method):
                    return method(value, *args, **kwargs)
                else:
                    return getattr(value, method)(*args, **kwargs)
        # create a new but empty instance, and copy over stuff
        new = super().__new__(self.__class__)
        new._sky_coord_frame = self._sky_coord_frame._apply(method,
                                                            *args, **kwargs)
        new._extra_frameattr_names = self._extra_frameattr_names.copy()
        for attr in self._extra_frameattr_names:
            value = getattr(self, attr)
            if getattr(value, 'shape', ()):
                value = apply_method(value)
            elif method == 'copy' or method == 'flatten':
                # flatten should copy also for a single element array, but
                # we cannot use it directly for array scalars, since it
                # always returns a one-dimensional array. So, just copy.
                value = copy.copy(value)
            # Store under the private name so the attribute descriptor's
            # validation is not re-run on already-validated data.
            setattr(new, '_' + attr, value)
        # Copy other 'info' attr only if it has actually been defined.
        # See PR #3898 for further explanation and justification, along
        # with Quantity.__array_finalize__
        if 'info' in self.__dict__:
            new.info = self.info
        return new
    def __setitem__(self, item, value):
        """Implement self[item] = value for SkyCoord

        The right hand ``value`` must be strictly consistent with self:
        - Identical class
        - Equivalent frames
        - Identical representation_types
        - Identical representation differentials keys
        - Identical frame attributes
        - Identical "extra" frame attributes (e.g. obstime for an ICRS coord)

        With these caveats the setitem ends up as effectively a setitem on
        the representation data::

            self.frame.data[item] = value.frame.data
        """
        if self.__class__ is not value.__class__:
            raise TypeError(f'can only set from object of same class: '
                            f'{self.__class__.__name__} vs. '
                            f'{value.__class__.__name__}')
        # Make sure that any extra frame attribute names are equivalent.
        for attr in self._extra_frameattr_names | value._extra_frameattr_names:
            if not self.frame._frameattr_equiv(getattr(self, attr),
                                               getattr(value, attr)):
                raise ValueError(f'attribute {attr} is not equivalent')
        # Set the frame values. This checks frame equivalence and also clears
        # the cache to ensure that the object is not in an inconsistent state.
        self._sky_coord_frame[item] = value._sky_coord_frame
    def insert(self, obj, values, axis=0):
        """
        Insert coordinate values before the given indices in the object and
        return a new Frame object.

        The values to be inserted must conform to the rules for in-place setting
        of ``SkyCoord`` objects.

        The API signature matches the ``np.insert`` API, but is more limited.
        The specification of insert index ``obj`` must be a single integer,
        and the ``axis`` must be ``0`` for simple insertion before the index.

        Parameters
        ----------
        obj : int
            Integer index before which ``values`` is inserted.
        values : array_like
            Value(s) to insert. If the type of ``values`` is different
            from that of quantity, ``values`` is converted to the matching type.
        axis : int, optional
            Axis along which to insert ``values``. Default is 0, which is the
            only allowed value and will insert a row.

        Returns
        -------
        out : `~astropy.coordinates.SkyCoord` instance
            New coordinate object with inserted value(s)
        """
        # Validate inputs: obj arg is integer, axis=0, self is not a scalar, and
        # input index is in bounds.
        try:
            idx0 = operator.index(obj)
        except TypeError:
            raise TypeError('obj arg must be an integer')
        if axis != 0:
            raise ValueError('axis must be 0')
        if not self.shape:
            raise TypeError('cannot insert into scalar {} object'
                            .format(self.__class__.__name__))
        if abs(idx0) > len(self):
            raise IndexError('index {} is out of bounds for axis 0 with size {}'
                             .format(idx0, len(self)))
        # Turn negative index into positive
        if idx0 < 0:
            idx0 = len(self) + idx0
        # A scalar `values` (empty shape) still occupies one output row.
        n_values = len(values) if values.shape else 1
        # Finally make the new object with the correct length and set values for the
        # three sections, before insert, the insert, and after the insert.
        out = self.__class__.info.new_like([self], len(self) + n_values, name=self.info.name)
        # Set the output values. This is where validation of `values` takes place to ensure
        # that it can indeed be inserted.
        out[:idx0] = self[:idx0]
        out[idx0:idx0 + n_values] = values
        out[idx0 + n_values:] = self[idx0:]
        return out
    def transform_to(self, frame, merge_attributes=True):
        """Transform this coordinate to a new frame.

        The precise frame transformed to depends on ``merge_attributes``.
        If `False`, the destination frame is used exactly as passed in.
        But this is often not quite what one wants. E.g., suppose one wants to
        transform an ICRS coordinate that has an obstime attribute to FK4; in
        this case, one likely would want to use this information. Thus, the
        default for ``merge_attributes`` is `True`, in which the precedence is
        as follows: (1) explicitly set (i.e., non-default) values in the
        destination frame; (2) explicitly set values in the source; (3) default
        value in the destination frame.

        Note that in either case, any explicitly set attributes on the source
        `SkyCoord` that are not part of the destination frame's definition are
        kept (stored on the resulting `SkyCoord`), and thus one can round-trip
        (e.g., from FK4 to ICRS to FK4 without losing obstime).

        Parameters
        ----------
        frame : str, `BaseCoordinateFrame` class or instance, or `SkyCoord` instance
            The frame to transform this coordinate into. If a `SkyCoord`, the
            underlying frame is extracted, and all other information ignored.
        merge_attributes : bool, optional
            Whether the default attributes in the destination frame are allowed
            to be overridden by explicitly set attributes in the source
            (see note above; default: `True`).

        Returns
        -------
        coord : `SkyCoord`
            A new object with this coordinate represented in the `frame` frame.

        Raises
        ------
        ValueError
            If there is no possible transformation route.
        """
        from astropy.coordinates.errors import ConvertError
        frame_kwargs = {}
        # Frame name (string) or frame class? Coerce into an instance.
        # Inputs that are neither (e.g. a frame or SkyCoord instance) fall
        # through unchanged and are handled below.
        try:
            frame = _get_frame_class(frame)()
        except Exception:
            pass
        if isinstance(frame, SkyCoord):
            frame = frame.frame  # Change to underlying coord frame instance
        if isinstance(frame, BaseCoordinateFrame):
            new_frame_cls = frame.__class__
            # Get frame attributes, allowing defaults to be overridden by
            # explicitly set attributes of the source if ``merge_attributes``.
            for attr in frame_transform_graph.frame_attributes:
                self_val = getattr(self, attr, None)
                frame_val = getattr(frame, attr, None)
                if (frame_val is not None
                        and not (merge_attributes
                                 and frame.is_frame_attr_default(attr))):
                    frame_kwargs[attr] = frame_val
                elif (self_val is not None
                        and not self.is_frame_attr_default(attr)):
                    frame_kwargs[attr] = self_val
                elif frame_val is not None:
                    frame_kwargs[attr] = frame_val
        else:
            raise ValueError('Transform `frame` must be a frame name, class, or instance')
        # Get the composite transform to the new frame
        trans = frame_transform_graph.get_transform(self.frame.__class__, new_frame_cls)
        if trans is None:
            raise ConvertError('Cannot transform from {} to {}'
                               .format(self.frame.__class__, new_frame_cls))
        # Make a generic frame which will accept all the frame kwargs that
        # are provided and allow for transforming through intermediate frames
        # which may require one or more of those kwargs.
        generic_frame = GenericFrame(frame_kwargs)
        # Do the transformation, returning a coordinate frame of the desired
        # final type (not generic).
        new_coord = trans(self.frame, generic_frame)
        # Finally make the new SkyCoord object from the `new_coord` and
        # remaining frame_kwargs that are not frame_attributes in `new_coord`.
        for attr in (set(new_coord.get_frame_attr_names()) &
                     set(frame_kwargs.keys())):
            frame_kwargs.pop(attr)
        return self.__class__(new_coord, **frame_kwargs)
    def apply_space_motion(self, new_obstime=None, dt=None):
        """
        Compute the position of the source represented by this coordinate object
        to a new time using the velocities stored in this object and assuming
        linear space motion (including relativistic corrections). This is
        sometimes referred to as an "epoch transformation."

        The initial time before the evolution is taken from the ``obstime``
        attribute of this coordinate. Note that this method currently does not
        support evolving coordinates where the *frame* has an ``obstime`` frame
        attribute, so the ``obstime`` is only used for storing the before and
        after times, not actually as an attribute of the frame. Alternatively,
        if ``dt`` is given, an ``obstime`` need not be provided at all.

        Parameters
        ----------
        new_obstime : `~astropy.time.Time`, optional
            The time at which to evolve the position to. Requires that the
            ``obstime`` attribute be present on this frame.
        dt : `~astropy.units.Quantity`, `~astropy.time.TimeDelta`, optional
            An amount of time to evolve the position of the source. Cannot be
            given at the same time as ``new_obstime``.

        Returns
        -------
        new_coord : `SkyCoord`
            A new coordinate object with the evolved location of this coordinate
            at the new time. ``obstime`` will be set on this object to the new
            time only if ``self`` also has ``obstime``.

        Raises
        ------
        ValueError
            If neither or both of ``new_obstime`` and ``dt`` are given, if this
            object carries no velocity data, or if ``new_obstime`` is given but
            this object has no ``obstime``.
        NotImplementedError
            If this object's *frame* has an ``obstime`` frame attribute.
        """
        # Exactly one of `new_obstime` / `dt` must be supplied.
        if (new_obstime is None and dt is None or
                new_obstime is not None and dt is not None):
            raise ValueError("You must specify one of `new_obstime` or `dt`, "
                             "but not both.")

        # Validate that we have velocity info
        if 's' not in self.frame.data.differentials:
            raise ValueError('SkyCoord requires velocity data to evolve the '
                             'position.')

        if 'obstime' in self.frame.frame_attributes:
            raise NotImplementedError("Updating the coordinates in a frame "
                                      "with explicit time dependence is "
                                      "currently not supported. If you would "
                                      "like this functionality, please open an "
                                      "issue on github:\n"
                                      "https://github.com/astropy/astropy")

        if new_obstime is not None and self.obstime is None:
            # If no obstime is already on this object, raise an error if a new
            # obstime is passed: we need to know the time / epoch at which the
            # the position / velocity were measured initially
            raise ValueError('This object has no associated `obstime`. '
                             'apply_space_motion() must receive a time '
                             'difference, `dt`, and not a new obstime.')

        # Compute t1 and t2, the times used in the starpm call, which *only*
        # uses them to compute a delta-time
        t1 = self.obstime
        if dt is None:
            # self.obstime is not None and new_obstime is not None b/c of above
            # checks
            t2 = new_obstime
        else:
            # new_obstime is definitely None b/c of the above checks
            if t1 is None:
                # MAGIC NUMBER: if the current SkyCoord object has no obstime,
                # assume J2000 to do the dt offset. This is not actually used
                # for anything except a delta-t in starpm, so it's OK that it's
                # not necessarily the "real" obstime
                t1 = Time('J2000')
                new_obstime = None  # we don't actually know the inital obstime
                t2 = t1 + dt
            else:
                t2 = t1 + dt
                new_obstime = t2
        # starpm wants tdb time
        t1 = t1.tdb
        t2 = t2.tdb

        # proper motion in RA should not include the cos(dec) term, see the
        # erfa function eraStarpv, comment (4). So we convert to the regular
        # spherical differentials.
        icrsrep = self.icrs.represent_as(SphericalRepresentation, SphericalDifferential)
        icrsvel = icrsrep.differentials['s']

        # Missing distance and/or radial velocity are replaced by 0 by
        # convention (see eraStarpm docs); `parallax_zero` remembers whether
        # a real distance was available so it can be dropped from the result.
        parallax_zero = False
        try:
            plx = icrsrep.distance.to_value(u.arcsecond, u.parallax())
        except u.UnitConversionError:  # No distance: set to 0 by convention
            plx = 0.
            parallax_zero = True

        try:
            rv = icrsvel.d_distance.to_value(u.km/u.s)
        except u.UnitConversionError:  # No RV
            rv = 0.

        starpm = erfa.pmsafe(icrsrep.lon.radian, icrsrep.lat.radian,
                             icrsvel.d_lon.to_value(u.radian/u.yr),
                             icrsvel.d_lat.to_value(u.radian/u.yr),
                             plx, rv, t1.jd1, t1.jd2, t2.jd1, t2.jd2)

        if parallax_zero:
            new_distance = None
        else:
            new_distance = Distance(parallax=starpm[4] << u.arcsec)

        icrs2 = ICRS(ra=u.Quantity(starpm[0], u.radian, copy=False),
                     dec=u.Quantity(starpm[1], u.radian, copy=False),
                     pm_ra=u.Quantity(starpm[2], u.radian/u.yr, copy=False),
                     pm_dec=u.Quantity(starpm[3], u.radian/u.yr, copy=False),
                     distance=new_distance,
                     radial_velocity=u.Quantity(starpm[5], u.km/u.s, copy=False),
                     differential_type=SphericalDifferential)

        # Update the obstime of the returned SkyCoord, and need to carry along
        # the frame attributes
        frattrs = {attrnm: getattr(self, attrnm)
                   for attrnm in self._extra_frameattr_names}
        frattrs['obstime'] = new_obstime
        return self.__class__(icrs2, **frattrs).transform_to(self.frame)
def _is_name(self, string):
"""
Returns whether a string is one of the aliases for the frame.
"""
return (self.frame.name == string or
(isinstance(self.frame.name, list) and string in self.frame.name))
    def __getattr__(self, attr):
        """
        Overrides getattr to return coordinates that this can be transformed
        to, based on the alias attr in the master transform graph.
        """
        # NOTE: __getattr__ only runs when normal attribute lookup has already
        # failed.  The '_sky_coord_frame' guard protects against recursion
        # during unpickling/partial construction, when the underlying frame
        # attribute may not exist yet.
        if '_sky_coord_frame' in self.__dict__:
            if self._is_name(attr):
                return self  # Should this be a deepcopy of self?
            # Anything in the set of all possible frame_attr_names is handled
            # here. If the attr is relevant for the current frame then delegate
            # to self.frame otherwise get it from self._<attr>.
            if attr in frame_transform_graph.frame_attributes:
                if attr in self.frame.get_frame_attr_names():
                    return getattr(self.frame, attr)
                else:
                    # Extra frame attributes are stored under a leading
                    # underscore; see __setattr__ below.
                    return getattr(self, '_' + attr, None)
            # Some attributes might not fall in the above category but still
            # are available through self._sky_coord_frame.
            if not attr.startswith('_') and hasattr(self._sky_coord_frame, attr):
                return getattr(self._sky_coord_frame, attr)
            # Try to interpret as a new frame for transforming.
            frame_cls = frame_transform_graph.lookup_name(attr)
            if frame_cls is not None and self.frame.is_transformable_to(frame_cls):
                return self.transform_to(attr)
        # Fail
        raise AttributeError("'{}' object has no attribute '{}'"
                             .format(self.__class__.__name__, attr))
    def __setattr__(self, attr, val):
        """
        Set an attribute, keeping everything exposed through ``__getattr__``
        immutable while still allowing frame attributes (stored privately)
        and ordinary Python attributes to be set.
        """
        # This is to make anything available through __getattr__ immutable
        if '_sky_coord_frame' in self.__dict__:
            if self._is_name(attr):
                raise AttributeError(f"'{attr}' is immutable")
            if not attr.startswith('_') and hasattr(self._sky_coord_frame, attr):
                setattr(self._sky_coord_frame, attr, val)
                return
            frame_cls = frame_transform_graph.lookup_name(attr)
            if frame_cls is not None and self.frame.is_transformable_to(frame_cls):
                raise AttributeError(f"'{attr}' is immutable")
        if attr in frame_transform_graph.frame_attributes:
            # All possible frame attributes can be set, but only via a private
            # variable. See __getattr__ above.
            super().__setattr__('_' + attr, val)
            # Validate it
            frame_transform_graph.frame_attributes[attr].__get__(self)
            # And add to set of extra attributes
            self._extra_frameattr_names |= {attr}
        else:
            # Otherwise, do the standard Python attribute setting
            super().__setattr__(attr, val)
    def __delattr__(self, attr):
        """
        Delete an attribute, with the same immutability rules as
        ``__setattr__``: frame names and transformable-frame aliases cannot
        be deleted; extra frame attributes are removed from their private
        storage; everything else is handled by standard Python deletion.
        """
        # mirror __setattr__ above
        if '_sky_coord_frame' in self.__dict__:
            if self._is_name(attr):
                raise AttributeError(f"'{attr}' is immutable")
            if not attr.startswith('_') and hasattr(self._sky_coord_frame,
                                                    attr):
                delattr(self._sky_coord_frame, attr)
                return
            frame_cls = frame_transform_graph.lookup_name(attr)
            if frame_cls is not None and self.frame.is_transformable_to(frame_cls):
                raise AttributeError(f"'{attr}' is immutable")
        if attr in frame_transform_graph.frame_attributes:
            # All possible frame attributes can be deleted, but need to remove
            # the corresponding private variable. See __getattr__ above.
            super().__delattr__('_' + attr)
            # Also remove it from the set of extra attributes
            self._extra_frameattr_names -= {attr}
        else:
            # Otherwise, do the standard Python attribute setting
            super().__delattr__(attr)
@override__dir__
def __dir__(self):
"""
Override the builtin `dir` behavior to include:
- Transforms available by aliases
- Attribute / methods of the underlying self.frame object
"""
# determine the aliases that this can be transformed to.
dir_values = set()
for name in frame_transform_graph.get_names():
frame_cls = frame_transform_graph.lookup_name(name)
if self.frame.is_transformable_to(frame_cls):
dir_values.add(name)
# Add public attributes of self.frame
dir_values.update(set(attr for attr in dir(self.frame) if not attr.startswith('_')))
# Add all possible frame attributes
dir_values.update(frame_transform_graph.frame_attributes.keys())
return dir_values
def __repr__(self):
clsnm = self.__class__.__name__
coonm = self.frame.__class__.__name__
frameattrs = self.frame._frame_attrs_repr()
if frameattrs:
frameattrs = ': ' + frameattrs
data = self.frame._data_repr()
if data:
data = ': ' + data
return '<{clsnm} ({coonm}{frameattrs}){data}>'.format(**locals())
def to_string(self, style='decimal', **kwargs):
"""
A string representation of the coordinates.
The default styles definitions are::
'decimal': 'lat': {'decimal': True, 'unit': "deg"}
'lon': {'decimal': True, 'unit': "deg"}
'dms': 'lat': {'unit': "deg"}
'lon': {'unit': "deg"}
'hmsdms': 'lat': {'alwayssign': True, 'pad': True, 'unit': "deg"}
'lon': {'pad': True, 'unit': "hour"}
See :meth:`~astropy.coordinates.Angle.to_string` for details and
keyword arguments (the two angles forming the coordinates are are
both :class:`~astropy.coordinates.Angle` instances). Keyword
arguments have precedence over the style defaults and are passed
to :meth:`~astropy.coordinates.Angle.to_string`.
Parameters
----------
style : {'hmsdms', 'dms', 'decimal'}
The formatting specification to use. These encode the three most
common ways to represent coordinates. The default is `decimal`.
kwargs
Keyword args passed to :meth:`~astropy.coordinates.Angle.to_string`.
"""
sph_coord = self.frame.represent_as(SphericalRepresentation)
styles = {'hmsdms': {'lonargs': {'unit': u.hour, 'pad': True},
'latargs': {'unit': u.degree, 'pad': True, 'alwayssign': True}},
'dms': {'lonargs': {'unit': u.degree},
'latargs': {'unit': u.degree}},
'decimal': {'lonargs': {'unit': u.degree, 'decimal': True},
'latargs': {'unit': u.degree, 'decimal': True}}
}
lonargs = {}
latargs = {}
if style in styles:
lonargs.update(styles[style]['lonargs'])
latargs.update(styles[style]['latargs'])
else:
raise ValueError('Invalid style. Valid options are: {}'.format(",".join(styles)))
lonargs.update(kwargs)
latargs.update(kwargs)
if np.isscalar(sph_coord.lon.value):
coord_string = (sph_coord.lon.to_string(**lonargs) +
" " + sph_coord.lat.to_string(**latargs))
else:
coord_string = []
for lonangle, latangle in zip(sph_coord.lon.ravel(), sph_coord.lat.ravel()):
coord_string += [(lonangle.to_string(**lonargs) +
" " + latangle.to_string(**latargs))]
if len(sph_coord.shape) > 1:
coord_string = np.array(coord_string).reshape(sph_coord.shape)
return coord_string
def is_equivalent_frame(self, other):
"""
Checks if this object's frame as the same as that of the ``other``
object.
To be the same frame, two objects must be the same frame class and have
the same frame attributes. For two `SkyCoord` objects, *all* of the
frame attributes have to match, not just those relevant for the object's
frame.
Parameters
----------
other : SkyCoord or BaseCoordinateFrame
The other object to check.
Returns
-------
isequiv : bool
True if the frames are the same, False if not.
Raises
------
TypeError
If ``other`` isn't a `SkyCoord` or a `BaseCoordinateFrame` or subclass.
"""
if isinstance(other, BaseCoordinateFrame):
return self.frame.is_equivalent_frame(other)
elif isinstance(other, SkyCoord):
if other.frame.name != self.frame.name:
return False
for fattrnm in frame_transform_graph.frame_attributes:
if not BaseCoordinateFrame._frameattr_equiv(getattr(self, fattrnm),
getattr(other, fattrnm)):
return False
return True
else:
# not a BaseCoordinateFrame nor a SkyCoord object
raise TypeError("Tried to do is_equivalent_frame on something that "
"isn't frame-like")
# High-level convenience methods
def separation(self, other):
"""
Computes on-sky separation between this coordinate and another.
.. note::
If the ``other`` coordinate object is in a different frame, it is
first transformed to the frame of this object. This can lead to
unintuitive behavior if not accounted for. Particularly of note is
that ``self.separation(other)`` and ``other.separation(self)`` may
not give the same answer in this case.
For more on how to use this (and related) functionality, see the
examples in :doc:`/coordinates/matchsep`.
Parameters
----------
other : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The coordinate to get the separation to.
Returns
-------
sep : `~astropy.coordinates.Angle`
The on-sky separation between this and the ``other`` coordinate.
Notes
-----
The separation is calculated using the Vincenty formula, which
is stable at all locations, including poles and antipodes [1]_.
.. [1] https://en.wikipedia.org/wiki/Great-circle_distance
"""
from . import Angle
from .angle_utilities import angular_separation
if not self.is_equivalent_frame(other):
try:
kwargs = {'merge_attributes': False} if isinstance(other, SkyCoord) else {}
other = other.transform_to(self, **kwargs)
except TypeError:
raise TypeError('Can only get separation to another SkyCoord '
'or a coordinate frame with data')
lon1 = self.spherical.lon
lat1 = self.spherical.lat
lon2 = other.spherical.lon
lat2 = other.spherical.lat
# Get the separation as a Quantity, convert to Angle in degrees
sep = angular_separation(lon1, lat1, lon2, lat2)
return Angle(sep, unit=u.degree)
def separation_3d(self, other):
"""
Computes three dimensional separation between this coordinate
and another.
For more on how to use this (and related) functionality, see the
examples in :doc:`/coordinates/matchsep`.
Parameters
----------
other : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The coordinate to get the separation to.
Returns
-------
sep : `~astropy.coordinates.Distance`
The real-space distance between these two coordinates.
Raises
------
ValueError
If this or the other coordinate do not have distances.
"""
if not self.is_equivalent_frame(other):
try:
kwargs = {'merge_attributes': False} if isinstance(other, SkyCoord) else {}
other = other.transform_to(self, **kwargs)
except TypeError:
raise TypeError('Can only get separation to another SkyCoord '
'or a coordinate frame with data')
if issubclass(self.data.__class__, UnitSphericalRepresentation):
raise ValueError('This object does not have a distance; cannot '
'compute 3d separation.')
if issubclass(other.data.__class__, UnitSphericalRepresentation):
raise ValueError('The other object does not have a distance; '
'cannot compute 3d separation.')
c1 = self.cartesian.without_differentials()
c2 = other.cartesian.without_differentials()
return Distance((c1 - c2).norm())
def spherical_offsets_to(self, tocoord):
r"""
Computes angular offsets to go *from* this coordinate *to* another.
Parameters
----------
tocoord : `~astropy.coordinates.BaseCoordinateFrame`
The coordinate to find the offset to.
Returns
-------
lon_offset : `~astropy.coordinates.Angle`
The angular offset in the longitude direction (i.e., RA for
equatorial coordinates).
lat_offset : `~astropy.coordinates.Angle`
The angular offset in the latitude direction (i.e., Dec for
equatorial coordinates).
Raises
------
ValueError
If the ``tocoord`` is not in the same frame as this one. This is
different from the behavior of the `separation`/`separation_3d`
methods because the offset components depend critically on the
specific choice of frame.
Notes
-----
This uses the sky offset frame machinery, and hence will produce a new
sky offset frame if one does not already exist for this object's frame
class.
See Also
--------
separation : for the *total* angular offset (not broken out into components).
position_angle : for the direction of the offset.
"""
if not self.is_equivalent_frame(tocoord):
raise ValueError('Tried to use spherical_offsets_to with two non-matching frames!')
aframe = self.skyoffset_frame()
acoord = tocoord.transform_to(aframe)
dlon = acoord.spherical.lon.view(Angle)
dlat = acoord.spherical.lat.view(Angle)
return dlon, dlat
def directional_offset_by(self, position_angle, separation):
"""
Computes coordinates at the given offset from this coordinate.
Parameters
----------
position_angle : `~astropy.coordinates.Angle`
position_angle of offset
separation : `~astropy.coordinates.Angle`
offset angular separation
Returns
-------
newpoints : `~astropy.coordinates.SkyCoord`
The coordinates for the location that corresponds to offsetting by
the given `position_angle` and `separation`.
Notes
-----
Returned SkyCoord frame retains only the frame attributes that are for
the resulting frame type. (e.g. if the input frame is
`~astropy.coordinates.ICRS`, an ``equinox`` value will be retained, but
an ``obstime`` will not.)
For a more complete set of transform offsets, use `~astropy.wcs.WCS`.
`~astropy.coordinates.SkyCoord.skyoffset_frame()` can also be used to
create a spherical frame with (lat=0, lon=0) at a reference point,
approximating an xy cartesian system for small offsets. This method
is distinct in that it is accurate on the sphere.
See Also
--------
position_angle : inverse operation for the ``position_angle`` component
separation : inverse operation for the ``separation`` component
"""
from . import angle_utilities
slat = self.represent_as(UnitSphericalRepresentation).lat
slon = self.represent_as(UnitSphericalRepresentation).lon
newlon, newlat = angle_utilities.offset_by(
lon=slon, lat=slat,
posang=position_angle, distance=separation)
return SkyCoord(newlon, newlat, frame=self.frame)
def match_to_catalog_sky(self, catalogcoord, nthneighbor=1):
"""
Finds the nearest on-sky matches of this coordinate in a set of
catalog coordinates.
For more on how to use this (and related) functionality, see the
examples in :doc:`/coordinates/matchsep`.
Parameters
----------
catalogcoord : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The base catalog in which to search for matches. Typically this
will be a coordinate object that is an array (i.e.,
``catalogcoord.isscalar == False``)
nthneighbor : int, optional
Which closest neighbor to search for. Typically ``1`` is
desired here, as that is correct for matching one set of
coordinates to another. The next likely use case is ``2``,
for matching a coordinate catalog against *itself* (``1``
is inappropriate because each point will find itself as the
closest match).
Returns
-------
idx : integer array
Indices into ``catalogcoord`` to get the matched points for
each of this object's coordinates. Shape matches this
object.
sep2d : `~astropy.coordinates.Angle`
The on-sky separation between the closest match for each
element in this object in ``catalogcoord``. Shape matches
this object.
dist3d : `~astropy.units.Quantity`
The 3D distance between the closest match for each element
in this object in ``catalogcoord``. Shape matches this
object. Unless both this and ``catalogcoord`` have associated
distances, this quantity assumes that all sources are at a
distance of 1 (dimensionless).
Notes
-----
This method requires `SciPy <https://www.scipy.org/>`_ to be
installed or it will fail.
See Also
--------
astropy.coordinates.match_coordinates_sky
SkyCoord.match_to_catalog_3d
"""
from .matching import match_coordinates_sky
if (isinstance(catalogcoord, (SkyCoord, BaseCoordinateFrame))
and catalogcoord.has_data):
self_in_catalog_frame = self.transform_to(catalogcoord)
else:
raise TypeError('Can only get separation to another SkyCoord or a '
'coordinate frame with data')
res = match_coordinates_sky(self_in_catalog_frame, catalogcoord,
nthneighbor=nthneighbor,
storekdtree='_kdtree_sky')
return res
def match_to_catalog_3d(self, catalogcoord, nthneighbor=1):
"""
Finds the nearest 3-dimensional matches of this coordinate to a set
of catalog coordinates.
This finds the 3-dimensional closest neighbor, which is only different
from the on-sky distance if ``distance`` is set in this object or the
``catalogcoord`` object.
For more on how to use this (and related) functionality, see the
examples in :doc:`/coordinates/matchsep`.
Parameters
----------
catalogcoord : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The base catalog in which to search for matches. Typically this
will be a coordinate object that is an array (i.e.,
``catalogcoord.isscalar == False``)
nthneighbor : int, optional
Which closest neighbor to search for. Typically ``1`` is
desired here, as that is correct for matching one set of
coordinates to another. The next likely use case is
``2``, for matching a coordinate catalog against *itself*
(``1`` is inappropriate because each point will find
itself as the closest match).
Returns
-------
idx : integer array
Indices into ``catalogcoord`` to get the matched points for
each of this object's coordinates. Shape matches this
object.
sep2d : `~astropy.coordinates.Angle`
The on-sky separation between the closest match for each
element in this object in ``catalogcoord``. Shape matches
this object.
dist3d : `~astropy.units.Quantity`
The 3D distance between the closest match for each element
in this object in ``catalogcoord``. Shape matches this
object.
Notes
-----
This method requires `SciPy <https://www.scipy.org/>`_ to be
installed or it will fail.
See Also
--------
astropy.coordinates.match_coordinates_3d
SkyCoord.match_to_catalog_sky
"""
from .matching import match_coordinates_3d
if (isinstance(catalogcoord, (SkyCoord, BaseCoordinateFrame))
and catalogcoord.has_data):
self_in_catalog_frame = self.transform_to(catalogcoord)
else:
raise TypeError('Can only get separation to another SkyCoord or a '
'coordinate frame with data')
res = match_coordinates_3d(self_in_catalog_frame, catalogcoord,
nthneighbor=nthneighbor,
storekdtree='_kdtree_3d')
return res
def search_around_sky(self, searcharoundcoords, seplimit):
"""
Searches for all coordinates in this object around a supplied set of
points within a given on-sky separation.
This is intended for use on `~astropy.coordinates.SkyCoord` objects
with coordinate arrays, rather than a scalar coordinate. For a scalar
coordinate, it is better to use
`~astropy.coordinates.SkyCoord.separation`.
For more on how to use this (and related) functionality, see the
examples in :doc:`/coordinates/matchsep`.
Parameters
----------
searcharoundcoords : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The coordinates to search around to try to find matching points in
this `SkyCoord`. This should be an object with array coordinates,
not a scalar coordinate object.
seplimit : `~astropy.units.Quantity` with angle units
The on-sky separation to search within.
Returns
-------
idxsearcharound : integer array
Indices into ``searcharoundcoords`` that match the
corresponding elements of ``idxself``. Shape matches
``idxself``.
idxself : integer array
Indices into ``self`` that match the
corresponding elements of ``idxsearcharound``. Shape matches
``idxsearcharound``.
sep2d : `~astropy.coordinates.Angle`
The on-sky separation between the coordinates. Shape matches
``idxsearcharound`` and ``idxself``.
dist3d : `~astropy.units.Quantity`
The 3D distance between the coordinates. Shape matches
``idxsearcharound`` and ``idxself``.
Notes
-----
This method requires `SciPy <https://www.scipy.org/>`_ (>=0.12.0) to be
installed or it will fail.
In the current implementation, the return values are always sorted in
the same order as the ``searcharoundcoords`` (so ``idxsearcharound`` is
in ascending order). This is considered an implementation detail,
though, so it could change in a future release.
See Also
--------
astropy.coordinates.search_around_sky
SkyCoord.search_around_3d
"""
from .matching import search_around_sky
return search_around_sky(searcharoundcoords, self, seplimit,
storekdtree='_kdtree_sky')
def search_around_3d(self, searcharoundcoords, distlimit):
"""
Searches for all coordinates in this object around a supplied set of
points within a given 3D radius.
This is intended for use on `~astropy.coordinates.SkyCoord` objects
with coordinate arrays, rather than a scalar coordinate. For a scalar
coordinate, it is better to use
`~astropy.coordinates.SkyCoord.separation_3d`.
For more on how to use this (and related) functionality, see the
examples in :doc:`/coordinates/matchsep`.
Parameters
----------
searcharoundcoords : `~astropy.coordinates.SkyCoord` or `~astropy.coordinates.BaseCoordinateFrame`
The coordinates to search around to try to find matching points in
this `SkyCoord`. This should be an object with array coordinates,
not a scalar coordinate object.
distlimit : `~astropy.units.Quantity` with distance units
The physical radius to search within.
Returns
-------
idxsearcharound : integer array
Indices into ``searcharoundcoords`` that match the
corresponding elements of ``idxself``. Shape matches
``idxself``.
idxself : integer array
Indices into ``self`` that match the
corresponding elements of ``idxsearcharound``. Shape matches
``idxsearcharound``.
sep2d : `~astropy.coordinates.Angle`
The on-sky separation between the coordinates. Shape matches
``idxsearcharound`` and ``idxself``.
dist3d : `~astropy.units.Quantity`
The 3D distance between the coordinates. Shape matches
``idxsearcharound`` and ``idxself``.
Notes
-----
This method requires `SciPy <https://www.scipy.org/>`_ (>=0.12.0) to be
installed or it will fail.
In the current implementation, the return values are always sorted in
the same order as the ``searcharoundcoords`` (so ``idxsearcharound`` is
in ascending order). This is considered an implementation detail,
though, so it could change in a future release.
See Also
--------
astropy.coordinates.search_around_3d
SkyCoord.search_around_sky
"""
from .matching import search_around_3d
return search_around_3d(searcharoundcoords, self, distlimit,
storekdtree='_kdtree_3d')
def position_angle(self, other):
"""
Computes the on-sky position angle (East of North) between this
`SkyCoord` and another.
Parameters
----------
other : `SkyCoord`
The other coordinate to compute the position angle to. It is
treated as the "head" of the vector of the position angle.
Returns
-------
pa : `~astropy.coordinates.Angle`
The (positive) position angle of the vector pointing from ``self``
to ``other``. If either ``self`` or ``other`` contain arrays, this
will be an array following the appropriate `numpy` broadcasting
rules.
Examples
--------
>>> c1 = SkyCoord(0*u.deg, 0*u.deg)
>>> c2 = SkyCoord(1*u.deg, 0*u.deg)
>>> c1.position_angle(c2).degree
90.0
>>> c3 = SkyCoord(1*u.deg, 1*u.deg)
>>> c1.position_angle(c3).degree # doctest: +FLOAT_CMP
44.995636455344844
"""
from . import angle_utilities
if not self.is_equivalent_frame(other):
try:
other = other.transform_to(self, merge_attributes=False)
except TypeError:
raise TypeError('Can only get position_angle to another '
'SkyCoord or a coordinate frame with data')
slat = self.represent_as(UnitSphericalRepresentation).lat
slon = self.represent_as(UnitSphericalRepresentation).lon
olat = other.represent_as(UnitSphericalRepresentation).lat
olon = other.represent_as(UnitSphericalRepresentation).lon
return angle_utilities.position_angle(slon, slat, olon, olat)
    def skyoffset_frame(self, rotation=None):
        """
        Returns the sky offset frame with this `SkyCoord` at the origin.

        Parameters
        ----------
        rotation : `~astropy.coordinates.Angle` or `~astropy.units.Quantity` with angle units
            The final rotation of the frame about the ``origin``. The sign of
            the rotation is the left-hand rule. That is, an object at a
            particular position angle in the un-rotated system will be sent to
            the positive latitude (z) direction in the final frame.

        Returns
        -------
        astrframe : `~astropy.coordinates.SkyOffsetFrame`
            A sky offset frame of the same type as this `SkyCoord` (e.g., if
            this object has an ICRS coordinate, the resulting frame is
            SkyOffsetICRS, with the origin set to this object)
        """
        return SkyOffsetFrame(origin=self, rotation=rotation)
def get_constellation(self, short_name=False, constellation_list='iau'):
"""
Determines the constellation(s) of the coordinates this `SkyCoord`
contains.
Parameters
----------
short_name : bool
If True, the returned names are the IAU-sanctioned abbreviated
names. Otherwise, full names for the constellations are used.
constellation_list : str
The set of constellations to use. Currently only ``'iau'`` is
supported, meaning the 88 "modern" constellations endorsed by the IAU.
Returns
-------
constellation : str or string array
If this is a scalar coordinate, returns the name of the
constellation. If it is an array `SkyCoord`, it returns an array of
names.
Notes
-----
To determine which constellation a point on the sky is in, this first
precesses to B1875, and then uses the Delporte boundaries of the 88
modern constellations, as tabulated by
`Roman 1987 <http://cdsarc.u-strasbg.fr/viz-bin/Cat?VI/42>`_.
See Also
--------
astropy.coordinates.get_constellation
"""
from .funcs import get_constellation
# because of issue #7028, the conversion to a PrecessedGeocentric
# system fails in some cases. Work around is to drop the velocities.
# they are not needed here since only position infromation is used
extra_frameattrs = {nm: getattr(self, nm)
for nm in self._extra_frameattr_names}
novel = SkyCoord(self.realize_frame(self.data.without_differentials()),
**extra_frameattrs)
return get_constellation(novel, short_name, constellation_list)
# the simpler version below can be used when gh-issue #7028 is resolved
# return get_constellation(self, short_name, constellation_list)
# WCS pixel to/from sky conversions
def to_pixel(self, wcs, origin=0, mode='all'):
"""
Convert this coordinate to pixel coordinates using a `~astropy.wcs.WCS`
object.
Parameters
----------
wcs : `~astropy.wcs.WCS`
The WCS to use for convert
origin : int
Whether to return 0 or 1-based pixel coordinates.
mode : 'all' or 'wcs'
Whether to do the transformation including distortions (``'all'``) or
only including only the core WCS transformation (``'wcs'``).
Returns
-------
xp, yp : `numpy.ndarray`
The pixel coordinates
See Also
--------
astropy.wcs.utils.skycoord_to_pixel : the implementation of this method
"""
from astropy.wcs.utils import skycoord_to_pixel
return skycoord_to_pixel(self, wcs=wcs, origin=origin, mode=mode)
@classmethod
def from_pixel(cls, xp, yp, wcs, origin=0, mode='all'):
"""
Create a new `SkyCoord` from pixel coordinates using an
`~astropy.wcs.WCS` object.
Parameters
----------
xp, yp : float or `numpy.ndarray`
The coordinates to convert.
wcs : `~astropy.wcs.WCS`
The WCS to use for convert
origin : int
Whether to return 0 or 1-based pixel coordinates.
mode : 'all' or 'wcs'
Whether to do the transformation including distortions (``'all'``) or
only including only the core WCS transformation (``'wcs'``).
Returns
-------
coord : an instance of this class
A new object with sky coordinates corresponding to the input ``xp``
and ``yp``.
See Also
--------
to_pixel : to do the inverse operation
astropy.wcs.utils.pixel_to_skycoord : the implementation of this method
"""
from astropy.wcs.utils import pixel_to_skycoord
return pixel_to_skycoord(xp, yp, wcs=wcs, origin=origin, mode=mode, cls=cls)
def contained_by(self, wcs, image=None, **kwargs):
"""
Determines if the SkyCoord is contained in the given wcs footprint.
Parameters
----------
wcs : `~astropy.wcs.WCS`
The coordinate to check if it is within the wcs coordinate.
image : array
Optional. The image associated with the wcs object that the cooordinate
is being checked against. If not given the naxis keywords will be used
to determine if the coordinate falls within the wcs footprint.
**kwargs :
Additional arguments to pass to `~astropy.coordinates.SkyCoord.to_pixel`
Returns
-------
response : bool
True means the WCS footprint contains the coordinate, False means it does not.
"""
if image is not None:
ymax, xmax = image.shape
else:
xmax, ymax = wcs._naxis
import warnings
with warnings.catch_warnings():
# Suppress warnings since they just mean we didn't find the coordinate
warnings.simplefilter("ignore")
try:
x, y = self.to_pixel(wcs, **kwargs)
except Exception:
return False
return (x < xmax) & (x > 0) & (y < ymax) & (y > 0)
    def radial_velocity_correction(self, kind='barycentric', obstime=None,
                                   location=None):
        """
        Compute the correction required to convert a radial velocity at a given
        time and place on the Earth's Surface to a barycentric or heliocentric
        velocity.

        Parameters
        ----------
        kind : str
            The kind of velocity correction.  Must be 'barycentric' or
            'heliocentric'.
        obstime : `~astropy.time.Time` or None, optional
            The time at which to compute the correction.  If `None`, the
            ``obstime`` frame attribute on the `SkyCoord` will be used.
        location : `~astropy.coordinates.EarthLocation` or None, optional
            The observer location at which to compute the correction.  If
            `None`, the ``location`` frame attribute on the passed-in
            ``obstime`` will be used, and if that is None, the ``location``
            frame attribute on the `SkyCoord` will be used.

        Raises
        ------
        ValueError
            If either ``obstime`` or ``location`` are passed in (not ``None``)
            when the frame attribute is already set on this `SkyCoord`.
        TypeError
            If ``obstime`` or ``location`` aren't provided, either as arguments
            or as frame attributes.

        Returns
        -------
        vcorr : `~astropy.units.Quantity` with velocity units
            The correction with a positive sign.  I.e., *add* this
            to an observed radial velocity to get the barycentric (or
            heliocentric) velocity.  If m/s precision or better is needed,
            see the notes below.

        Notes
        -----
        The barycentric correction is calculated to higher precision than the
        heliocentric correction and includes additional physics (e.g time dilation).
        Use barycentric corrections if m/s precision is required.

        The algorithm here is sufficient to perform corrections at the mm/s level, but
        care is needed in application. The barycentric correction returned uses the optical
        approximation v = z * c. Strictly speaking, the barycentric correction is
        multiplicative and should be applied as::

            >>> from astropy.time import Time
            >>> from astropy.coordinates import SkyCoord, EarthLocation
            >>> from astropy.constants import c
            >>> t = Time(56370.5, format='mjd', scale='utc')
            >>> loc = EarthLocation('149d33m00.5s','-30d18m46.385s',236.87*u.m)
            >>> sc = SkyCoord(1*u.deg, 2*u.deg)
            >>> vcorr = sc.radial_velocity_correction(kind='barycentric', obstime=t, location=loc)  # doctest: +REMOTE_DATA
            >>> rv = rv + vcorr + rv * vcorr / c  # doctest: +SKIP

        Also note that this method returns the correction velocity in the so-called
        *optical convention*::

            >>> vcorr = zb * c  # doctest: +SKIP

        where ``zb`` is the barycentric correction redshift as defined in section 3
        of Wright & Eastman (2014). The application formula given above follows from their
        equation (11) under assumption that the radial velocity ``rv`` has also been defined
        using the same optical convention. Note, this can be regarded as a matter of
        velocity definition and does not by itself imply any loss of accuracy, provided
        sufficient care has been taken during interpretation of the results. If you need
        the barycentric correction expressed as the full relativistic velocity (e.g., to provide
        it as the input to another software which performs the application), the
        following recipe can be used::

            >>> zb = vcorr / c  # doctest: +REMOTE_DATA
            >>> zb_plus_one_squared = (zb + 1) ** 2  # doctest: +REMOTE_DATA
            >>> vcorr_rel = c * (zb_plus_one_squared - 1) / (zb_plus_one_squared + 1)  # doctest: +REMOTE_DATA

        or alternatively using just equivalencies::

            >>> vcorr_rel = vcorr.to(u.Hz, u.doppler_optical(1*u.Hz)).to(vcorr.unit, u.doppler_relativistic(1*u.Hz))  # doctest: +REMOTE_DATA

        See also `~astropy.units.equivalencies.doppler_optical`,
        `~astropy.units.equivalencies.doppler_radio`, and
        `~astropy.units.equivalencies.doppler_relativistic` for more information on
        the velocity conventions.

        The default is for this method to use the builtin ephemeris for
        computing the sun and earth location.  Other ephemerides can be chosen
        by setting the `~astropy.coordinates.solar_system_ephemeris` variable,
        either directly or via ``with`` statement.  For example, to use the JPL
        ephemeris, do::

            >>> from astropy.coordinates import solar_system_ephemeris
            >>> sc = SkyCoord(1*u.deg, 2*u.deg)
            >>> with solar_system_ephemeris.set('jpl'):  # doctest: +REMOTE_DATA
            ...     rv += sc.radial_velocity_correction(obstime=t, location=loc)  # doctest: +SKIP
        """
        # has to be here to prevent circular imports
        from .solar_system import get_body_barycentric_posvel
        # location validation
        # ``timeloc`` is the location carried on the passed-in obstime (if any);
        # it competes with both the `location` argument and the frame attribute.
        timeloc = getattr(obstime, 'location', None)
        if location is None:
            if self.location is not None:
                location = self.location
                if timeloc is not None:
                    raise ValueError('`location` cannot be in both the '
                                     'passed-in `obstime` and this `SkyCoord` '
                                     'because it is ambiguous which is meant '
                                     'for the radial_velocity_correction.')
            elif timeloc is not None:
                location = timeloc
            else:
                raise TypeError('Must provide a `location` to '
                                'radial_velocity_correction, either as a '
                                'SkyCoord frame attribute, as an attribute on '
                                'the passed in `obstime`, or in the method '
                                'call.')
        elif self.location is not None or timeloc is not None:
            raise ValueError('Cannot compute radial velocity correction if '
                             '`location` argument is passed in and there is '
                             'also a `location` attribute on this SkyCoord or '
                             'the passed-in `obstime`.')
        # obstime validation
        coo_at_rv_obstime = self  # assume we need no space motion for now
        if obstime is None:
            obstime = self.obstime
            if obstime is None:
                raise TypeError('Must provide an `obstime` to '
                                'radial_velocity_correction, either as a '
                                'SkyCoord frame attribute or in the method '
                                'call.')
        elif self.obstime is not None and self.frame.data.differentials:
            # we do need space motion after all
            coo_at_rv_obstime = self.apply_space_motion(obstime)
        elif self.obstime is None:
            # warn the user if the object has differentials set
            if 's' in self.data.differentials:
                warnings.warn(
                    "SkyCoord has space motion, and therefore the specified "
                    "position of the SkyCoord may not be the same as "
                    "the `obstime` for the radial velocity measurement. "
                    "This may affect the rv correction at the order of km/s"
                    "for very high proper motions sources. If you wish to "
                    "apply space motion of the SkyCoord to correct for this"
                    "the `obstime` attribute of the SkyCoord must be set",
                    AstropyUserWarning
                )
        # Barycentric position and velocity of the Earth at the rv epoch.
        pos_earth, v_earth = get_body_barycentric_posvel('earth', obstime)
        if kind == 'barycentric':
            v_origin_to_earth = v_earth
        elif kind == 'heliocentric':
            v_sun = get_body_barycentric_posvel('sun', obstime)[1]
            v_origin_to_earth = v_earth - v_sun
        else:
            raise ValueError("`kind` argument to radial_velocity_correction must "
                             "be 'barycentric' or 'heliocentric', but got "
                             "'{}'".format(kind))
        # Observer position/velocity relative to the geocenter.
        gcrs_p, gcrs_v = location.get_gcrs_posvel(obstime)
        # transforming to GCRS is not the correct thing to do here, since we don't want to
        # include aberration (or light deflection)? Instead, only apply parallax if necessary
        icrs_cart = coo_at_rv_obstime.icrs.cartesian
        icrs_cart_novel = icrs_cart.without_differentials()
        if self.data.__class__ is UnitSphericalRepresentation:
            # No distance available, so the unit direction is used directly.
            targcart = icrs_cart_novel
        else:
            # skycoord has distances so apply parallax
            obs_icrs_cart = pos_earth + gcrs_p
            targcart = icrs_cart_novel - obs_icrs_cart
            targcart /= targcart.norm()
        if kind == 'barycentric':
            beta_obs = (v_origin_to_earth + gcrs_v) / speed_of_light
            # Lorentz factor of the observer for the time-dilation term.
            gamma_obs = 1 / np.sqrt(1 - beta_obs.norm()**2)
            gr = location.gravitational_redshift(obstime)
            # barycentric redshift according to eq 28 in Wright & Eastmann (2014),
            # neglecting Shapiro delay and effects of the star's own motion
            zb = gamma_obs * (1 + beta_obs.dot(targcart)) / (1 + gr/speed_of_light)
            # try and get terms corresponding to stellar motion.
            if icrs_cart.differentials:
                try:
                    ro = self.icrs.cartesian
                    beta_star = ro.differentials['s'].to_cartesian() / speed_of_light
                    # ICRS unit vector at coordinate epoch
                    ro = ro.without_differentials()
                    ro /= ro.norm()
                    zb *= (1 + beta_star.dot(ro)) / (1 + beta_star.dot(targcart))
                except u.UnitConversionError:
                    warnings.warn("SkyCoord contains some velocity information, but not enough to "
                                  "calculate the full space motion of the source, and so this has "
                                  "been ignored for the purposes of calculating the radial velocity "
                                  "correction. This can lead to errors on the order of metres/second.",
                                  AstropyUserWarning)
            # Convert the multiplicative redshift into the optical-convention
            # velocity (v = z * c) documented in the Notes section.
            zb = zb - 1
            return zb * speed_of_light
        else:
            # do a simpler correction ignoring time dilation and gravitational redshift
            # this is adequate since Heliocentric corrections shouldn't be used if
            # cm/s precision is required.
            return targcart.dot(v_origin_to_earth + gcrs_v)
# Table interactions
@classmethod
def guess_from_table(cls, table, **coord_kwargs):
r"""
A convenience method to create and return a new `SkyCoord` from the data
in an astropy Table.
This method matches table columns that start with the case-insensitive
names of the the components of the requested frames, if they are also
followed by a non-alphanumeric character. It will also match columns
that *end* with the component name if a non-alphanumeric character is
*before* it.
For example, the first rule means columns with names like
``'RA[J2000]'`` or ``'ra'`` will be interpreted as ``ra`` attributes for
`~astropy.coordinates.ICRS` frames, but ``'RAJ2000'`` or ``'radius'``
are *not*. Similarly, the second rule applied to the
`~astropy.coordinates.Galactic` frame means that a column named
``'gal_l'`` will be used as the the ``l`` component, but ``gall`` or
``'fill'`` will not.
The definition of alphanumeric here is based on Unicode's definition
of alphanumeric, except without ``_`` (which is normally considered
alphanumeric). So for ASCII, this means the non-alphanumeric characters
are ``<space>_!"#$%&'()*+,-./\:;<=>?@[]^`{|}~``).
Parameters
----------
table : astropy.Table
The table to load data from.
coord_kwargs
Any additional keyword arguments are passed directly to this class's
constructor.
Returns
-------
newsc : same as this class
The new `SkyCoord` (or subclass) object.
"""
_frame_cls, _frame_kwargs = _get_frame_without_data([], coord_kwargs)
frame = _frame_cls(**_frame_kwargs)
coord_kwargs['frame'] = coord_kwargs.get('frame', frame)
comp_kwargs = {}
for comp_name in frame.representation_component_names:
# this matches things like 'ra[...]'' but *not* 'rad'.
# note that the "_" must be in there explicitly, because
# "alphanumeric" usually includes underscores.
starts_with_comp = comp_name + r'(\W|\b|_)'
# this part matches stuff like 'center_ra', but *not*
# 'aura'
ends_with_comp = r'.*(\W|\b|_)' + comp_name + r'\b'
# the final regex ORs together the two patterns
rex = re.compile('(' + starts_with_comp + ')|(' + ends_with_comp + ')',
re.IGNORECASE | re.UNICODE)
for col_name in table.colnames:
if rex.match(col_name):
if comp_name in comp_kwargs:
oldname = comp_kwargs[comp_name].name
msg = ('Found at least two matches for component "{0}"'
': "{1}" and "{2}". Cannot continue with this '
'ambiguity.')
raise ValueError(msg.format(comp_name, oldname, col_name))
comp_kwargs[comp_name] = table[col_name]
for k, v in comp_kwargs.items():
if k in coord_kwargs:
raise ValueError('Found column "{}" in table, but it was '
'already provided as "{}" keyword to '
'guess_from_table function.'.format(v.name, k))
else:
coord_kwargs[k] = v
return cls(**coord_kwargs)
# Name resolve
@classmethod
def from_name(cls, name, frame='icrs', parse=False, cache=True):
"""
Given a name, query the CDS name resolver to attempt to retrieve
coordinate information for that object. The search database, sesame
url, and query timeout can be set through configuration items in
``astropy.coordinates.name_resolve`` -- see docstring for
`~astropy.coordinates.get_icrs_coordinates` for more
information.
Parameters
----------
name : str
The name of the object to get coordinates for, e.g. ``'M42'``.
frame : str or `BaseCoordinateFrame` class or instance
The frame to transform the object to.
parse: bool
Whether to attempt extracting the coordinates from the name by
parsing with a regex. For objects catalog names that have
J-coordinates embedded in their names, e.g.,
'CRTS SSS100805 J194428-420209', this may be much faster than a
Sesame query for the same object name. The coordinates extracted
in this way may differ from the database coordinates by a few
deci-arcseconds, so only use this option if you do not need
sub-arcsecond accuracy for coordinates.
cache : bool, optional
Determines whether to cache the results or not. To update or
overwrite an existing value, pass ``cache='update'``.
Returns
-------
coord : SkyCoord
Instance of the SkyCoord class.
"""
from .name_resolve import get_icrs_coordinates
icrs_coord = get_icrs_coordinates(name, parse, cache=cache)
icrs_sky_coord = cls(icrs_coord)
if frame in ('icrs', icrs_coord.__class__):
return icrs_sky_coord
else:
return icrs_sky_coord.transform_to(frame)
| 43.268542 | 139 | 0.60519 | import re
import copy
import warnings
import operator
import numpy as np
from astropy import _erfa as erfa
from astropy.utils.compat.misc import override__dir__
from astropy import units as u
from astropy.constants import c as speed_of_light
from astropy.utils.data_info import MixinInfo
from astropy.utils import ShapedLikeNDArray
from astropy.time import Time
from astropy.utils.exceptions import AstropyUserWarning
from .distances import Distance
from .angles import Angle
from .baseframe import (BaseCoordinateFrame, frame_transform_graph,
GenericFrame)
from .builtin_frames import ICRS, SkyOffsetFrame
from .representation import (SphericalRepresentation,
UnitSphericalRepresentation, SphericalDifferential)
from .sky_coordinate_parsers import (_get_frame_class, _get_frame_without_data,
_parse_coordinate_data)
__all__ = ['SkyCoord', 'SkyCoordInfo']
class SkyCoordInfo(MixinInfo):
attrs_from_parent = set(['unit'])
_supports_indexing = False
@staticmethod
def default_format(val):
repr_data = val.info._repr_data
formats = ['{0.' + compname + '.value:}' for compname
in repr_data.components]
return ','.join(formats).format(repr_data)
@property
def unit(self):
repr_data = self._repr_data
unit = ','.join(str(getattr(repr_data, comp).unit) or 'None'
for comp in repr_data.components)
return unit
@property
def _repr_data(self):
if self._parent is None:
return None
sc = self._parent
if (issubclass(sc.representation_type, SphericalRepresentation)
and isinstance(sc.data, UnitSphericalRepresentation)):
repr_data = sc.represent_as(sc.data.__class__, in_frame_units=True)
else:
repr_data = sc.represent_as(sc.representation_type,
in_frame_units=True)
return repr_data
def _represent_as_dict(self):
obj = self._parent
attrs = (list(obj.representation_component_names) +
list(frame_transform_graph.frame_attributes.keys()))
if 'distance' in attrs and np.all(obj.distance == 1.0):
attrs.remove('distance')
out = super()._represent_as_dict(attrs)
out['representation_type'] = obj.representation_type.get_name()
out['frame'] = obj.frame.name
# Note that obj.info.unit is a fake composite unit (e.g. 'deg,deg,None'
# or None,None,m) and is not stored. The individual attributes have
# units.
return out
def new_like(self, skycoords, length, metadata_conflicts='warn', name=None):
# Get merged info attributes like shape, dtype, format, description, etc.
attrs = self.merge_cols_attributes(skycoords, metadata_conflicts, name,
('meta', 'description'))
skycoord0 = skycoords[0]
# Make a new SkyCoord object with the desired length and attributes
# by using the _apply / __getitem__ machinery to effectively return
# skycoord0[[0, 0, ..., 0, 0]]. This will have the all the right frame
# attributes with the right shape.
indexes = np.zeros(length, dtype=np.int64)
out = skycoord0[indexes]
# Use __setitem__ machinery to check for consistency of all skycoords
for skycoord in skycoords[1:]:
try:
out[0] = skycoord[0]
except Exception as err:
raise ValueError(f'input skycoords are inconsistent: {err}')
# Set (merged) info attributes
for attr in ('name', 'meta', 'description'):
if attr in attrs:
setattr(out.info, attr, attrs[attr])
return out
class SkyCoord(ShapedLikeNDArray):
# Declare that SkyCoord can be used as a Table column by defining the
# info property.
info = SkyCoordInfo()
def __init__(self, *args, copy=True, **kwargs):
# these are frame attributes set on this SkyCoord but *not* a part of
# the frame object this SkyCoord contains
self._extra_frameattr_names = set()
# If all that is passed in is a frame instance that already has data,
# we should bypass all of the parsing and logic below. This is here
# to make this the fastest way to create a SkyCoord instance. Many of
# the classmethods implemented for performance enhancements will use
# this as the initialization path
if (len(args) == 1 and len(kwargs) == 0
and isinstance(args[0], (BaseCoordinateFrame, SkyCoord))):
coords = args[0]
if isinstance(coords, SkyCoord):
self._extra_frameattr_names = coords._extra_frameattr_names
self.info = coords.info
# Copy over any extra frame attributes
for attr_name in self._extra_frameattr_names:
# Setting it will also validate it.
setattr(self, attr_name, getattr(coords, attr_name))
coords = coords.frame
if not coords.has_data:
raise ValueError('Cannot initialize from a coordinate frame '
'instance without coordinate data')
if copy:
self._sky_coord_frame = coords.copy()
else:
self._sky_coord_frame = coords
else:
# Get the frame instance without coordinate data but with all frame
# attributes set - these could either have been passed in with the
# frame as an instance, or passed in as kwargs here
frame_cls, frame_kwargs = _get_frame_without_data(args, kwargs)
# Parse the args and kwargs to assemble a sanitized and validated
# kwargs dict for initializing attributes for this object and for
# creating the internal self._sky_coord_frame object
args = list(args) # Make it mutable
skycoord_kwargs, components, info = _parse_coordinate_data(
frame_cls(**frame_kwargs), args, kwargs)
# In the above two parsing functions, these kwargs were identified
# as valid frame attributes for *some* frame, but not the frame that
# this SkyCoord will have. We keep these attributes as special
# skycoord frame attributes:
for attr in skycoord_kwargs:
# Setting it will also validate it.
setattr(self, attr, skycoord_kwargs[attr])
if info is not None:
self.info = info
# Finally make the internal coordinate object.
frame_kwargs.update(components)
self._sky_coord_frame = frame_cls(copy=copy, **frame_kwargs)
if not self._sky_coord_frame.has_data:
raise ValueError('Cannot create a SkyCoord without data')
@property
def frame(self):
return self._sky_coord_frame
@property
def representation_type(self):
return self.frame.representation_type
@representation_type.setter
def representation_type(self, value):
self.frame.representation_type = value
# TODO: remove these in future
@property
def representation(self):
return self.frame.representation
@representation.setter
def representation(self, value):
self.frame.representation = value
@property
def shape(self):
return self.frame.shape
def __eq__(self, value):
# Make sure that any extra frame attribute names are equivalent.
for attr in self._extra_frameattr_names | value._extra_frameattr_names:
if not self.frame._frameattr_equiv(getattr(self, attr),
getattr(value, attr)):
raise ValueError(f"cannot compare: extra frame attribute "
f"'{attr}' is not equivalent "
f"(perhaps compare the frames directly to avoid "
f"this exception)")
return self._sky_coord_frame == value._sky_coord_frame
def __ne__(self, value):
return np.logical_not(self == value)
def _apply(self, method, *args, **kwargs):
def apply_method(value):
if isinstance(value, ShapedLikeNDArray):
return value._apply(method, *args, **kwargs)
else:
if callable(method):
return method(value, *args, **kwargs)
else:
return getattr(value, method)(*args, **kwargs)
# create a new but empty instance, and copy over stuff
new = super().__new__(self.__class__)
new._sky_coord_frame = self._sky_coord_frame._apply(method,
*args, **kwargs)
new._extra_frameattr_names = self._extra_frameattr_names.copy()
for attr in self._extra_frameattr_names:
value = getattr(self, attr)
if getattr(value, 'shape', ()):
value = apply_method(value)
elif method == 'copy' or method == 'flatten':
# flatten should copy also for a single element array, but
# we cannot use it directly for array scalars, since it
# always returns a one-dimensional array. So, just copy.
value = copy.copy(value)
setattr(new, '_' + attr, value)
# Copy other 'info' attr only if it has actually been defined.
# See PR #3898 for further explanation and justification, along
# with Quantity.__array_finalize__
if 'info' in self.__dict__:
new.info = self.info
return new
def __setitem__(self, item, value):
if self.__class__ is not value.__class__:
raise TypeError(f'can only set from object of same class: '
f'{self.__class__.__name__} vs. '
f'{value.__class__.__name__}')
# Make sure that any extra frame attribute names are equivalent.
for attr in self._extra_frameattr_names | value._extra_frameattr_names:
if not self.frame._frameattr_equiv(getattr(self, attr),
getattr(value, attr)):
raise ValueError(f'attribute {attr} is not equivalent')
# Set the frame values. This checks frame equivalence and also clears
# the cache to ensure that the object is not in an inconsistent state.
self._sky_coord_frame[item] = value._sky_coord_frame
def insert(self, obj, values, axis=0):
# Validate inputs: obj arg is integer, axis=0, self is not a scalar, and
# input index is in bounds.
try:
idx0 = operator.index(obj)
except TypeError:
raise TypeError('obj arg must be an integer')
if axis != 0:
raise ValueError('axis must be 0')
if not self.shape:
raise TypeError('cannot insert into scalar {} object'
.format(self.__class__.__name__))
if abs(idx0) > len(self):
raise IndexError('index {} is out of bounds for axis 0 with size {}'
.format(idx0, len(self)))
# Turn negative index into positive
if idx0 < 0:
idx0 = len(self) + idx0
n_values = len(values) if values.shape else 1
# Finally make the new object with the correct length and set values for the
# three sections, before insert, the insert, and after the insert.
out = self.__class__.info.new_like([self], len(self) + n_values, name=self.info.name)
# Set the output values. This is where validation of `values` takes place to ensure
# that it can indeed be inserted.
out[:idx0] = self[:idx0]
out[idx0:idx0 + n_values] = values
out[idx0 + n_values:] = self[idx0:]
return out
def transform_to(self, frame, merge_attributes=True):
from astropy.coordinates.errors import ConvertError
frame_kwargs = {}
# Frame name (string) or frame class? Coerce into an instance.
try:
frame = _get_frame_class(frame)()
except Exception:
pass
if isinstance(frame, SkyCoord):
frame = frame.frame # Change to underlying coord frame instance
if isinstance(frame, BaseCoordinateFrame):
new_frame_cls = frame.__class__
# Get frame attributes, allowing defaults to be overridden by
# explicitly set attributes of the source if ``merge_attributes``.
for attr in frame_transform_graph.frame_attributes:
self_val = getattr(self, attr, None)
frame_val = getattr(frame, attr, None)
if (frame_val is not None
and not (merge_attributes
and frame.is_frame_attr_default(attr))):
frame_kwargs[attr] = frame_val
elif (self_val is not None
and not self.is_frame_attr_default(attr)):
frame_kwargs[attr] = self_val
elif frame_val is not None:
frame_kwargs[attr] = frame_val
else:
raise ValueError('Transform `frame` must be a frame name, class, or instance')
# Get the composite transform to the new frame
trans = frame_transform_graph.get_transform(self.frame.__class__, new_frame_cls)
if trans is None:
raise ConvertError('Cannot transform from {} to {}'
.format(self.frame.__class__, new_frame_cls))
# Make a generic frame which will accept all the frame kwargs that
# are provided and allow for transforming through intermediate frames
# which may require one or more of those kwargs.
generic_frame = GenericFrame(frame_kwargs)
# Do the transformation, returning a coordinate frame of the desired
# final type (not generic).
new_coord = trans(self.frame, generic_frame)
# Finally make the new SkyCoord object from the `new_coord` and
# remaining frame_kwargs that are not frame_attributes in `new_coord`.
for attr in (set(new_coord.get_frame_attr_names()) &
set(frame_kwargs.keys())):
frame_kwargs.pop(attr)
return self.__class__(new_coord, **frame_kwargs)
def apply_space_motion(self, new_obstime=None, dt=None):
if (new_obstime is None and dt is None or
new_obstime is not None and dt is not None):
raise ValueError("You must specify one of `new_obstime` or `dt`, "
"but not both.")
# Validate that we have velocity info
if 's' not in self.frame.data.differentials:
raise ValueError('SkyCoord requires velocity data to evolve the '
'position.')
if 'obstime' in self.frame.frame_attributes:
raise NotImplementedError("Updating the coordinates in a frame "
"with explicit time dependence is "
"currently not supported. If you would "
"like this functionality, please open an "
"issue on github:\n"
"https://github.com/astropy/astropy")
if new_obstime is not None and self.obstime is None:
# If no obstime is already on this object, raise an error if a new
# obstime is passed: we need to know the time / epoch at which the
# the position / velocity were measured initially
raise ValueError('This object has no associated `obstime`. '
'apply_space_motion() must receive a time '
'difference, `dt`, and not a new obstime.')
# Compute t1 and t2, the times used in the starpm call, which *only*
# uses them to compute a delta-time
t1 = self.obstime
if dt is None:
# self.obstime is not None and new_obstime is not None b/c of above
# checks
t2 = new_obstime
else:
# new_obstime is definitely None b/c of the above checks
if t1 is None:
# MAGIC NUMBER: if the current SkyCoord object has no obstime,
# assume J2000 to do the dt offset. This is not actually used
# for anything except a delta-t in starpm, so it's OK that it's
# not necessarily the "real" obstime
t1 = Time('J2000')
new_obstime = None # we don't actually know the inital obstime
t2 = t1 + dt
else:
t2 = t1 + dt
new_obstime = t2
t1 = t1.tdb
t2 = t2.tdb
icrsrep = self.icrs.represent_as(SphericalRepresentation, SphericalDifferential)
icrsvel = icrsrep.differentials['s']
parallax_zero = False
try:
plx = icrsrep.distance.to_value(u.arcsecond, u.parallax())
except u.UnitConversionError:
plx = 0.
parallax_zero = True
try:
rv = icrsvel.d_distance.to_value(u.km/u.s)
except u.UnitConversionError:
rv = 0.
starpm = erfa.pmsafe(icrsrep.lon.radian, icrsrep.lat.radian,
icrsvel.d_lon.to_value(u.radian/u.yr),
icrsvel.d_lat.to_value(u.radian/u.yr),
plx, rv, t1.jd1, t1.jd2, t2.jd1, t2.jd2)
if parallax_zero:
new_distance = None
else:
new_distance = Distance(parallax=starpm[4] << u.arcsec)
icrs2 = ICRS(ra=u.Quantity(starpm[0], u.radian, copy=False),
dec=u.Quantity(starpm[1], u.radian, copy=False),
pm_ra=u.Quantity(starpm[2], u.radian/u.yr, copy=False),
pm_dec=u.Quantity(starpm[3], u.radian/u.yr, copy=False),
distance=new_distance,
radial_velocity=u.Quantity(starpm[5], u.km/u.s, copy=False),
differential_type=SphericalDifferential)
frattrs = {attrnm: getattr(self, attrnm)
for attrnm in self._extra_frameattr_names}
frattrs['obstime'] = new_obstime
return self.__class__(icrs2, **frattrs).transform_to(self.frame)
def _is_name(self, string):
return (self.frame.name == string or
(isinstance(self.frame.name, list) and string in self.frame.name))
def __getattr__(self, attr):
if '_sky_coord_frame' in self.__dict__:
if self._is_name(attr):
return self
if attr in frame_transform_graph.frame_attributes:
if attr in self.frame.get_frame_attr_names():
return getattr(self.frame, attr)
else:
return getattr(self, '_' + attr, None)
if not attr.startswith('_') and hasattr(self._sky_coord_frame, attr):
return getattr(self._sky_coord_frame, attr)
frame_cls = frame_transform_graph.lookup_name(attr)
if frame_cls is not None and self.frame.is_transformable_to(frame_cls):
return self.transform_to(attr)
raise AttributeError("'{}' object has no attribute '{}'"
.format(self.__class__.__name__, attr))
def __setattr__(self, attr, val):
if '_sky_coord_frame' in self.__dict__:
if self._is_name(attr):
raise AttributeError(f"'{attr}' is immutable")
if not attr.startswith('_') and hasattr(self._sky_coord_frame, attr):
setattr(self._sky_coord_frame, attr, val)
return
frame_cls = frame_transform_graph.lookup_name(attr)
if frame_cls is not None and self.frame.is_transformable_to(frame_cls):
raise AttributeError(f"'{attr}' is immutable")
if attr in frame_transform_graph.frame_attributes:
super().__setattr__('_' + attr, val)
frame_transform_graph.frame_attributes[attr].__get__(self)
self._extra_frameattr_names |= {attr}
else:
super().__setattr__(attr, val)
def __delattr__(self, attr):
if '_sky_coord_frame' in self.__dict__:
if self._is_name(attr):
raise AttributeError(f"'{attr}' is immutable")
if not attr.startswith('_') and hasattr(self._sky_coord_frame,
attr):
delattr(self._sky_coord_frame, attr)
return
frame_cls = frame_transform_graph.lookup_name(attr)
if frame_cls is not None and self.frame.is_transformable_to(frame_cls):
raise AttributeError(f"'{attr}' is immutable")
if attr in frame_transform_graph.frame_attributes:
super().__delattr__('_' + attr)
self._extra_frameattr_names -= {attr}
else:
super().__delattr__(attr)
@override__dir__
def __dir__(self):
dir_values = set()
for name in frame_transform_graph.get_names():
frame_cls = frame_transform_graph.lookup_name(name)
if self.frame.is_transformable_to(frame_cls):
dir_values.add(name)
dir_values.update(set(attr for attr in dir(self.frame) if not attr.startswith('_')))
dir_values.update(frame_transform_graph.frame_attributes.keys())
return dir_values
def __repr__(self):
clsnm = self.__class__.__name__
coonm = self.frame.__class__.__name__
frameattrs = self.frame._frame_attrs_repr()
if frameattrs:
frameattrs = ': ' + frameattrs
data = self.frame._data_repr()
if data:
data = ': ' + data
return '<{clsnm} ({coonm}{frameattrs}){data}>'.format(**locals())
def to_string(self, style='decimal', **kwargs):
sph_coord = self.frame.represent_as(SphericalRepresentation)
styles = {'hmsdms': {'lonargs': {'unit': u.hour, 'pad': True},
'latargs': {'unit': u.degree, 'pad': True, 'alwayssign': True}},
'dms': {'lonargs': {'unit': u.degree},
'latargs': {'unit': u.degree}},
'decimal': {'lonargs': {'unit': u.degree, 'decimal': True},
'latargs': {'unit': u.degree, 'decimal': True}}
}
lonargs = {}
latargs = {}
if style in styles:
lonargs.update(styles[style]['lonargs'])
latargs.update(styles[style]['latargs'])
else:
raise ValueError('Invalid style. Valid options are: {}'.format(",".join(styles)))
lonargs.update(kwargs)
latargs.update(kwargs)
if np.isscalar(sph_coord.lon.value):
coord_string = (sph_coord.lon.to_string(**lonargs) +
" " + sph_coord.lat.to_string(**latargs))
else:
coord_string = []
for lonangle, latangle in zip(sph_coord.lon.ravel(), sph_coord.lat.ravel()):
coord_string += [(lonangle.to_string(**lonargs) +
" " + latangle.to_string(**latargs))]
if len(sph_coord.shape) > 1:
coord_string = np.array(coord_string).reshape(sph_coord.shape)
return coord_string
def is_equivalent_frame(self, other):
if isinstance(other, BaseCoordinateFrame):
return self.frame.is_equivalent_frame(other)
elif isinstance(other, SkyCoord):
if other.frame.name != self.frame.name:
return False
for fattrnm in frame_transform_graph.frame_attributes:
if not BaseCoordinateFrame._frameattr_equiv(getattr(self, fattrnm),
getattr(other, fattrnm)):
return False
return True
else:
raise TypeError("Tried to do is_equivalent_frame on something that "
"isn't frame-like")
# High-level convenience methods
def separation(self, other):
from . import Angle
from .angle_utilities import angular_separation
if not self.is_equivalent_frame(other):
try:
kwargs = {'merge_attributes': False} if isinstance(other, SkyCoord) else {}
other = other.transform_to(self, **kwargs)
except TypeError:
raise TypeError('Can only get separation to another SkyCoord '
'or a coordinate frame with data')
lon1 = self.spherical.lon
lat1 = self.spherical.lat
lon2 = other.spherical.lon
lat2 = other.spherical.lat
# Get the separation as a Quantity, convert to Angle in degrees
sep = angular_separation(lon1, lat1, lon2, lat2)
return Angle(sep, unit=u.degree)
def separation_3d(self, other):
if not self.is_equivalent_frame(other):
try:
kwargs = {'merge_attributes': False} if isinstance(other, SkyCoord) else {}
other = other.transform_to(self, **kwargs)
except TypeError:
raise TypeError('Can only get separation to another SkyCoord '
'or a coordinate frame with data')
if issubclass(self.data.__class__, UnitSphericalRepresentation):
raise ValueError('This object does not have a distance; cannot '
'compute 3d separation.')
if issubclass(other.data.__class__, UnitSphericalRepresentation):
raise ValueError('The other object does not have a distance; '
'cannot compute 3d separation.')
c1 = self.cartesian.without_differentials()
c2 = other.cartesian.without_differentials()
return Distance((c1 - c2).norm())
def spherical_offsets_to(self, tocoord):
if not self.is_equivalent_frame(tocoord):
raise ValueError('Tried to use spherical_offsets_to with two non-matching frames!')
aframe = self.skyoffset_frame()
acoord = tocoord.transform_to(aframe)
dlon = acoord.spherical.lon.view(Angle)
dlat = acoord.spherical.lat.view(Angle)
return dlon, dlat
def directional_offset_by(self, position_angle, separation):
from . import angle_utilities
slat = self.represent_as(UnitSphericalRepresentation).lat
slon = self.represent_as(UnitSphericalRepresentation).lon
newlon, newlat = angle_utilities.offset_by(
lon=slon, lat=slat,
posang=position_angle, distance=separation)
return SkyCoord(newlon, newlat, frame=self.frame)
def match_to_catalog_sky(self, catalogcoord, nthneighbor=1):
from .matching import match_coordinates_sky
if (isinstance(catalogcoord, (SkyCoord, BaseCoordinateFrame))
and catalogcoord.has_data):
self_in_catalog_frame = self.transform_to(catalogcoord)
else:
raise TypeError('Can only get separation to another SkyCoord or a '
'coordinate frame with data')
res = match_coordinates_sky(self_in_catalog_frame, catalogcoord,
nthneighbor=nthneighbor,
storekdtree='_kdtree_sky')
return res
def match_to_catalog_3d(self, catalogcoord, nthneighbor=1):
from .matching import match_coordinates_3d
if (isinstance(catalogcoord, (SkyCoord, BaseCoordinateFrame))
and catalogcoord.has_data):
self_in_catalog_frame = self.transform_to(catalogcoord)
else:
raise TypeError('Can only get separation to another SkyCoord or a '
'coordinate frame with data')
res = match_coordinates_3d(self_in_catalog_frame, catalogcoord,
nthneighbor=nthneighbor,
storekdtree='_kdtree_3d')
return res
    def search_around_sky(self, searcharoundcoords, seplimit):
        """
        Find all pairs between this coordinate and ``searcharoundcoords``
        whose on-sky separation is within ``seplimit``.

        Thin delegation to `~astropy.coordinates.search_around_sky`, with
        this object passed as the second (searched) set and a fixed
        ``storekdtree`` name so the matcher can cache its KD-tree.

        Parameters
        ----------
        searcharoundcoords : coordinate-like
            The coordinates to search around.
        seplimit : `~astropy.units.Quantity` (angle)
            Maximum on-sky separation for a pair to be reported.

        Returns
        -------
        Whatever `search_around_sky` returns (index pairs and separations).
        """
        from .matching import search_around_sky
        return search_around_sky(searcharoundcoords, self, seplimit,
                                 storekdtree='_kdtree_sky')
    def search_around_3d(self, searcharoundcoords, distlimit):
        """
        Find all pairs between this coordinate and ``searcharoundcoords``
        whose three-dimensional separation is within ``distlimit``.

        Thin delegation to `~astropy.coordinates.search_around_3d`, with
        this object passed as the second (searched) set and a fixed
        ``storekdtree`` name so the matcher can cache its KD-tree.

        Parameters
        ----------
        searcharoundcoords : coordinate-like
            The coordinates to search around.
        distlimit : `~astropy.units.Quantity` (length)
            Maximum 3D distance for a pair to be reported.

        Returns
        -------
        Whatever `search_around_3d` returns (index pairs and distances).
        """
        from .matching import search_around_3d
        return search_around_3d(searcharoundcoords, self, distlimit,
                                storekdtree='_kdtree_3d')
def position_angle(self, other):
from . import angle_utilities
if not self.is_equivalent_frame(other):
try:
other = other.transform_to(self, merge_attributes=False)
except TypeError:
raise TypeError('Can only get position_angle to another '
'SkyCoord or a coordinate frame with data')
slat = self.represent_as(UnitSphericalRepresentation).lat
slon = self.represent_as(UnitSphericalRepresentation).lon
olat = other.represent_as(UnitSphericalRepresentation).lat
olon = other.represent_as(UnitSphericalRepresentation).lon
return angle_utilities.position_angle(slon, slat, olon, olat)
    def skyoffset_frame(self, rotation=None):
        """
        Return a `~astropy.coordinates.SkyOffsetFrame` with this coordinate
        as its origin.

        Parameters
        ----------
        rotation : angle-like, optional
            Passed through as the frame's ``rotation`` attribute.

        Returns
        -------
        `~astropy.coordinates.SkyOffsetFrame`
            The offset frame centered on this coordinate.
        """
        return SkyOffsetFrame(origin=self, rotation=rotation)
def get_constellation(self, short_name=False, constellation_list='iau'):
from .funcs import get_constellation
# because of issue #7028, the conversion to a PrecessedGeocentric
# system fails in some cases. Work around is to drop the velocities.
# they are not needed here since only position infromation is used
extra_frameattrs = {nm: getattr(self, nm)
for nm in self._extra_frameattr_names}
novel = SkyCoord(self.realize_frame(self.data.without_differentials()),
**extra_frameattrs)
return get_constellation(novel, short_name, constellation_list)
# the simpler version below can be used when gh-issue #7028 is resolved
# return get_constellation(self, short_name, constellation_list)
# WCS pixel to/from sky conversions
    def to_pixel(self, wcs, origin=0, mode='all'):
        """
        Convert this coordinate to pixel coordinates under a WCS.

        Thin delegation to `~astropy.wcs.utils.skycoord_to_pixel`.

        Parameters
        ----------
        wcs : `~astropy.wcs.WCS`
            The WCS defining the sky-to-pixel mapping.
        origin : int, optional
            Pixel-origin convention (0 or 1), passed through.
        mode : str, optional
            WCS conversion mode (e.g. 'all'), passed through.

        Returns
        -------
        The pixel coordinates, as returned by ``skycoord_to_pixel``.
        """
        from astropy.wcs.utils import skycoord_to_pixel
        return skycoord_to_pixel(self, wcs=wcs, origin=origin, mode=mode)
    @classmethod
    def from_pixel(cls, xp, yp, wcs, origin=0, mode='all'):
        """
        Create a coordinate of this class from pixel coordinates under a WCS.

        Thin delegation to `~astropy.wcs.utils.pixel_to_skycoord`, with
        ``cls`` forwarded so subclasses get instances of themselves.

        Parameters
        ----------
        xp, yp : scalar or array-like
            Pixel coordinates.
        wcs : `~astropy.wcs.WCS`
            The WCS defining the pixel-to-sky mapping.
        origin : int, optional
            Pixel-origin convention (0 or 1), passed through.
        mode : str, optional
            WCS conversion mode (e.g. 'all'), passed through.

        Returns
        -------
        An instance of ``cls`` at the given pixel position.
        """
        from astropy.wcs.utils import pixel_to_skycoord
        return pixel_to_skycoord(xp, yp, wcs=wcs, origin=origin, mode=mode, cls=cls)
def contained_by(self, wcs, image=None, **kwargs):
if image is not None:
ymax, xmax = image.shape
else:
xmax, ymax = wcs._naxis
import warnings
with warnings.catch_warnings():
# Suppress warnings since they just mean we didn't find the coordinate
warnings.simplefilter("ignore")
try:
x, y = self.to_pixel(wcs, **kwargs)
except Exception:
return False
return (x < xmax) & (x > 0) & (y < ymax) & (y > 0)
    def radial_velocity_correction(self, kind='barycentric', obstime=None,
                                   location=None):
        """
        Compute the velocity correction that relates a radial velocity
        measured by an observer at ``location`` at time ``obstime`` to a
        barycentric or heliocentric radial velocity.

        Parameters
        ----------
        kind : str
            'barycentric' (default) or 'heliocentric'.
        obstime : `~astropy.time.Time`, optional
            Time of the observation; falls back to this coordinate's
            ``obstime`` frame attribute.
        location : `~astropy.coordinates.EarthLocation`, optional
            Observatory location; falls back to this coordinate's
            ``location`` attribute, or to ``obstime.location``.

        Returns
        -------
        `~astropy.units.Quantity`
            The velocity correction.  For 'barycentric' it is derived from
            the relativistic redshift ``zb``; for 'heliocentric' it is the
            observer velocity projected onto the target direction.

        Raises
        ------
        TypeError
            If no ``obstime`` or ``location`` can be determined.
        ValueError
            If ``location``/``obstime`` are supplied ambiguously (both as an
            argument and as an attribute), or ``kind`` is unrecognized.
        """
        # Local import to avoid a circular dependency with solar_system.
        from .solar_system import get_body_barycentric_posvel

        # --- Resolve the observer location, rejecting ambiguous input. ---
        timeloc = getattr(obstime, 'location', None)
        if location is None:
            if self.location is not None:
                location = self.location
                if timeloc is not None:
                    raise ValueError('`location` cannot be in both the '
                                     'passed-in `obstime` and this `SkyCoord` '
                                     'because it is ambiguous which is meant '
                                     'for the radial_velocity_correction.')
            elif timeloc is not None:
                location = timeloc
            else:
                raise TypeError('Must provide a `location` to '
                                'radial_velocity_correction, either as a '
                                'SkyCoord frame attribute, as an attribute on '
                                'the passed in `obstime`, or in the method '
                                'call.')
        elif self.location is not None or timeloc is not None:
            raise ValueError('Cannot compute radial velocity correction if '
                             '`location` argument is passed in and there is '
                             'also a `location` attribute on this SkyCoord or '
                             'the passed-in `obstime`.')

        # --- Resolve the observation time; apply space motion if possible. ---
        # Default: evaluate at this coordinate's own epoch (no space motion).
        coo_at_rv_obstime = self
        if obstime is None:
            obstime = self.obstime
            if obstime is None:
                raise TypeError('Must provide an `obstime` to '
                                'radial_velocity_correction, either as a '
                                'SkyCoord frame attribute or in the method '
                                'call.')
        elif self.obstime is not None and self.frame.data.differentials:
            # Both epochs and space motion are available: propagate the
            # position to the requested obstime.
            coo_at_rv_obstime = self.apply_space_motion(obstime)
        elif self.obstime is None:
            # Space motion exists but this coordinate has no epoch, so it
            # cannot be propagated — warn that small errors may result.
            if 's' in self.data.differentials:
                warnings.warn(
                    "SkyCoord has space motion, and therefore the specified "
                    "position of the SkyCoord may not be the same as "
                    "the `obstime` for the radial velocity measurement. "
                    "This may affect the rv correction at the order of km/s"
                    "for very high proper motions sources. If you wish to "
                    "apply space motion of the SkyCoord to correct for this"
                    "the `obstime` attribute of the SkyCoord must be set",
                    AstropyUserWarning
                )

        # --- Observer velocity relative to the requested origin. ---
        pos_earth, v_earth = get_body_barycentric_posvel('earth', obstime)
        if kind == 'barycentric':
            v_origin_to_earth = v_earth
        elif kind == 'heliocentric':
            v_sun = get_body_barycentric_posvel('sun', obstime)[1]
            v_origin_to_earth = v_earth - v_sun
        else:
            raise ValueError("`kind` argument to radial_velocity_correction must "
                             "be 'barycentric' or 'heliocentric', but got "
                             "'{}'".format(kind))

        # Geocentric position/velocity of the observatory itself.
        gcrs_p, gcrs_v = location.get_gcrs_posvel(obstime)

        # --- Unit vector towards the target in ICRS. ---
        # include aberration (or light deflection)? Instead, only apply parallax if necessary
        icrs_cart = coo_at_rv_obstime.icrs.cartesian
        icrs_cart_novel = icrs_cart.without_differentials()
        if self.data.__class__ is UnitSphericalRepresentation:
            # No distance available, so no parallax correction is possible.
            targcart = icrs_cart_novel
        else:
            # skycoord has distances so apply parallax
            obs_icrs_cart = pos_earth + gcrs_p
            targcart = icrs_cart_novel - obs_icrs_cart
            targcart /= targcart.norm()

        if kind == 'barycentric':
            # Observer velocity as a fraction of c, and its Lorentz factor.
            beta_obs = (v_origin_to_earth + gcrs_v) / speed_of_light
            gamma_obs = 1 / np.sqrt(1 - beta_obs.norm()**2)
            gr = location.gravitational_redshift(obstime)
            # barycentric redshift according to eq 28 in Wright & Eastmann (2014),
            # neglecting Shapiro delay and effects of the star's own motion
            zb = gamma_obs * (1 + beta_obs.dot(targcart)) / (1 + gr/speed_of_light)

            # Apply the star's own motion term when velocity data exists and
            # is complete enough to form a 3D space velocity.
            if icrs_cart.differentials:
                try:
                    ro = self.icrs.cartesian
                    beta_star = ro.differentials['s'].to_cartesian() / speed_of_light
                    ro = ro.without_differentials()
                    ro /= ro.norm()
                    zb *= (1 + beta_star.dot(ro)) / (1 + beta_star.dot(targcart))
                except u.UnitConversionError:
                    warnings.warn("SkyCoord contains some velocity information, but not enough to "
                                  "calculate the full space motion of the source, and so this has "
                                  "been ignored for the purposes of calculating the radial velocity "
                                  "correction. This can lead to errors on the order of metres/second.",
                                  AstropyUserWarning)

            # Convert the redshift to a velocity correction.
            zb = zb - 1
            return zb * speed_of_light
        else:
            # do a simpler correction ignoring time dilation and gravitational redshift
            # this is adequate since Heliocentric corrections shouldn't be used if
            # cm/s precision is required.
            return targcart.dot(v_origin_to_earth + gcrs_v)
# Table interactions
    @classmethod
    def guess_from_table(cls, table, **coord_kwargs):
        """
        Construct a coordinate of this class by guessing which columns of
        ``table`` hold the coordinate components of the target frame.

        For each representation component of the frame (e.g. ``ra``,
        ``dec``), the table's column names are scanned for a name that
        contains the component name as a distinct token.  Exactly one
        matching column per component is allowed.

        Parameters
        ----------
        table : table-like with ``colnames`` and column access
            The table to extract coordinate columns from.
        **coord_kwargs
            Extra keyword arguments forwarded to the class constructor;
            may include ``frame`` and frame attributes.

        Returns
        -------
        An instance of ``cls`` built from the matched columns.

        Raises
        ------
        ValueError
            If a component matches more than one column, or a matched
            column collides with an explicitly passed keyword.
        """
        # Determine the target frame (consuming frame-related kwargs), and
        # record it back into coord_kwargs unless one was given explicitly.
        _frame_cls, _frame_kwargs = _get_frame_without_data([], coord_kwargs)
        frame = _frame_cls(**_frame_kwargs)
        coord_kwargs['frame'] = coord_kwargs.get('frame', frame)

        comp_kwargs = {}
        for comp_name in frame.representation_component_names:
            # Match the component name only as a whole token: this matches
            # names like 'ra' or 'ra_deg' but *not* 'rad'.
            starts_with_comp = comp_name + r'(\W|\b|_)'
            ends_with_comp = r'.*(\W|\b|_)' + comp_name + r'\b'
            rex = re.compile('(' + starts_with_comp + ')|(' + ends_with_comp + ')',
                             re.IGNORECASE | re.UNICODE)
            for col_name in table.colnames:
                if rex.match(col_name):
                    # A second match for the same component is ambiguous.
                    if comp_name in comp_kwargs:
                        oldname = comp_kwargs[comp_name].name
                        msg = ('Found at least two matches for component "{0}"'
                               ': "{1}" and "{2}". Cannot continue with this '
                               'ambiguity.')
                        raise ValueError(msg.format(comp_name, oldname, col_name))
                    comp_kwargs[comp_name] = table[col_name]

        # Merge matched columns into the constructor kwargs, refusing to
        # silently override anything the caller passed explicitly.
        for k, v in comp_kwargs.items():
            if k in coord_kwargs:
                raise ValueError('Found column "{}" in table, but it was '
                                 'already provided as "{}" keyword to '
                                 'guess_from_table function.'.format(v.name, k))
            else:
                coord_kwargs[k] = v

        return cls(**coord_kwargs)
@classmethod
def from_name(cls, name, frame='icrs', parse=False, cache=True):
from .name_resolve import get_icrs_coordinates
icrs_coord = get_icrs_coordinates(name, parse, cache=cache)
icrs_sky_coord = cls(icrs_coord)
if frame in ('icrs', icrs_coord.__class__):
return icrs_sky_coord
else:
return icrs_sky_coord.transform_to(frame)
| true | true |
f713e7b42f1da89d365885b3f25e6ebfed8d99bf | 211 | py | Python | chevah/compat/tests/normal/testing/__init__.py | chevah/compat | d22e5f551a628f8a1652c9f2eea306e17930cb8f | [
"BSD-3-Clause"
] | 5 | 2016-12-03T22:54:50.000Z | 2021-11-17T11:17:39.000Z | chevah/compat/tests/normal/testing/__init__.py | chevah/compat | d22e5f551a628f8a1652c9f2eea306e17930cb8f | [
"BSD-3-Clause"
] | 76 | 2015-01-22T16:00:31.000Z | 2022-02-09T22:13:34.000Z | chevah/compat/tests/normal/testing/__init__.py | chevah/compat | d22e5f551a628f8a1652c9f2eea306e17930cb8f | [
"BSD-3-Clause"
] | 1 | 2016-12-10T15:57:31.000Z | 2016-12-10T15:57:31.000Z | # Copyright (c) 2012 Adi Roiban.
# See LICENSE for details.
"""
Unit tests for empirical package.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
| 23.444444 | 38 | 0.796209 |
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.