hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
96136b4925cce41dbcc2a3f619431ef24032a9c9 | 280 | py | Python | pypassage/__init__.py | col16/pypassage | 25cef12307b2a9a84d6f8f4bdd57938f6b229cb5 | [
"0BSD"
] | 8 | 2015-01-09T21:56:03.000Z | 2021-02-06T14:37:39.000Z | pypassage/__init__.py | joshpetit/pypassage | 70d4246c39ed9ab85ad8f4f599de5a842226a149 | [
"0BSD"
] | 2 | 2020-09-27T21:27:14.000Z | 2021-03-06T00:13:33.000Z | pypassage/__init__.py | joshpetit/pypassage | 70d4246c39ed9ab85ad8f4f599de5a842226a149 | [
"0BSD"
] | 3 | 2019-10-08T20:10:41.000Z | 2021-05-31T12:10:16.000Z | from .reference import Passage
from .reference import PassageCollection
from .reference import PassageDelta
from .reference import InvalidPassageException
from .reference import book_total_verses
from .reference import get_passage_text
from .reference import passages_from_string
| 35 | 46 | 0.875 | 34 | 280 | 7.029412 | 0.411765 | 0.380753 | 0.556485 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 280 | 7 | 47 | 40 | 0.948413 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.857143 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
8251e6a966590ea1f93b3ef128de59d8e7d2d519 | 727 | py | Python | web3/utils/toolz/__init__.py | pjryan93/web3.py | e066452a7b0e78d6cb8a9462532d169de901ef99 | [
"MIT"
] | 10 | 2020-11-20T21:17:19.000Z | 2021-08-07T07:33:46.000Z | web3/utils/toolz/__init__.py | pjryan93/web3.py | e066452a7b0e78d6cb8a9462532d169de901ef99 | [
"MIT"
] | 12 | 2020-09-18T05:46:27.000Z | 2021-12-24T09:38:31.000Z | web3/utils/toolz/__init__.py | pjryan93/web3.py | e066452a7b0e78d6cb8a9462532d169de901ef99 | [
"MIT"
] | 16 | 2019-02-28T03:21:14.000Z | 2021-07-15T06:49:39.000Z | try:
from cytoolz import (
assoc,
complement,
compose,
concat,
curry,
dicttoolz,
dissoc,
excepts,
functoolz,
groupby,
identity,
itertoolz,
merge,
partial,
pipe,
sliding_window,
valfilter,
valmap,
)
except ImportError:
from toolz import ( # noqa: F401
assoc,
complement,
compose,
concat,
curry,
dicttoolz,
dissoc,
excepts,
functoolz,
groupby,
identity,
itertoolz,
merge,
partial,
pipe,
sliding_window,
valfilter,
valmap,
)
| 16.906977 | 37 | 0.448418 | 49 | 727 | 6.612245 | 0.571429 | 0.092593 | 0.135802 | 0.17284 | 0.814815 | 0.814815 | 0.814815 | 0.814815 | 0.814815 | 0.814815 | 0 | 0.008108 | 0.491059 | 727 | 42 | 38 | 17.309524 | 0.867568 | 0.013755 | 0 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.071429 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
826c3d095aedbe7671acc7aac4a1497cf5647164 | 36,996 | py | Python | TWLight/resources/migrations/0063_auto_20190220_1639_squashed_0084_auto_20201019_1310.py | sahilgrewal8072/TWLight | 3f2c9a6e0e5812c4094c7b5e90dd5b0da5bc6d3c | [
"MIT"
] | null | null | null | TWLight/resources/migrations/0063_auto_20190220_1639_squashed_0084_auto_20201019_1310.py | sahilgrewal8072/TWLight | 3f2c9a6e0e5812c4094c7b5e90dd5b0da5bc6d3c | [
"MIT"
] | null | null | null | TWLight/resources/migrations/0063_auto_20190220_1639_squashed_0084_auto_20201019_1310.py | sahilgrewal8072/TWLight | 3f2c9a6e0e5812c4094c7b5e90dd5b0da5bc6d3c | [
"MIT"
] | null | null | null | # Generated by Django 3.0.11 on 2021-01-31 06:26
import TWLight.resources.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0002_remove_content_type_name"),
("resources", "0001_initial_squashed_0062_auto_20190220_1639"),
("users", "0038_squashed_0067_remove_editor_editcounts"),
]
operations = [
migrations.AddField(
model_name="accesscode",
name="authorization",
field=models.OneToOneField(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="accesscodes",
to="users.Authorization",
),
),
migrations.AddField(
model_name="partner",
name="user_instructions",
field=models.TextField(
blank=True,
help_text="Optional instructions for editors to use access codes or free signup URLs for this partner. Sent via email upon application approval (for links) or access code assignment. If this partner has collections, fill out user instructions on each collection instead.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="authorization_method",
field=models.IntegerField(
choices=[
(0, "Email"),
(1, "Access codes"),
(2, "Proxy"),
(3, "Library Bundle"),
(4, "Link"),
],
default=0,
help_text="Which authorization method does this collection use? 'Email' means the accounts are set up via email, and is the default. Select 'Access Codes' if we send individual, or group, login details or access codes. 'Proxy' means access delivered directly via EZProxy, and Library Bundle is automated proxy-based access. 'Link' is if we send users a URL to use to create an account.",
),
),
migrations.AddField(
model_name="stream",
name="user_instructions",
field=models.TextField(
blank=True,
help_text="Optional instructions for editors to use access codes or free signup URLs for this collection. Sent via email upon application approval (for links) or access code assignment.",
null=True,
),
),
migrations.AlterField(
model_name="partner",
name="authorization_method",
field=models.IntegerField(
choices=[
(0, "Email"),
(1, "Access codes"),
(2, "Proxy"),
(3, "Library Bundle"),
(4, "Link"),
],
default=0,
help_text="Which authorization method does this partner use? 'Email' means the accounts are set up via email, and is the default. Select 'Access Codes' if we send individual, or group, login details or access codes. 'Proxy' means access delivered directly via EZProxy, and Library Bundle is automated proxy-based access. 'Link' is if we send users a URL to use to create an account.",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_eo",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_eo",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_eo",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_eo",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="send_instructions_vi",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_vi",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_vi",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_vi",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="send_instructions_uk",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_uk",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_uk",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_uk",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="account_length",
field=models.DurationField(
blank=True,
help_text="The standard length of an access grant from this Partner. Entered as <days hours:minutes:seconds>.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_en_gb",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_en_gb",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_en_gb",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_en_gb",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="send_instructions_ja",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_ja",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_ja",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_ja",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AlterField(
model_name="partner",
name="accounts_available",
field=models.PositiveSmallIntegerField(
blank=True,
help_text="Add the number of new accounts to the existing value, not by resetting it to zero. If 'specific stream' is true, change accounts availability at the collection level.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_es",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_es",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_es",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_es",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="target_url",
field=models.URLField(
blank=True,
help_text="Link to partner resources. Required for proxied resources; optional otherwise.",
null=True,
),
),
migrations.AddField(
model_name="stream",
name="target_url",
field=models.URLField(
blank=True,
help_text="Link to collection. Required for proxied collections; optional otherwise.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="requested_access_duration",
field=models.BooleanField(
default=False,
help_text="Must be checked if the authorization method of this partner is proxy; optional otherwise.",
),
),
migrations.AlterField(
model_name="contact",
name="short_name",
field=models.CharField(
help_text="The form of the contact person's name to use in email greetings (as in 'Hi Jake')",
max_length=15,
),
),
migrations.AlterField(
model_name="partner",
name="description",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_ar",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_br",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_da",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_de",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_en",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="description_en_gb",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="description_eo",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="description_es",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_fa",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_fi",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_fr",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_hi",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="description_ja",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_ko",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_mk",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_mr",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_my",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_pt",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_pt_br",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_ru",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_sv",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_ta",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_tr",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="description_uk",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="description_vi",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_zh_hans",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="description_zh_hant",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AlterField(
model_name="partner",
name="mutually_exclusive",
field=models.NullBooleanField(
default=None,
help_text="If True, users can only apply for one Stream at a time from this Partner. If False, users can apply for multiple Streams at a time. This field must be filled in when Partners have multiple Streams, but may be left blank otherwise.",
),
),
migrations.AlterField(
model_name="partnerlogo",
name="logo",
field=models.ImageField(
blank=True,
help_text="Optional image file that can be used to represent this partner.",
null=True,
upload_to="",
),
),
migrations.AlterField(
model_name="partner",
name="company_name",
field=models.CharField(
help_text="Partner's name (e.g. McFarland). Note: this will be user-visible and *not translated*.",
max_length=255,
),
),
migrations.AddField(
model_name="textfieldtag",
name="meta_url",
field=models.URLField(
blank=True,
help_text="Link to Meta-Wiki (eg.: https://meta.wikimedia.org/wiki/The_Wikipedia_Library/Collections/Agroforestry) for additional information for this tag.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="description_ro",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_ro",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_ro",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_ro",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_ro",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="partner",
name="description_pl",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_pl",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_pl",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_pl",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_pl",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
migrations.AddField(
model_name="accesscode",
name="partner",
field=models.ForeignKey(
limit_choices_to=models.Q(authorization_method=1),
on_delete=django.db.models.deletion.CASCADE,
related_name="accesscodes",
to="resources.Partner",
),
),
migrations.AlterField(
model_name="language",
name="language",
field=models.CharField(
choices=[
("af", "Afrikaans"),
("ar", "العربية"),
("ast", "asturianu"),
("az", "az-latn"),
("be", "беларуская"),
("bg", "български"),
("bn", "বাংলা"),
("br", "brezhoneg"),
("bs", "bosanski"),
("ca", "català"),
("cs", "čeština"),
("cy", "Cymraeg"),
("da", "dansk"),
("de", "Deutsch"),
("dsb", "dolnoserbski"),
("el", "Ελληνικά"),
("en", "English"),
("en-gb", "British English"),
("eo", "Esperanto"),
("es", "español"),
("es-ni", "español nicaragüense"),
("et", "eesti"),
("eu", "euskara"),
("fa", "فارسی"),
("fi", "suomi"),
("fr", "français"),
("fy", "Frysk"),
("ga", "Gaeilge"),
("gd", "Gàidhlig"),
("gl", "galego"),
("he", "עברית"),
("hi", "हिन्दी"),
("hr", "hrvatski"),
("hsb", "hornjoserbsce"),
("hu", "magyar"),
("hy", "Հայերեն"),
("ia", "interlingua"),
("id", "Bahasa Indonesia"),
("io", "Ido"),
("is", "íslenska"),
("it", "italiano"),
("ja", "日本語"),
("ka", "ქართული"),
("kab", "Taqbaylit"),
("kk", "kk-cyrl"),
("km", "ភាសាខ្មែរ"),
("kn", "ಕನ್ನಡ"),
("ko", "한국어"),
("lb", "Lëtzebuergesch"),
("lt", "lietuvių"),
("lv", "latviešu"),
("mk", "македонски"),
("ml", "മലയാളം"),
("mn", "монгол"),
("mr", "मराठी"),
("my", "မြန်မာဘာသာ"),
("nb", "norsk (bokmål)"),
("ne", "नेपाली"),
("nl", "Nederlands"),
("nn", "norsk (nynorsk)"),
("os", "Ирон"),
("pa", "pa-guru"),
("pl", "polski"),
("pt", "português"),
("pt-br", "português do Brasil"),
("ro", "română"),
("ru", "русский"),
("sk", "slovenčina"),
("sl", "slovenščina"),
("sq", "shqip"),
("sr", "sr-cyrl"),
("sr-latn", "srpski"),
("sv", "svenska"),
("sw", "Kiswahili"),
("ta", "தமிழ்"),
("te", "తెలుగు"),
("th", "ไทย"),
("tr", "Türkçe"),
("tt", "татарча"),
("udm", "удмурт"),
("uk", "українська"),
("ur", "اردو"),
("uz", "oʻzbekcha"),
("vi", "Tiếng Việt"),
("zh-hans", "中文(简体)"),
("zh-hant", "中文(繁體)"),
],
max_length=8,
unique=True,
validators=[TWLight.resources.models.validate_language_code],
),
),
migrations.AlterField(
model_name="taggedtextfield",
name="content_type",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="resources_taggedtextfield_tagged_items",
to="contenttypes.ContentType",
verbose_name="content type",
),
),
migrations.AlterField(
model_name="taggedtextfield",
name="object_id",
field=models.IntegerField(db_index=True, verbose_name="object ID"),
),
migrations.AddField(
model_name="partner",
name="description_ind",
field=models.TextField(
blank=True,
help_text="Optional detailed description in addition to the short description such as collections, instructions, notes, special requirements, alternate access options, unique features, citations notes.",
null=True,
verbose_name="long description",
),
),
migrations.AddField(
model_name="partner",
name="send_instructions_ind",
field=models.TextField(
blank=True,
help_text="Optional instructions for sending application data to this partner.",
null=True,
),
),
migrations.AddField(
model_name="partner",
name="short_description_ind",
field=models.TextField(
blank=True,
help_text="Optional short description of this partner's resources.",
max_length=1000,
null=True,
),
),
migrations.AddField(
model_name="stream",
name="description_ind",
field=models.TextField(
blank=True,
help_text="Optional description of this stream's resources.",
null=True,
),
),
migrations.AddField(
model_name="textfieldtag",
name="name_ind",
field=models.TextField(max_length=100, null=True, verbose_name="Name"),
),
]
| 41.475336 | 403 | 0.527165 | 3,316 | 36,996 | 5.78076 | 0.141737 | 0.040378 | 0.071991 | 0.058532 | 0.844332 | 0.838854 | 0.817674 | 0.81324 | 0.801189 | 0.797799 | 0 | 0.005645 | 0.3775 | 36,996 | 891 | 404 | 41.521886 | 0.825488 | 0.001243 | 0 | 0.777401 | 1 | 0.046328 | 0.363078 | 0.00858 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.00339 | 0 | 0.00678 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
827b47ee10a6f1f4aea5a0aca0695a6b57b615a7 | 24,712 | py | Python | tests/unittests/core/io/interactive_commands/test_branching_prompt.py | aimar1986bupt/orion | 6d217af1f9002aa671f8a3260a687c540ca5336d | [
"BSD-3-Clause"
] | 4 | 2019-09-02T19:41:04.000Z | 2020-04-07T13:05:47.000Z | tests/unittests/core/io/interactive_commands/test_branching_prompt.py | aimar1986bupt/orion | 6d217af1f9002aa671f8a3260a687c540ca5336d | [
"BSD-3-Clause"
] | 2 | 2018-06-26T19:17:09.000Z | 2022-02-23T13:40:04.000Z | tests/unittests/core/io/interactive_commands/test_branching_prompt.py | aimar1986bupt/orion | 6d217af1f9002aa671f8a3260a687c540ca5336d | [
"BSD-3-Clause"
] | 2 | 2019-08-26T11:36:47.000Z | 2020-04-07T13:05:48.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Collection of tests for :mod:`orion.core.io.interactive_commands.branching_prompt`."""
import shlex
import pytest
from orion.core import evc
from orion.core.io.experiment_branch_builder import ExperimentBranchBuilder
from orion.core.io.interactive_commands.branching_prompt import BranchingPrompt
from orion.core.io.space_builder import DimensionBuilder
@pytest.fixture
def new_cat_dimension_conflict(old_config, new_config):
    """Build a new-dimension conflict whose prior is categorical."""
    cat_prior = 'choices(["hello", 2])'
    cat_dimension = DimensionBuilder().build('new-cat', cat_prior)
    return evc.conflicts.NewDimensionConflict(
        old_config, new_config, cat_dimension, cat_prior)
@pytest.fixture
def missing_conflict_with_identical_prior(old_config, new_config, new_dimension_conflict):
    """Build a missing-dimension conflict sharing its prior with the new-dim conflict."""
    shared_prior = new_dimension_conflict.prior
    missing_dimension = DimensionBuilder().build('missing-idem', shared_prior)
    return evc.conflicts.MissingDimensionConflict(
        old_config, new_config, missing_dimension, shared_prior)
@pytest.fixture
def missing_cat_dimension_conflict(old_config, new_config):
    """Build a missing-dimension conflict whose prior is categorical."""
    cat_prior = 'choices(["goodbye", 5])'
    cat_dimension = DimensionBuilder().build('missing-cat', cat_prior)
    return evc.conflicts.MissingDimensionConflict(
        old_config, new_config, cat_dimension, cat_prior)
@pytest.fixture
def conflicts(new_dimension_conflict, new_cat_dimension_conflict,
              changed_dimension_conflict,
              missing_dimension_conflict, missing_cat_dimension_conflict,
              missing_conflict_with_identical_prior,
              algorithm_conflict, code_conflict, cli_conflict, config_conflict,
              experiment_name_conflict):
    """Build a Conflicts container holding one conflict of each type.

    Registration order matters to tests indexing ``get_resolved()``, so the
    tuple below preserves the historical order (config before cli).
    """
    container = evc.conflicts.Conflicts()
    all_conflicts = (
        new_dimension_conflict,
        new_cat_dimension_conflict,
        changed_dimension_conflict,
        missing_dimension_conflict,
        missing_cat_dimension_conflict,
        missing_conflict_with_identical_prior,
        algorithm_conflict,
        code_conflict,
        config_conflict,
        cli_conflict,
        experiment_name_conflict,
    )
    for conflict in all_conflicts:
        container.register(conflict)
    return container
@pytest.fixture
def branch_builder(conflicts):
    """Instantiate an ExperimentBranchBuilder in manual-resolution mode."""
    options = {'manual_resolution': True}
    return ExperimentBranchBuilder(conflicts, options)
@pytest.fixture
def branch_solver_prompt(branch_builder):
    """Wrap the branch builder in an interactive BranchingPrompt."""
    prompt = BranchingPrompt(branch_builder)
    return prompt
class TestCommands(object):
    """Test the commands of the prompt.

    Each test drives ``BranchingPrompt.do_*`` commands directly and verifies
    the effect on the ``conflicts`` container (number of resolved conflicts)
    and, through ``capsys``, the error messages printed on stdout/stderr.
    """

    @staticmethod
    def _reset_first_resolution(conflicts, branch_solver_prompt, template="'{}'"):
        """Issue the prompt's `reset` command for the first resolved conflict.

        The resolution string is quoted so it is parsed as a single positional
        argument. `template` lets callers prepend a space inside the quotes
        when the resolution string starts with `--`, which argparse would
        otherwise interpret as an option rather than a positional string.
        """
        resolution = str(conflicts.get_resolved()[0].resolution)
        branch_solver_prompt.do_reset(template.format(resolution))

    def test_add_dim(self, conflicts, branch_solver_prompt):
        """Verify that dimension is added properly"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new")
        assert len(conflicts.get_resolved()) == 1

    def test_add_bad_dim(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given when dimension does not exist"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("idontexist")
        out, _ = capsys.readouterr()
        assert "Dimension name 'idontexist' not found in conflicts" in out
        assert len(conflicts.get_resolved()) == 0

    def test_add_dim_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_add("new")
        out, _ = capsys.readouterr()
        assert "Dimension name 'new' not found in conflicts" in out
        assert len(conflicts.get_resolved()) == 1

    def test_add_dim_with_default(self, conflicts, branch_solver_prompt):
        """Verify that dimension is added with given default value"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new --default-value=9")
        assert len(conflicts.get_resolved()) == 1
        assert conflicts.get_resolved()[0].resolution.default_value == 9

    def test_add_dim_with_bad_default(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given for bad default value"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new --default-value='bad'")
        out, _ = capsys.readouterr()
        assert "could not convert string to float: 'bad'" in out
        assert len(conflicts.get_resolved()) == 0

    def test_add_dim_with_cat(self, conflicts, branch_solver_prompt):
        """Verify that categorical dimensions is added properly"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new_cat")
        assert len(conflicts.get_resolved()) == 1

    def test_add_dim_with_cat_default(self, conflicts, branch_solver_prompt):
        """Verify that string and non-string default value are properly parsed"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new_cat --default-value='hello'")
        assert len(conflicts.get_resolved()) == 1
        assert conflicts.get_resolved()[0].resolution.default_value == 'hello'
        # Revert and retry with the non-string category of the same prior.
        conflicts.revert(conflicts.get_resolved()[0].resolution)
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new_cat --default-value=2")
        assert len(conflicts.get_resolved()) == 1
        assert conflicts.get_resolved()[0].resolution.default_value == 2

    def test_add_dim_with_cat_bad_default(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given for default value of invalid category"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new_cat --default-value='bad'")
        out, _ = capsys.readouterr()
        assert "Invalid category: bad" in out
        assert len(conflicts.get_resolved()) == 0

    def test_reset_add(self, conflicts, branch_solver_prompt):
        """Verify that addition resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("new")
        assert len(conflicts.get_resolved()) == 1
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0

    def test_change_dim(self, conflicts, branch_solver_prompt):
        """Verify that changed resolution is created"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("changed")
        assert len(conflicts.get_resolved()) == 1

    def test_change_dim_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("changed")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_add("changed")
        out, _ = capsys.readouterr()
        assert "Dimension name 'changed' not found in conflicts" in out
        assert len(conflicts.get_resolved()) == 1

    def test_reset_change(self, conflicts, branch_solver_prompt):
        """Verify that changed resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_add("changed")
        assert len(conflicts.get_resolved()) == 1
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0

    def test_remove_dim(self, conflicts, branch_solver_prompt):
        """Verify that missing dimension is removed"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing")
        assert len(conflicts.get_resolved()) == 1

    def test_remove_bad_dim(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given for non existing dimension"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("idontexist")
        out, _ = capsys.readouterr()
        assert "Dimension name 'idontexist' not found in conflicts" in out
        assert len(conflicts.get_resolved()) == 0

    def test_remove_dim_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_remove("missing")
        out, _ = capsys.readouterr()
        assert "Dimension name 'missing' not found in conflicts" in out
        assert len(conflicts.get_resolved()) == 1

    def test_remove_dim_with_default(self, conflicts, branch_solver_prompt):
        """Verify that default value is properly parsed for remove resolution"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing --default-value=9")
        assert len(conflicts.get_resolved()) == 1
        assert conflicts.get_resolved()[0].resolution.default_value == 9

    def test_remove_dim_with_bad_default(self, capsys, conflicts, branch_solver_prompt):
        """Verify error message is given when default value is invalid"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing --default-value='bad'")
        out, _ = capsys.readouterr()
        assert "could not convert string to float: 'bad'" in out
        assert len(conflicts.get_resolved()) == 0

    def test_remove_dim_with_cat(self, conflicts, branch_solver_prompt):
        """Verify that missing categorical dimension is properly removed"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing_cat")
        assert len(conflicts.get_resolved()) == 1

    def test_remove_dim_with_cat_default(self, conflicts, branch_solver_prompt):
        """Verify that categorical default value is properly parsed for remove resolution"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing_cat --default-value='goodbye'")
        assert len(conflicts.get_resolved()) == 1
        assert conflicts.get_resolved()[0].resolution.default_value == 'goodbye'
        # Revert and retry with the non-string category of the same prior.
        conflicts.revert(conflicts.get_resolved()[0].resolution)
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing_cat --default-value=5")
        assert len(conflicts.get_resolved()) == 1
        assert conflicts.get_resolved()[0].resolution.default_value == 5

    def test_remove_dim_with_cat_bad_default(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given for default value of invalid category"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing_cat --default-value='bad'")
        out, _ = capsys.readouterr()
        assert "Invalid category: bad" in out
        assert len(conflicts.get_resolved()) == 0

    def test_reset_remove(self, conflicts, branch_solver_prompt):
        """Verify that remove resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_remove("missing")
        assert len(conflicts.get_resolved()) == 1
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0

    def test_rename_dim(self, conflicts, branch_solver_prompt):
        """Verify that rename resolution is properly created"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        branch_solver_prompt.do_rename("missing_idem new")
        # Renaming resolves both the missing and the new dimension conflicts.
        assert len(conflicts.get_resolved()) == 2
        assert len(conflicts.get()) == 11

    def test_rename_and_change_dim(self, conflicts, branch_solver_prompt):
        """Verify that rename resolution is created and a new side-effect conflict added"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        branch_solver_prompt.do_rename("missing new")
        # Priors differ, so the rename spawns an additional changed-dimension conflict.
        assert len(conflicts.get_resolved()) == 2
        assert len(conflicts.get()) == 12

    def test_rename_bad_dim(self, capsys, conflicts, branch_solver_prompt):
        """Verify error messages when attempting invalid renamings"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        # Source of a rename must be a missing dimension, not a new one.
        branch_solver_prompt.do_rename("new_cat new")
        out, _ = capsys.readouterr()
        assert "Dimension name 'new_cat' not found in conflicts" in out
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        # Target of a rename must be a new dimension, not a missing one.
        branch_solver_prompt.do_rename("missing missing_cat")
        out, _ = capsys.readouterr()
        assert "Dimension name 'missing_cat' not found in conflicts" in out
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11

    def test_reset_rename_with_same_priors(self, conflicts, branch_solver_prompt):
        """Verify that rename resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        branch_solver_prompt.do_rename("missing_idem new")
        assert len(conflicts.get_resolved()) == 2
        assert len(conflicts.get()) == 11
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11

    def test_reset_rename_with_different_priors(self, conflicts, branch_solver_prompt):
        """Verify that rename resolution is reverted and side-effect conflit is removed"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        branch_solver_prompt.do_rename("missing new")
        assert len(conflicts.get_resolved()) == 2
        assert len(conflicts.get()) == 12
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11

    def test_set_code_change_type(self, conflicts, branch_solver_prompt):
        """Verify that code change resolution is created"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_code("break")
        assert len(conflicts.get_resolved()) == 1

    def test_set_code_change_bad_type(self, capsys, conflicts, branch_solver_prompt):
        """Verify error message when attempting code resolution with bad type"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_code("bad")
        # argparse reports invalid choices on stderr.
        _, err = capsys.readouterr()
        assert "invalid choice: 'bad'" in err
        assert len(conflicts.get_resolved()) == 0

    def test_set_code_change_type_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_code("break")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_code("noeffect")
        out, _ = capsys.readouterr()
        assert "No code conflicts to solve" in out
        assert len(conflicts.get_resolved()) == 1

    def test_reset_code(self, conflicts, branch_solver_prompt):
        """Verify that code resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_code("break")
        assert len(conflicts.get_resolved()) == 1
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0

    def test_set_algo(self, conflicts, branch_solver_prompt):
        """Verify that algo resolution is created"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_algo("")
        assert len(conflicts.get_resolved()) == 1

    def test_set_algo_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_algo("")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_algo("")
        out, _ = capsys.readouterr()
        assert "No algo conflict to solve" in out
        assert len(conflicts.get_resolved()) == 1

    def test_reset_algo(self, conflicts, branch_solver_prompt):
        """Verify that algo resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_algo("")
        assert len(conflicts.get_resolved()) == 1
        # The algo resolution string starts with `--`; the leading space keeps
        # argparse from parsing it as an option.
        self._reset_first_resolution(conflicts, branch_solver_prompt, template="' {}'")
        assert len(conflicts.get_resolved()) == 0

    def test_set_config_change_type(self, conflicts, branch_solver_prompt):
        """Verify that script's config resolution is created"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_config("break")
        assert len(conflicts.get_resolved()) == 1

    def test_set_config_change_bad_type(self, capsys, conflicts, branch_solver_prompt):
        """Verify error message when attempting config resolution with bad type"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_config("bad")
        # argparse reports invalid choices on stderr.
        _, err = capsys.readouterr()
        assert "invalid choice: 'bad'" in err
        assert len(conflicts.get_resolved()) == 0

    def test_set_config_change_type_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_config("break")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_config("noeffect")
        out, _ = capsys.readouterr()
        assert "No script's config conflicts to solve" in out
        assert len(conflicts.get_resolved()) == 1

    def test_reset_config(self, conflicts, branch_solver_prompt):
        """Verify that script's config resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_config("break")
        assert len(conflicts.get_resolved()) == 1
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0

    def test_set_commandline_change_type(self, conflicts, branch_solver_prompt):
        """Verify that cli resolution is created"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_commandline("break")
        assert len(conflicts.get_resolved()) == 1

    def test_set_commandline_change_bad_type(self, capsys, conflicts, branch_solver_prompt):
        """Verify error message when attempting cli resolution with bad type"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_commandline("bad")
        # argparse reports invalid choices on stderr.
        _, err = capsys.readouterr()
        assert "invalid choice: 'bad'" in err
        assert len(conflicts.get_resolved()) == 0

    def test_set_commandline_change_type_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_commandline("break")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_commandline("noeffect")
        out, _ = capsys.readouterr()
        assert "No command line conflicts to solve" in out
        assert len(conflicts.get_resolved()) == 1

    def test_reset_commandline(self, conflicts, branch_solver_prompt):
        """Verify that cli resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_commandline("break")
        assert len(conflicts.get_resolved()) == 1
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0

    def test_set_experiment_name(self, conflicts, branch_solver_prompt):
        """Verify that experiment name resolution is created"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_name("new-name")
        assert len(conflicts.get_resolved()) == 1

    def test_set_experiment_bad_name(self, capsys, conflicts, branch_solver_prompt):
        """Verify error message when attempting experiment name resolution with bad name"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_name("test")
        out, _ = capsys.readouterr()
        assert "Experiment name 'test' already exist for user" in out
        assert len(conflicts.get_resolved()) == 0

    def test_set_experiment_name_twice(self, capsys, conflicts, branch_solver_prompt):
        """Verify that error message is given trying to solve twice the same conflict"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_name("new-name")
        assert len(conflicts.get_resolved()) == 1
        branch_solver_prompt.do_name("whatever")
        out, _ = capsys.readouterr()
        assert "No experiment name conflict to solve" in out
        assert len(conflicts.get_resolved()) == 1

    def test_reset_exp_name(self, conflicts, branch_solver_prompt):
        """Verify that experiment name resolution is reverted"""
        assert len(conflicts.get_resolved()) == 0
        branch_solver_prompt.do_name("new-name")
        assert len(conflicts.get_resolved()) == 1
        self._reset_first_resolution(conflicts, branch_solver_prompt)
        assert len(conflicts.get_resolved()) == 0

    def test_commit_wont_quit_if_not_solved(self, conflicts, branch_solver_prompt):
        """Verify that commit will not quit if some conflicts are not resolved"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        branch_solver_prompt.do_auto("")
        assert len(conflicts.get()) == 11
        assert len(conflicts.get_resolved()) == 10
        assert branch_solver_prompt.do_commit("") is False

    def test_commit_quit_if_solved(self, conflicts, branch_solver_prompt):
        """Verify that commit will quit when all conflicts are resolved"""
        branch_solver_prompt.do_auto("")
        # Keep only one resolved conflict so the container reports fully solved.
        conflicts.conflicts = [conflicts.get_resolved()[0]]
        assert conflicts.are_resolved
        assert branch_solver_prompt.do_commit("") is True

    def test_auto(self, conflicts, branch_solver_prompt):
        """Verify that all conflicts which requires not input are automatically resolved"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        branch_solver_prompt.do_auto("")
        assert len(conflicts.get()) == 11
        assert len(conflicts.get_resolved()) == 10

    def test_reset_many(self, conflicts, branch_solver_prompt):
        """Verify that all resolutions are reverted"""
        assert len(conflicts.get_resolved()) == 0
        assert len(conflicts.get()) == 11
        branch_solver_prompt.do_auto("")
        assert len(conflicts.get()) == 11
        assert len(conflicts.get_resolved()) == 10
        reset_strings = []
        for resolution in conflicts.get_resolutions():
            resolution_string = shlex.quote(str(resolution))
            # Otherwise --argument is interpreted as an argument by argparse rather than a
            # positional string
            if not resolution_string.startswith("'"):
                resolution_string = "' {}'".format(resolution_string)
            reset_strings.append(resolution_string)
        branch_solver_prompt.do_reset(" ".join(reset_strings))
        assert len(conflicts.get_resolved()) == 0
| 49.031746 | 99 | 0.692174 | 3,093 | 24,712 | 5.28904 | 0.064662 | 0.1181 | 0.155144 | 0.181001 | 0.85427 | 0.825723 | 0.816676 | 0.797176 | 0.766917 | 0.745033 | 0 | 0.009803 | 0.199174 | 24,712 | 503 | 100 | 49.129225 | 0.816827 | 0.144747 | 0 | 0.624324 | 0 | 0 | 0.070644 | 0.006335 | 0 | 0 | 0 | 0 | 0.459459 | 1 | 0.148649 | false | 0 | 0.016216 | 0 | 0.183784 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
82a04a20e9d3f8ac6b4a29759cb94b00aebe5c6f | 30,061 | py | Python | sdk/python/pulumi_aws/elasticache/global_replication_group.py | aamir-locus/pulumi-aws | 3e234b050129bde35d8e072a88bd608562f02142 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/elasticache/global_replication_group.py | aamir-locus/pulumi-aws | 3e234b050129bde35d8e072a88bd608562f02142 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/elasticache/global_replication_group.py | aamir-locus/pulumi-aws | 3e234b050129bde35d8e072a88bd608562f02142 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['GlobalReplicationGroupArgs', 'GlobalReplicationGroup']
@pulumi.input_type
class GlobalReplicationGroupArgs:
    # NOTE: auto-generated by the Pulumi Terraform Bridge (tfgen); the
    # @pulumi.input_type decorator introspects this exact getter/setter layout,
    # so the structure must not be hand-edited.
    def __init__(__self__, *,
                 global_replication_group_id_suffix: pulumi.Input[str],
                 primary_replication_group_id: pulumi.Input[str],
                 global_replication_group_description: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a GlobalReplicationGroup resource.
        :param pulumi.Input[str] global_replication_group_id_suffix: The suffix name of a Global Datastore. If `global_replication_group_id_suffix` is changed, creates a new resource.
        :param pulumi.Input[str] primary_replication_group_id: The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primary_replication_group_id` is changed, creates a new resource.
        :param pulumi.Input[str] global_replication_group_description: A user-created description for the global replication group.
        """
        # pulumi.set stores values through the @pulumi.input_type machinery
        # rather than plain attribute assignment.
        pulumi.set(__self__, "global_replication_group_id_suffix", global_replication_group_id_suffix)
        pulumi.set(__self__, "primary_replication_group_id", primary_replication_group_id)
        # Optional argument: only recorded when explicitly provided.
        if global_replication_group_description is not None:
            pulumi.set(__self__, "global_replication_group_description", global_replication_group_description)

    @property
    @pulumi.getter(name="globalReplicationGroupIdSuffix")
    def global_replication_group_id_suffix(self) -> pulumi.Input[str]:
        """
        The suffix name of a Global Datastore. If `global_replication_group_id_suffix` is changed, creates a new resource.
        """
        return pulumi.get(self, "global_replication_group_id_suffix")

    @global_replication_group_id_suffix.setter
    def global_replication_group_id_suffix(self, value: pulumi.Input[str]):
        pulumi.set(self, "global_replication_group_id_suffix", value)

    @property
    @pulumi.getter(name="primaryReplicationGroupId")
    def primary_replication_group_id(self) -> pulumi.Input[str]:
        """
        The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primary_replication_group_id` is changed, creates a new resource.
        """
        return pulumi.get(self, "primary_replication_group_id")

    @primary_replication_group_id.setter
    def primary_replication_group_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "primary_replication_group_id", value)

    @property
    @pulumi.getter(name="globalReplicationGroupDescription")
    def global_replication_group_description(self) -> Optional[pulumi.Input[str]]:
        """
        A user-created description for the global replication group.
        """
        return pulumi.get(self, "global_replication_group_description")

    @global_replication_group_description.setter
    def global_replication_group_description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "global_replication_group_description", value)
@pulumi.input_type
class _GlobalReplicationGroupState:
def __init__(__self__, *,
actual_engine_version: Optional[pulumi.Input[str]] = None,
arn: Optional[pulumi.Input[str]] = None,
at_rest_encryption_enabled: Optional[pulumi.Input[bool]] = None,
auth_token_enabled: Optional[pulumi.Input[bool]] = None,
cache_node_type: Optional[pulumi.Input[str]] = None,
cluster_enabled: Optional[pulumi.Input[bool]] = None,
engine: Optional[pulumi.Input[str]] = None,
global_replication_group_description: Optional[pulumi.Input[str]] = None,
global_replication_group_id: Optional[pulumi.Input[str]] = None,
global_replication_group_id_suffix: Optional[pulumi.Input[str]] = None,
primary_replication_group_id: Optional[pulumi.Input[str]] = None,
transit_encryption_enabled: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering GlobalReplicationGroup resources.
:param pulumi.Input[str] actual_engine_version: The full version number of the cache engine running on the members of this global replication group.
:param pulumi.Input[str] arn: The ARN of the ElastiCache Global Replication Group.
:param pulumi.Input[bool] at_rest_encryption_enabled: A flag that indicate whether the encryption at rest is enabled.
:param pulumi.Input[bool] auth_token_enabled: A flag that indicate whether AuthToken (password) is enabled.
:param pulumi.Input[str] cache_node_type: The instance class used. See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html).
:param pulumi.Input[bool] cluster_enabled: Indicates whether the Global Datastore is cluster enabled.
:param pulumi.Input[str] engine: The name of the cache engine to be used for the clusters in this global replication group.
:param pulumi.Input[str] global_replication_group_description: A user-created description for the global replication group.
:param pulumi.Input[str] global_replication_group_id: The full ID of the global replication group.
:param pulumi.Input[str] global_replication_group_id_suffix: The suffix name of a Global Datastore. If `global_replication_group_id_suffix` is changed, creates a new resource.
:param pulumi.Input[str] primary_replication_group_id: The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primary_replication_group_id` is changed, creates a new resource.
:param pulumi.Input[bool] transit_encryption_enabled: A flag that indicates whether the encryption in transit is enabled.
"""
if actual_engine_version is not None:
pulumi.set(__self__, "actual_engine_version", actual_engine_version)
if arn is not None:
pulumi.set(__self__, "arn", arn)
if at_rest_encryption_enabled is not None:
pulumi.set(__self__, "at_rest_encryption_enabled", at_rest_encryption_enabled)
if auth_token_enabled is not None:
pulumi.set(__self__, "auth_token_enabled", auth_token_enabled)
if cache_node_type is not None:
pulumi.set(__self__, "cache_node_type", cache_node_type)
if cluster_enabled is not None:
pulumi.set(__self__, "cluster_enabled", cluster_enabled)
if engine is not None:
pulumi.set(__self__, "engine", engine)
if global_replication_group_description is not None:
pulumi.set(__self__, "global_replication_group_description", global_replication_group_description)
if global_replication_group_id is not None:
pulumi.set(__self__, "global_replication_group_id", global_replication_group_id)
if global_replication_group_id_suffix is not None:
pulumi.set(__self__, "global_replication_group_id_suffix", global_replication_group_id_suffix)
if primary_replication_group_id is not None:
pulumi.set(__self__, "primary_replication_group_id", primary_replication_group_id)
if transit_encryption_enabled is not None:
pulumi.set(__self__, "transit_encryption_enabled", transit_encryption_enabled)
@property
@pulumi.getter(name="actualEngineVersion")
def actual_engine_version(self) -> Optional[pulumi.Input[str]]:
"""
The full version number of the cache engine running on the members of this global replication group.
"""
return pulumi.get(self, "actual_engine_version")
@actual_engine_version.setter
def actual_engine_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "actual_engine_version", value)
@property
@pulumi.getter
def arn(self) -> Optional[pulumi.Input[str]]:
"""
The ARN of the ElastiCache Global Replication Group.
"""
return pulumi.get(self, "arn")
@arn.setter
def arn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "arn", value)
@property
@pulumi.getter(name="atRestEncryptionEnabled")
def at_rest_encryption_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
A flag that indicate whether the encryption at rest is enabled.
"""
return pulumi.get(self, "at_rest_encryption_enabled")
@at_rest_encryption_enabled.setter
def at_rest_encryption_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "at_rest_encryption_enabled", value)
@property
@pulumi.getter(name="authTokenEnabled")
def auth_token_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
A flag that indicate whether AuthToken (password) is enabled.
"""
return pulumi.get(self, "auth_token_enabled")
@auth_token_enabled.setter
def auth_token_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "auth_token_enabled", value)
@property
@pulumi.getter(name="cacheNodeType")
def cache_node_type(self) -> Optional[pulumi.Input[str]]:
"""
The instance class used. See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html).
"""
return pulumi.get(self, "cache_node_type")
@cache_node_type.setter
def cache_node_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cache_node_type", value)
@property
@pulumi.getter(name="clusterEnabled")
def cluster_enabled(self) -> Optional[pulumi.Input[bool]]:
    """Indicates whether the Global Datastore is cluster enabled."""
    stored = pulumi.get(self, "cluster_enabled")
    return stored

@cluster_enabled.setter
def cluster_enabled(self, value: Optional[pulumi.Input[bool]]):
    """Set whether the Global Datastore is cluster enabled."""
    pulumi.set(self, "cluster_enabled", value)
@property
@pulumi.getter
def engine(self) -> Optional[pulumi.Input[str]]:
    """The name of the cache engine to be used for the clusters in this global replication group."""
    stored = pulumi.get(self, "engine")
    return stored

@engine.setter
def engine(self, value: Optional[pulumi.Input[str]]):
    """Set the name of the cache engine used by the clusters in this group."""
    pulumi.set(self, "engine", value)
@property
@pulumi.getter(name="globalReplicationGroupDescription")
def global_replication_group_description(self) -> Optional[pulumi.Input[str]]:
    """A user-created description for the global replication group."""
    stored = pulumi.get(self, "global_replication_group_description")
    return stored

@global_replication_group_description.setter
def global_replication_group_description(self, value: Optional[pulumi.Input[str]]):
    """Set the user-created description for the global replication group."""
    pulumi.set(self, "global_replication_group_description", value)
@property
@pulumi.getter(name="globalReplicationGroupId")
def global_replication_group_id(self) -> Optional[pulumi.Input[str]]:
    """The full ID of the global replication group."""
    stored = pulumi.get(self, "global_replication_group_id")
    return stored

@global_replication_group_id.setter
def global_replication_group_id(self, value: Optional[pulumi.Input[str]]):
    """Set the full ID of the global replication group."""
    pulumi.set(self, "global_replication_group_id", value)
@property
@pulumi.getter(name="globalReplicationGroupIdSuffix")
def global_replication_group_id_suffix(self) -> Optional[pulumi.Input[str]]:
    """
    The suffix name of a Global Datastore. If `global_replication_group_id_suffix`
    is changed, creates a new resource.
    """
    stored = pulumi.get(self, "global_replication_group_id_suffix")
    return stored

@global_replication_group_id_suffix.setter
def global_replication_group_id_suffix(self, value: Optional[pulumi.Input[str]]):
    """Set the suffix name of the Global Datastore."""
    pulumi.set(self, "global_replication_group_id_suffix", value)
@property
@pulumi.getter(name="primaryReplicationGroupId")
def primary_replication_group_id(self) -> Optional[pulumi.Input[str]]:
    """
    The ID of the primary cluster that accepts writes and will replicate updates
    to the secondary cluster. If `primary_replication_group_id` is changed,
    creates a new resource.
    """
    stored = pulumi.get(self, "primary_replication_group_id")
    return stored

@primary_replication_group_id.setter
def primary_replication_group_id(self, value: Optional[pulumi.Input[str]]):
    """Set the ID of the primary cluster for this global replication group."""
    pulumi.set(self, "primary_replication_group_id", value)
@property
@pulumi.getter(name="transitEncryptionEnabled")
def transit_encryption_enabled(self) -> Optional[pulumi.Input[bool]]:
    """A flag that indicates whether the encryption in transit is enabled."""
    stored = pulumi.get(self, "transit_encryption_enabled")
    return stored

@transit_encryption_enabled.setter
def transit_encryption_enabled(self, value: Optional[pulumi.Input[bool]]):
    """Set the flag indicating whether encryption in transit is enabled."""
    pulumi.set(self, "transit_encryption_enabled", value)
class GlobalReplicationGroup(pulumi.CustomResource):
    # Pulumi resource wrapper registered under
    # 'aws:elasticache/globalReplicationGroup:GlobalReplicationGroup'.
    # The two @overload __init__ signatures document the supported call forms;
    # the real __init__ dispatches to _internal_init.

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 global_replication_group_description: Optional[pulumi.Input[str]] = None,
                 global_replication_group_id_suffix: Optional[pulumi.Input[str]] = None,
                 primary_replication_group_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides an ElastiCache Global Replication Group resource, which manages replication between two or more Replication Groups in different regions. For more information, see the [ElastiCache User Guide](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Redis-Global-Datastore.html).

        ## Example Usage
        ### Global replication group with one secondary replication group

        The global replication group depends on the primary group existing. Secondary replication groups depend on the global replication group. The provider dependency management will handle this transparently using resource value references.

        ```python
        import pulumi
        import pulumi_aws as aws

        primary = aws.elasticache.ReplicationGroup("primary",
            replication_group_description="primary replication group",
            engine="redis",
            engine_version="5.0.6",
            node_type="cache.m5.large",
            number_cache_clusters=1)
        example = aws.elasticache.GlobalReplicationGroup("example",
            global_replication_group_id_suffix="example",
            primary_replication_group_id=primary.id)
        secondary = aws.elasticache.ReplicationGroup("secondary",
            replication_group_description="secondary replication group",
            global_replication_group_id=example.global_replication_group_id,
            number_cache_clusters=1,
            opts=pulumi.ResourceOptions(provider=aws["other_region"]))
        ```

        ## Import

        ElastiCache Global Replication Groups can be imported using the `global_replication_group_id`, e.g.

        ```sh
        $ pulumi import aws:elasticache/globalReplicationGroup:GlobalReplicationGroup my_global_replication_group okuqm-global-replication-group-1
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] global_replication_group_description: A user-created description for the global replication group.
        :param pulumi.Input[str] global_replication_group_id_suffix: The suffix name of a Global Datastore. If `global_replication_group_id_suffix` is changed, creates a new resource.
        :param pulumi.Input[str] primary_replication_group_id: The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primary_replication_group_id` is changed, creates a new resource.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: GlobalReplicationGroupArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides an ElastiCache Global Replication Group resource, which manages replication between two or more Replication Groups in different regions. For more information, see the [ElastiCache User Guide](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/Redis-Global-Datastore.html).

        ## Example Usage
        ### Global replication group with one secondary replication group

        The global replication group depends on the primary group existing. Secondary replication groups depend on the global replication group. The provider dependency management will handle this transparently using resource value references.

        ```python
        import pulumi
        import pulumi_aws as aws

        primary = aws.elasticache.ReplicationGroup("primary",
            replication_group_description="primary replication group",
            engine="redis",
            engine_version="5.0.6",
            node_type="cache.m5.large",
            number_cache_clusters=1)
        example = aws.elasticache.GlobalReplicationGroup("example",
            global_replication_group_id_suffix="example",
            primary_replication_group_id=primary.id)
        secondary = aws.elasticache.ReplicationGroup("secondary",
            replication_group_description="secondary replication group",
            global_replication_group_id=example.global_replication_group_id,
            number_cache_clusters=1,
            opts=pulumi.ResourceOptions(provider=aws["other_region"]))
        ```

        ## Import

        ElastiCache Global Replication Groups can be imported using the `global_replication_group_id`, e.g.

        ```sh
        $ pulumi import aws:elasticache/globalReplicationGroup:GlobalReplicationGroup my_global_replication_group okuqm-global-replication-group-1
        ```

        :param str resource_name: The name of the resource.
        :param GlobalReplicationGroupArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        """Dispatch to _internal_init for either call form (args object or kwargs)."""
        resource_args, opts = _utilities.get_resource_args_opts(GlobalReplicationGroupArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-object form: expand the args instance into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Keyword form: forward positional/keyword arguments unchanged.
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 global_replication_group_description: Optional[pulumi.Input[str]] = None,
                 global_replication_group_id_suffix: Optional[pulumi.Input[str]] = None,
                 primary_replication_group_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Validate/normalize resource options before registering the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from the inputs.
            # (__props__ may only be supplied together with opts.id, i.e. for
            # rehydrating an existing resource via get().)
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = GlobalReplicationGroupArgs.__new__(GlobalReplicationGroupArgs)

            __props__.__dict__["global_replication_group_description"] = global_replication_group_description
            # Required inputs may only be omitted when looking up by URN.
            if global_replication_group_id_suffix is None and not opts.urn:
                raise TypeError("Missing required property 'global_replication_group_id_suffix'")
            __props__.__dict__["global_replication_group_id_suffix"] = global_replication_group_id_suffix
            if primary_replication_group_id is None and not opts.urn:
                raise TypeError("Missing required property 'primary_replication_group_id'")
            __props__.__dict__["primary_replication_group_id"] = primary_replication_group_id
            # Output-only properties start as None; the provider fills them in.
            __props__.__dict__["actual_engine_version"] = None
            __props__.__dict__["arn"] = None
            __props__.__dict__["at_rest_encryption_enabled"] = None
            __props__.__dict__["auth_token_enabled"] = None
            __props__.__dict__["cache_node_type"] = None
            __props__.__dict__["cluster_enabled"] = None
            __props__.__dict__["engine"] = None
            __props__.__dict__["global_replication_group_id"] = None
            __props__.__dict__["transit_encryption_enabled"] = None
        super(GlobalReplicationGroup, __self__).__init__(
            'aws:elasticache/globalReplicationGroup:GlobalReplicationGroup',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            actual_engine_version: Optional[pulumi.Input[str]] = None,
            arn: Optional[pulumi.Input[str]] = None,
            at_rest_encryption_enabled: Optional[pulumi.Input[bool]] = None,
            auth_token_enabled: Optional[pulumi.Input[bool]] = None,
            cache_node_type: Optional[pulumi.Input[str]] = None,
            cluster_enabled: Optional[pulumi.Input[bool]] = None,
            engine: Optional[pulumi.Input[str]] = None,
            global_replication_group_description: Optional[pulumi.Input[str]] = None,
            global_replication_group_id: Optional[pulumi.Input[str]] = None,
            global_replication_group_id_suffix: Optional[pulumi.Input[str]] = None,
            primary_replication_group_id: Optional[pulumi.Input[str]] = None,
            transit_encryption_enabled: Optional[pulumi.Input[bool]] = None) -> 'GlobalReplicationGroup':
        """
        Get an existing GlobalReplicationGroup resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] actual_engine_version: The full version number of the cache engine running on the members of this global replication group.
        :param pulumi.Input[str] arn: The ARN of the ElastiCache Global Replication Group.
        :param pulumi.Input[bool] at_rest_encryption_enabled: A flag that indicate whether the encryption at rest is enabled.
        :param pulumi.Input[bool] auth_token_enabled: A flag that indicate whether AuthToken (password) is enabled.
        :param pulumi.Input[str] cache_node_type: The instance class used. See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html).
        :param pulumi.Input[bool] cluster_enabled: Indicates whether the Global Datastore is cluster enabled.
        :param pulumi.Input[str] engine: The name of the cache engine to be used for the clusters in this global replication group.
        :param pulumi.Input[str] global_replication_group_description: A user-created description for the global replication group.
        :param pulumi.Input[str] global_replication_group_id: The full ID of the global replication group.
        :param pulumi.Input[str] global_replication_group_id_suffix: The suffix name of a Global Datastore. If `global_replication_group_id_suffix` is changed, creates a new resource.
        :param pulumi.Input[str] primary_replication_group_id: The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primary_replication_group_id` is changed, creates a new resource.
        :param pulumi.Input[bool] transit_encryption_enabled: A flag that indicates whether the encryption in transit is enabled.
        """
        # Merge the caller's options with the provider ID so the engine performs
        # a lookup instead of a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _GlobalReplicationGroupState.__new__(_GlobalReplicationGroupState)

        __props__.__dict__["actual_engine_version"] = actual_engine_version
        __props__.__dict__["arn"] = arn
        __props__.__dict__["at_rest_encryption_enabled"] = at_rest_encryption_enabled
        __props__.__dict__["auth_token_enabled"] = auth_token_enabled
        __props__.__dict__["cache_node_type"] = cache_node_type
        __props__.__dict__["cluster_enabled"] = cluster_enabled
        __props__.__dict__["engine"] = engine
        __props__.__dict__["global_replication_group_description"] = global_replication_group_description
        __props__.__dict__["global_replication_group_id"] = global_replication_group_id
        __props__.__dict__["global_replication_group_id_suffix"] = global_replication_group_id_suffix
        __props__.__dict__["primary_replication_group_id"] = primary_replication_group_id
        __props__.__dict__["transit_encryption_enabled"] = transit_encryption_enabled
        return GlobalReplicationGroup(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="actualEngineVersion")
    def actual_engine_version(self) -> pulumi.Output[str]:
        """
        The full version number of the cache engine running on the members of this global replication group.
        """
        return pulumi.get(self, "actual_engine_version")

    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        The ARN of the ElastiCache Global Replication Group.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter(name="atRestEncryptionEnabled")
    def at_rest_encryption_enabled(self) -> pulumi.Output[bool]:
        """
        A flag that indicates whether the encryption at rest is enabled.
        """
        return pulumi.get(self, "at_rest_encryption_enabled")

    @property
    @pulumi.getter(name="authTokenEnabled")
    def auth_token_enabled(self) -> pulumi.Output[bool]:
        """
        A flag that indicates whether AuthToken (password) is enabled.
        """
        return pulumi.get(self, "auth_token_enabled")

    @property
    @pulumi.getter(name="cacheNodeType")
    def cache_node_type(self) -> pulumi.Output[str]:
        """
        The instance class used. See AWS documentation for information on [supported node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/CacheNodes.SupportedTypes.html) and [guidance on selecting node types](https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/nodes-select-size.html).
        """
        return pulumi.get(self, "cache_node_type")

    @property
    @pulumi.getter(name="clusterEnabled")
    def cluster_enabled(self) -> pulumi.Output[bool]:
        """
        Indicates whether the Global Datastore is cluster enabled.
        """
        return pulumi.get(self, "cluster_enabled")

    @property
    @pulumi.getter
    def engine(self) -> pulumi.Output[str]:
        """
        The name of the cache engine to be used for the clusters in this global replication group.
        """
        return pulumi.get(self, "engine")

    @property
    @pulumi.getter(name="globalReplicationGroupDescription")
    def global_replication_group_description(self) -> pulumi.Output[Optional[str]]:
        """
        A user-created description for the global replication group.
        """
        return pulumi.get(self, "global_replication_group_description")

    @property
    @pulumi.getter(name="globalReplicationGroupId")
    def global_replication_group_id(self) -> pulumi.Output[str]:
        """
        The full ID of the global replication group.
        """
        return pulumi.get(self, "global_replication_group_id")

    @property
    @pulumi.getter(name="globalReplicationGroupIdSuffix")
    def global_replication_group_id_suffix(self) -> pulumi.Output[str]:
        """
        The suffix name of a Global Datastore. If `global_replication_group_id_suffix` is changed, creates a new resource.
        """
        return pulumi.get(self, "global_replication_group_id_suffix")

    @property
    @pulumi.getter(name="primaryReplicationGroupId")
    def primary_replication_group_id(self) -> pulumi.Output[str]:
        """
        The ID of the primary cluster that accepts writes and will replicate updates to the secondary cluster. If `primary_replication_group_id` is changed, creates a new resource.
        """
        return pulumi.get(self, "primary_replication_group_id")

    @property
    @pulumi.getter(name="transitEncryptionEnabled")
    def transit_encryption_enabled(self) -> pulumi.Output[bool]:
        """
        A flag that indicates whether the encryption in transit is enabled.
        """
        return pulumi.get(self, "transit_encryption_enabled")
| 53.489324 | 354 | 0.70979 | 3,551 | 30,061 | 5.70459 | 0.067587 | 0.142963 | 0.141186 | 0.075826 | 0.893518 | 0.871057 | 0.834823 | 0.808955 | 0.796614 | 0.770943 | 0 | 0.000628 | 0.205216 | 30,061 | 561 | 355 | 53.58467 | 0.847229 | 0.384751 | 0 | 0.525084 | 1 | 0 | 0.162096 | 0.122453 | 0 | 0 | 0 | 0 | 0 | 1 | 0.16388 | false | 0.003344 | 0.016722 | 0 | 0.284281 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
82ad7b2f4c351d2fdc15e6f524fc79b8135acd5e | 20,038 | py | Python | test/unit/rmq_2_isse/process_msg.py | mjpernot/rabbitmq-isse | ad8246d98e3e4924b1946bb8f6e8856f2c4a0309 | [
"MIT"
] | null | null | null | test/unit/rmq_2_isse/process_msg.py | mjpernot/rabbitmq-isse | ad8246d98e3e4924b1946bb8f6e8856f2c4a0309 | [
"MIT"
] | null | null | null | test/unit/rmq_2_isse/process_msg.py | mjpernot/rabbitmq-isse | ad8246d98e3e4924b1946bb8f6e8856f2c4a0309 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# Classification (U)
"""Program: process_msg.py
Description: Unit testing of process_msg in rmq_2_isse.py.
Usage:
test/unit/rmq_2_isse/process_msg.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import rmq_2_isse
import version
__version__ = version.__version__
class UnitTest(unittest.TestCase):

    """Class: UnitTest

    Description: Class which is a representation of a unit testing.

    Methods:
        setUp -> Initialize testing environment.
        test_is_valid_name_false -> Test with is valid name is false.
        test_is_valid_name_true -> Test with is valid name is true.
        test_empty_line_body -> Test an empty body argument.
        test_one_line_body -> Test with a one line body argument.
        test_one_line_multi_body -> Test one line multiple entries for body.
        test_multi_line_body -> Test with multiple line body argument.
        test_multi_line_multi_body -> Test with multi line multi body.
        test_file_search_zero -> Test with zero count on resends.
        test_file_search_one -> Test with one count on resends.
        test_file_search_equal -> Test with count equal to resends.
        test_file_search_greater -> Test with count greater than resends.
        test_valid_msg_false -> Test with valid message is false.
        test_valid_msg_true -> Test with valid message is true.
        test_file_list_empty_list -> Test with file list returns empty list.
        test_file_list_data_list -> Test with file list returns data in list.
        test_empty_line_file_list -> Test with an empty file list.
        test_one_line_file_list -> Test with one entry in file list.
        test_multi_line_file_list -> Test with multiple entries in file list.
        test_is_valid_ext_false -> Test with is valid extension is false.
        test_is_valid_ext_true -> Test with is valid extension is true.

    """

    def setUp(self):

        """Function: setUp

        Description: Initialization for unit testing.

        Arguments:

        """

        class CfgTest(object):

            """Class: CfgTest

            Description: Class which is a representation of a cfg module.

            Methods:
                __init__ -> Initialize configuration environment.

            """

            def __init__(self):

                """Method: __init__

                Description: Initialization instance of the CfgTest class.

                Arguments:

                """

                self.max_resend = 3
                self.proc_file = "files_processed"
                self.isse_dir = "/ISSE_DIR_PATH"

        self.cfg = CfgTest()
        self.method = "Method Properties"
        self.body = "File1.txt"
        self.rq = "RabbitMQ Instance"

    # NOTE: with mock.patch decorators, mock arguments are passed bottom-up:
    # the decorator closest to the function supplies the first mock argument.

    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.is_valid_name")
    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_is_valid_name_false(self, mock_log, mock_lib, mock_valid,
                                 mock_find, mock_name, mock_msg):

        """Function: test_is_valid_name_false

        Description: Test with is valid name is false.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_valid.return_value = True
        mock_find.return_value = ["File1.txt"]
        # NOTE(review): mock_lib already patches rmq_2_isse.gen_libs, so this
        # sets gen_libs.gen_libs.write_file on the mock -- possibly intended
        # as mock_lib.write_file.  Harmless on a MagicMock; confirm intent.
        mock_lib.gen_libs.write_file.return_value = True
        mock_name.return_value = False
        mock_msg.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_name")
    @mock.patch("rmq_2_isse.is_valid_ext")
    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_is_valid_name_true(self, mock_log, mock_lib, mock_valid,
                                mock_find, mock_ext, mock_name):

        """Function: test_is_valid_name_true

        Description: Test with is valid name is true.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_valid.return_value = True
        mock_find.return_value = ["File1.txt"]
        mock_ext.return_value = True
        mock_lib.gen_libs.write_file.return_value = True
        mock_lib.gen_libs.cp_file2.return_value = True
        mock_name.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_empty_line_body(self, mock_log):

        """Function: test_empty_line_body

        Description: Test process_msg function with empty line body.

        Arguments:

        """

        mock_log.return_value = True

        self.body = ""

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs.file_search_cnt")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_one_line_body(self, mock_log, mock_cnt, mock_msg):

        """Function: test_one_line_body

        Description: Test process_msg function with an one line body.

        Arguments:

        """

        mock_log.return_value = True
        # Exceed max_resend so the message is routed to non_proc_msg.
        mock_cnt.return_value = self.cfg.max_resend + 1
        mock_msg.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs.file_search_cnt")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_one_line_multi_body(self, mock_log, mock_cnt, mock_msg):

        """Function: test_one_line_multi_body

        Description: Test process_msg function with multiple line body
            argument.

        Arguments:

        """

        mock_log.return_value = True
        mock_cnt.return_value = self.cfg.max_resend + 1
        mock_msg.return_value = True

        self.body = "File1.txt File2.txt"

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs.file_search_cnt")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_multi_line_body(self, mock_log, mock_cnt, mock_msg):

        """Function: test_multi_line_body

        Description: Test process_msg function with multi line body.

        Arguments:

        """

        mock_log.return_value = True
        mock_cnt.return_value = self.cfg.max_resend + 1
        mock_msg.return_value = True

        self.body = "File1.txt\nFile2.txt"

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs.file_search_cnt")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_multi_line_multi_body(self, mock_log, mock_cnt, mock_msg):

        """Function: test_multi_line_multi_body

        Description: Test process_msg function with multi line multi body.

        Arguments:

        """

        mock_log.return_value = True
        mock_cnt.return_value = self.cfg.max_resend + 1
        mock_msg.return_value = True

        self.body = "File1.txt\nFile2.txt File3.txt"

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_file_search_zero(self, mock_log, mock_lib, mock_msg, mock_valid):

        """Function: test_file_search_zero

        Description: Test process_msg function with zero count on resends.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True
        mock_valid.return_value = False

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_file_search_one(self, mock_log, mock_lib, mock_msg, mock_valid):

        """Function: test_file_search_one

        Description: Test process_msg function with one count on resends.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 1
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True
        mock_valid.return_value = False

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_file_search_equal(self, mock_log, mock_lib, mock_msg, mock_valid):

        """Function: test_file_search_equal

        Description: Test process_msg function with count equal to resends.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = self.cfg.max_resend
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True
        mock_valid.return_value = False

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_file_search_greater(self, mock_log, mock_lib, mock_msg):

        """Function: test_file_search_greater

        Description: Test process_msg function with count greater than
            resends.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = self.cfg.max_resend + 1
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_valid_msg_false(self, mock_log, mock_lib, mock_msg, mock_valid):

        """Function: test_valid_msg_false

        Description: Test process_msg function with valid message is false.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True
        mock_valid.return_value = False

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_valid_msg_true(self, mock_log, mock_lib, mock_msg, mock_valid,
                            mock_find):

        """Function: test_valid_msg_true

        Description: Test process_msg function with valid message is true.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True
        mock_valid.return_value = True
        mock_find.return_value = []

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_file_list_empty_list(self, mock_log, mock_lib, mock_msg,
                                  mock_valid, mock_find):

        """Function: test_file_list_empty_list

        Description: Test process_msg function with file list returns empty
            list.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True
        mock_valid.return_value = True
        mock_find.return_value = []

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_name")
    @mock.patch("rmq_2_isse.is_valid_ext")
    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_file_list_data_list(self, mock_log, mock_lib, mock_valid,
                                 mock_find, mock_ext, mock_name):

        """Function: test_file_list_data_list

        Description: Test process_msg function with file list returns data in
            list.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_valid.return_value = True
        mock_find.return_value = ["File1.txt"]
        mock_ext.return_value = False
        mock_lib.gen_libs.write_file.return_value = True
        mock_name.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.non_proc_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_empty_line_file_list(self, mock_log, mock_lib, mock_msg,
                                  mock_valid, mock_find):

        """Function: test_empty_line_file_list

        Description: Test process_msg function with empty file list.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_lib.gen_libs.write_file.return_value = True
        mock_msg.return_value = True
        mock_valid.return_value = True
        mock_find.return_value = []

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_name")
    @mock.patch("rmq_2_isse.is_valid_ext")
    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_one_line_file_list(self, mock_log, mock_lib, mock_valid,
                                mock_find, mock_ext, mock_name):

        """Function: test_one_line_file_list

        Description: Test process_msg function with one entry in file list.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_valid.return_value = True
        mock_find.return_value = ["File1.txt"]
        mock_ext.return_value = False
        mock_lib.gen_libs.write_file.return_value = True
        mock_name.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_name")
    @mock.patch("rmq_2_isse.is_valid_ext")
    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_multi_line_file_list(self, mock_log, mock_lib, mock_valid,
                                  mock_find, mock_ext, mock_name):

        """Function: test_multi_line_file_list

        Description: Test process_msg function with multiple entries in file
            list.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_valid.return_value = True
        mock_find.return_value = ["File1.txt", "File2.txt"]
        mock_ext.return_value = False
        mock_lib.gen_libs.write_file.return_value = True
        mock_name.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_name")
    @mock.patch("rmq_2_isse.is_valid_ext")
    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_is_valid_ext_false(self, mock_log, mock_lib, mock_valid,
                                mock_find, mock_ext, mock_name):

        """Function: test_is_valid_ext_false

        Description: Test process_msg function with is valid extension is
            false.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_valid.return_value = True
        mock_find.return_value = ["File1.txt"]
        mock_ext.return_value = False
        mock_lib.gen_libs.write_file.return_value = True
        mock_name.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))

    @mock.patch("rmq_2_isse.is_valid_name")
    @mock.patch("rmq_2_isse.is_valid_ext")
    @mock.patch("rmq_2_isse.find_files")
    @mock.patch("rmq_2_isse.is_valid_msg")
    @mock.patch("rmq_2_isse.gen_libs")
    @mock.patch("rmq_2_isse.gen_class.Logger")
    def test_is_valid_ext_true(self, mock_log, mock_lib, mock_valid, mock_find,
                               mock_ext, mock_name):

        """Function: test_is_valid_ext_true

        Description: Test process_msg function with is valid extension is
            true.

        Arguments:

        """

        mock_log.return_value = True
        mock_lib.file_search_cnt.return_value = 0
        mock_valid.return_value = True
        mock_find.return_value = ["File1.txt"]
        mock_ext.return_value = True
        mock_lib.gen_libs.write_file.return_value = True
        mock_lib.gen_libs.cp_file2.return_value = True
        mock_name.return_value = True

        self.assertFalse(rmq_2_isse.process_msg(self.rq, mock_log, self.cfg,
                                                self.method, self.body))
# Run this module's unit tests when executed directly.
if __name__ == "__main__":
    unittest.main()
| 32.74183 | 79 | 0.637688 | 2,761 | 20,038 | 4.250996 | 0.047084 | 0.03817 | 0.07634 | 0.098577 | 0.901764 | 0.850132 | 0.827469 | 0.797563 | 0.779927 | 0.746187 | 0 | 0.010409 | 0.271235 | 20,038 | 611 | 80 | 32.795417 | 0.79333 | 0.216089 | 0 | 0.808874 | 0 | 0 | 0.157395 | 0.123111 | 0 | 0 | 0 | 0 | 0.068259 | 1 | 0.075085 | false | 0 | 0.023891 | 0 | 0.105802 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
82c11eb627cfc7c7f1f565610146d2c2123572d3 | 14,035 | py | Python | tests/test_euclidean_distance.py | jiayingwang/smart-match | 3f273aafce87dc1a97da20baa49b3adceaf53979 | [
"MIT"
] | 14 | 2020-09-27T10:20:00.000Z | 2022-01-08T19:12:29.000Z | tests/test_euclidean_distance.py | jiayingwang/smart-match | 3f273aafce87dc1a97da20baa49b3adceaf53979 | [
"MIT"
] | 5 | 2020-09-28T13:45:13.000Z | 2020-12-08T09:24:49.000Z | tests/test_euclidean_distance.py | jiayingwang/smart-match | 3f273aafce87dc1a97da20baa49b3adceaf53979 | [
"MIT"
] | 19 | 2020-09-25T12:36:09.000Z | 2021-08-16T09:30:48.000Z | import unittest
import smart_match
class TestEuclideanDistance(unittest.TestCase):
    """Tests for smart_match's 'Euclidean' backend.

    Pins expected values of similarity(), dissimilarity() and distance()
    for a fixed corpus of string pairs.
    """

    def setUp(self):
        # Select the Euclidean method for every test in this case.
        smart_match.use('Euclidean')

    def test_similarity(self):
        """Pin similarity() outputs with level='term' (token-wise compare)."""
        smart_match.set_params(level='term')
        # Character-order anagrams compare as identical token multisets.
        self.assertAlmostEqual(smart_match.similarity('hello', 'hero'), 0.0)
        self.assertAlmostEqual(smart_match.similarity('hello', 'ehllo'),0.0)
        self.assertAlmostEqual(smart_match.similarity('test string1', 'test string2'), 0.5)
        self.assertAlmostEqual(smart_match.similarity('test','test string2'),0.5527864045000421)
        self.assertAlmostEqual(smart_match.similarity('','test string2'),0.2928932188134524)
        self.assertAlmostEqual(smart_match.similarity('aaa bbb ccc ddd','aaa bbb ccc eee'),0.75)
        self.assertAlmostEqual(smart_match.similarity('a b c d','a b c e'),0.75)
        self.assertAlmostEqual(smart_match.similarity('a b c d','a b c e f'),0.7294991095997703)
        self.assertAlmostEqual(smart_match.similarity('a b c d','a b e f'),0.6464466094067263)
        self.assertAlmostEqual(smart_match.similarity('a b c','a b c e f g'),0.7418011102528389)
        self.assertAlmostEqual(smart_match.similarity('a b b c c','a b c e f g'),0.7137008328430658)
        # Single-token pairs with no exact match score 0 at term level.
        self.assertAlmostEqual(smart_match.similarity('Healed','Sealed'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Healed','Healthy'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Healed','Heard'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Healed','Herded'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Healed','Help'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Healed','Sold'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Healed','Help'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Healed','So'),0.0)
        self.assertAlmostEqual(smart_match.similarity('Sam J Chapman','Samuel John Chapman'),0.5285954792089682)
        self.assertAlmostEqual(smart_match.similarity('Sam Chapman','S Chapman'),0.5)
        self.assertAlmostEqual(smart_match.similarity('John Smith','Samuel John Chapman'),0.5196155385847385)
        self.assertAlmostEqual(smart_match.similarity('John Smith','Sam Chapman'),0.29289321881345254)
        self.assertAlmostEqual(smart_match.similarity('John Smith','Sam J Chapman'),0.3798263270539577)
        self.assertAlmostEqual(smart_match.similarity('John Smith','S Chapman'),0.29289321881345254)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','Web Database Applications with PHP & MySQL'),0.7373871342805549)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','Creating Database Web Applications with PHP and ASP'),0.7382880387048931)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','Building Database Applications on the Web Using PHP3'),0.7382880387048931)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','Building Web Database Applications with Visual Studio 6'),0.7382880387048931)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','Web Application Development With PHP'),0.5799159747915972)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','WebRAD: Building Database Applications on the Web with Visual FoxPro and Web Connection'),0.7629772684300113)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','Structural Assessment: The Role of Large and Full-Scale Testing'),0.6348516283298893)
        self.assertAlmostEqual(smart_match.similarity('Web Database Applications','How to Find a Scholarship Online'),0.5527864045000421)
        self.assertAlmostEqual(smart_match.similarity('Web Aplications','Web Database Applications with PHP & MySQL'),0.6365781078441844)
        self.assertAlmostEqual(smart_match.similarity('Web Aplications','Creating Database Web Applications with PHP and ASP'),0.6570028297149824)
        self.assertAlmostEqual(smart_match.similarity('Web Aplications','Building Database Applications on the Web Using PHP3'),0.6570028297149824)
        self.assertAlmostEqual(smart_match.similarity('Web Aplications','Web Application Development With PHP'),0.5847726007313001)
        self.assertAlmostEqual(smart_match.similarity('Web Aplications','WebRAD: Building Database Applications on the Web with Visual FoxPro and Web Connection'),0.7258750126848699)
        self.assertAlmostEqual(smart_match.similarity('Web Aplications','Structural Assessment: The Role of Large and Full-Scale Testing'),0.6402615329077492)
        self.assertAlmostEqual(smart_match.similarity('Web Aplications','How to Find a Scholarship Online'),0.5527864045000421)

    def test_dissimilarity(self):
        """Pin dissimilarity() outputs.

        NOTE(review): unlike test_similarity this does not call
        set_params(level='term'); presumably it runs at the default
        (character) level unless set_params state leaks between tests —
        confirm smart_match's parameter lifetime.
        """
        self.assertAlmostEqual(smart_match.dissimilarity('hello', 'hero'), 0.34921514788478913)
        self.assertAlmostEqual(smart_match.dissimilarity('hello', 'ehllo'),0.0)
        self.assertAlmostEqual(smart_match.dissimilarity('test string1', 'test string2'), 0.08333333333333334)
        self.assertAlmostEqual(smart_match.dissimilarity('test','test string2'),0.22360679774997896)
        self.assertAlmostEqual(smart_match.dissimilarity('','test string2'),0.37267799624996495)
        self.assertAlmostEqual(smart_match.dissimilarity('aaa bbb ccc ddd','aaa bbb ccc eee'),0.19999999999999998)
        self.assertAlmostEqual(smart_match.dissimilarity('a b c d','a b c e'),0.14285714285714288)
        self.assertAlmostEqual(smart_match.dissimilarity('a b c d','a b c e f'),0.17541160386140586)
        self.assertAlmostEqual(smart_match.dissimilarity('a b c d','a b e f'),0.20203050891044214)
        self.assertAlmostEqual(smart_match.dissimilarity('a b c','a b c e f g'),0.28669108954049793)
        self.assertAlmostEqual(smart_match.dissimilarity('a b b c c','a b c e f g'),0.1723454968864278)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','Sealed'),0.16666666666666669)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','Healthy'),0.242535625036333)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','Heard'),0.22176638128637186)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','Herded'),0.23570226039551587)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','Help'),0.2773500981126146)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','Sold'),0.3922322702763681)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','Help'),0.2773500981126146)
        self.assertAlmostEqual(smart_match.dissimilarity('Healed','So'),0.5)
        self.assertAlmostEqual(smart_match.dissimilarity('Sam J Chapman','Samuel John Chapman'),0.1063990353197863)
        self.assertAlmostEqual(smart_match.dissimilarity('Sam Chapman','S Chapman'),0.09950371902099893)
        self.assertAlmostEqual(smart_match.dissimilarity('John Smith','Samuel John Chapman'),0.2030141634031955)
        self.assertAlmostEqual(smart_match.dissimilarity('John Smith','Sam Chapman'),0.2773500981126146)
        self.assertAlmostEqual(smart_match.dissimilarity('John Smith','Sam J Chapman'),0.25139018680589903)
        self.assertAlmostEqual(smart_match.dissimilarity('John Smith','S Chapman'),0.2465227791969404)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','Web Database Applications with PHP & MySQL'),0.11391286426309005)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','Creating Database Web Applications with PHP and ASP'),0.13637772569945572)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','Building Database Applications on the Web Using PHP3'),0.13756671829544148)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','Building Web Database Applications with Visual Studio 6'),0.14988580127769788)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','Web Application Development With PHP'),0.13878329029749603)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','WebRAD: Building Database Applications on the Web with Visual FoxPro and Web Connection'),0.1760633165448035)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','Structural Assessment: The Role of Large and Full-Scale Testing'),0.17827109516018724)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Database Applications','How to Find a Scholarship Online'),0.16519463133513965)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Aplications','Web Database Applications with PHP & MySQL'),0.16928560745943824)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Aplications','Creating Database Web Applications with PHP and ASP'),0.19183621228155442)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Aplications','Building Database Applications on the Web Using PHP3'),0.18009519124771117)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Aplications','Web Application Development With PHP'),0.16418267275468842)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Aplications','WebRAD: Building Database Applications on the Web with Visual FoxPro and Web Connection'),0.20388829980917544)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Aplications','Structural Assessment: The Role of Large and Full-Scale Testing'),0.20945691294067176)
        self.assertAlmostEqual(smart_match.dissimilarity('Web Aplications','How to Find a Scholarship Online'),0.18118018933869962)

    def test_distance(self):
        """Pin raw (unnormalised) Euclidean distance outputs.

        NOTE(review): these use assertEqual on floats; exact equality is
        brittle across platforms — consider assertAlmostEqual.
        """
        self.assertEqual(smart_match.distance('hello', 'hero'), 2.23606797749979)
        self.assertEqual(smart_match.distance('hello', 'ehllo'), 0.0)
        self.assertEqual(smart_match.distance('test string1', 'test string2'),1.4142135623730951)
        self.assertEqual(smart_match.distance('test','test string2'),2.8284271247461903)
        self.assertEqual(smart_match.distance('','test string2'),4.47213595499958)
        self.assertEqual(smart_match.distance('aaa bbb ccc ddd','aaa bbb ccc eee'),4.242640687119285)
        self.assertEqual(smart_match.distance('a b c d','a b c e'),1.4142135623730951)
        self.assertEqual(smart_match.distance('a b c d','a b c e f'),2.0)
        self.assertEqual(smart_match.distance('a b c d','a b e f'),2.0)
        self.assertEqual(smart_match.distance('a b c','a b c e f g'),3.4641016151377544)
        self.assertEqual(smart_match.distance('a b b c c','a b c e f g'),2.449489742783178)
        self.assertEqual(smart_match.distance('Healed','Sealed'),1.4142135623730951)
        self.assertEqual(smart_match.distance('Healed','Healthy'),2.23606797749979)
        self.assertEqual(smart_match.distance('Healed','Heard'),1.7320508075688772)
        self.assertEqual(smart_match.distance('Healed','Herded'),2.0)
        self.assertEqual(smart_match.distance('Healed','Help'),2.0)
        self.assertEqual(smart_match.distance('Healed','Sold'),2.8284271247461903)
        self.assertEqual(smart_match.distance('Healed','Help'),2.0)
        self.assertEqual(smart_match.distance('Healed','So'),3.1622776601683795)
        self.assertEqual(smart_match.distance('Sam J Chapman','Samuel John Chapman'),2.449489742783178)
        self.assertEqual(smart_match.distance('Sam Chapman','S Chapman'),1.4142135623730951)
        self.assertEqual(smart_match.distance('John Smith','Samuel John Chapman'),4.358898943540674)
        self.assertEqual(smart_match.distance('John Smith','Sam Chapman'),4.123105625617661)
        self.assertEqual(smart_match.distance('John Smith','Sam J Chapman'),4.123105625617661)
        self.assertEqual(smart_match.distance('John Smith','S Chapman'),3.3166247903554)
        self.assertEqual(smart_match.distance('Web Database Applications','Web Database Applications with PHP & MySQL'),5.5677643628300215)
        self.assertEqual(smart_match.distance('Web Database Applications','Creating Database Web Applications with PHP and ASP'),7.745966692414834)
        self.assertEqual(smart_match.distance('Web Database Applications','Building Database Applications on the Web Using PHP3'),7.937253933193772)
        self.assertEqual(smart_match.distance('Web Database Applications','Building Web Database Applications with Visual Studio 6'),9.055385138137417)
        self.assertEqual(smart_match.distance('Web Database Applications','Web Application Development With PHP'),6.082762530298219)
        self.assertEqual(smart_match.distance('Web Database Applications','WebRAD: Building Database Applications on the Web with Visual FoxPro and Web Connection'),15.937377450509228)
        self.assertEqual(smart_match.distance('Web Database Applications','Structural Assessment: The Role of Large and Full-Scale Testing'),12.083045973594572)
        self.assertEqual(smart_match.distance('Web Database Applications','How to Find a Scholarship Online'),6.708203932499369)
        self.assertEqual(smart_match.distance('Web Aplications','Web Database Applications with PHP & MySQL'),7.54983443527075)
        self.assertEqual(smart_match.distance('Web Aplications','Creating Database Web Applications with PHP and ASP'),10.198039027185569)
        self.assertEqual(smart_match.distance('Web Aplications','Building Database Applications on the Web Using PHP3'),9.746794344808963)
        self.assertEqual(smart_match.distance('Web Aplications','Web Application Development With PHP'),6.4031242374328485)
        self.assertEqual(smart_match.distance('Web Aplications','WebRAD: Building Database Applications on the Web with Visual FoxPro and Web Connection'),18.0)
        self.assertEqual(smart_match.distance('Web Aplications','Structural Assessment: The Role of Large and Full-Scale Testing'),13.564659966250536)
        self.assertEqual(smart_match.distance('Web Aplications','How to Find a Scholarship Online'),6.4031242374328485)
# Run this module's unit tests when executed directly.
if __name__ == '__main__':
    unittest.main()
| 101.702899 | 195 | 0.760884 | 1,683 | 14,035 | 6.265003 | 0.111705 | 0.116654 | 0.197269 | 0.235205 | 0.847401 | 0.825873 | 0.739093 | 0.585926 | 0.436552 | 0.346832 | 0 | 0.140267 | 0.123762 | 14,035 | 137 | 196 | 102.445255 | 0.717108 | 0 | 0 | 0.045802 | 0 | 0 | 0.321482 | 0 | 0 | 0 | 0 | 0 | 0.916031 | 1 | 0.030534 | false | 0 | 0.015267 | 0 | 0.053435 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
82cb77ee370a7be1ef3216dd5736c19f67977f48 | 12,378 | py | Python | tests/test_graph2.py | SymbiFlow/rr_graph_lib | 81853325715c2670d0bdb921657eea6a3b0ddeba | [
"0BSD"
] | 1 | 2020-03-26T01:06:28.000Z | 2020-03-26T01:06:28.000Z | tests/test_graph2.py | SymbiFlow/rr_graph_lib | 81853325715c2670d0bdb921657eea6a3b0ddeba | [
"0BSD"
] | 1 | 2021-04-11T17:04:25.000Z | 2021-04-12T09:22:03.000Z | tests/test_graph2.py | SymbiFlow/rr_graph_lib | 81853325715c2670d0bdb921657eea6a3b0ddeba | [
"0BSD"
] | 1 | 2020-03-12T16:40:46.000Z | 2020-03-12T16:40:46.000Z | import unittest
from copy import deepcopy
from rr_graph.graph2 import SwitchTiming, SwitchSizing, Switch, SwitchType, \
Graph, SegmentTiming, Segment, PinClass, Pin, PinType, \
BlockType, GridLoc, NodeTiming, NodeSegment, Node, NodeType, \
NodeDirection, NodeLoc
from rr_graph.tracks import Track, Direction
class Graph2Tests(unittest.TestCase):
    """Unit tests for rr_graph.graph2.Graph using a minimal fixture.

    setUp builds a graph containing only the VPR delayless switch;
    individual tests add tracks/edges/switches on top of it.
    """

    def setUp(self):
        # Minimal fixture: one delayless SHORT switch, everything else empty.
        switch_timing = SwitchTiming(
            r=0, c_in=1, c_out=2, t_del=0, c_internal=0, p_cost=0
        )
        switch_sizing = SwitchSizing(mux_trans_size=0, buf_size=1)
        delayless = Switch(
            id=0,
            name='__vpr_delayless_switch__',
            type=SwitchType.SHORT,
            timing=switch_timing,
            sizing=switch_sizing
        )
        self.graph = Graph([delayless], [], [], [], [])

    def test_init(self):
        # Construct a fully populated Graph (switches, segment, block type,
        # grid location and IPIN/SINK/OPIN/SOURCE nodes).
        # NOTE(review): this test only exercises the constructor — it makes
        # no assertions; presumably Graph.__init__ validates internally.
        switch_timing = SwitchTiming(
            r=0, c_in=1, c_out=2, t_del=0, c_internal=0, p_cost=0
        )
        switch_sizing = SwitchSizing(mux_trans_size=0, buf_size=1)
        self.switches = [
            Switch(
                id=0,
                name='mux',
                type=SwitchType.MUX,
                timing=switch_timing,
                sizing=switch_sizing
            ),
            Switch(
                id=1,
                name='__vpr_delayless_switch__',
                type=SwitchType.SHORT,
                timing=switch_timing,
                sizing=switch_sizing
            ),
        ]
        seg_timing = SegmentTiming(r_per_meter=1, c_per_meter=1)
        self.segments = [Segment(id=0, name='s0', timing=seg_timing)]
        pin_classes = [
            PinClass(type=PinType.INPUT, pin=[Pin(ptc=0, name='p1')]),
            PinClass(type=PinType.OUTPUT, pin=[Pin(ptc=1, name='p2')]),
        ]
        self.block_types = [
            BlockType(
                id=0, name='b0', width=1, height=1, pin_class=pin_classes
            )
        ]
        self.grid = [
            GridLoc(
                x=0, y=0, block_type_id=0, width_offset=0, height_offset=0
            ),
        ]
        node_timing = NodeTiming(r=0, c=0)
        # One node of each pin-related type at grid location (0, 0).
        self.nodes = [
            Node(
                id=0,
                type=NodeType.IPIN,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.LEFT,
                    ptc=0
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
            Node(
                id=1,
                type=NodeType.SINK,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.NO_SIDE,
                    ptc=0
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
            Node(
                id=2,
                type=NodeType.OPIN,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.LEFT,
                    ptc=1
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
            Node(
                id=3,
                type=NodeType.SOURCE,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.NO_SIDE,
                    ptc=1
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
        ]
        # deepcopy so the Graph cannot mutate the reference node list.
        self.graph = Graph(
            self.switches, self.segments, self.block_types, self.grid,
            deepcopy(self.nodes)
        )

    def test_add_track(self):
        """Adding a vertical track creates one CHANY bidirectional node."""
        trk = Track(direction='Y', x_low=2, x_high=2, y_low=1, y_high=3)
        segment_id = -1
        node_id = self.graph.add_track(trk, segment_id)
        self.assertEqual(len(self.graph.tracks), 1)
        self.assertEqual(len(self.graph.nodes), 1)

        node = self.graph.nodes[node_id]
        self.assertEqual(node.id, len(self.graph.nodes) - 1)
        self.assertEqual(node.type, NodeType.CHANY)
        self.assertEqual(node.direction, NodeDirection.BI_DIR)
        self.assertEqual(node.capacity, 1)

    def test_add_edge(self):
        """add_edge records src/sink/switch and leaves metadata unset."""
        trk = Track(direction='Y', x_low=2, x_high=2, y_low=1, y_high=3)
        segment_id = -1
        self.graph.add_track(trk, segment_id)

        trk = Track(direction='X', x_low=1, x_high=3, y_low=1, y_high=1)
        segment_id = -1
        self.graph.add_track(trk, segment_id)

        trk = Track(direction='X', x_low=1, x_high=3, y_low=3, y_high=3)
        segment_id = -1
        self.graph.add_track(trk, segment_id)

        idx = self.graph.add_edge(1, 2, 0)
        self.assertEqual(self.graph.edges[idx].src_node, 1)
        self.assertEqual(self.graph.edges[idx].sink_node, 2)
        self.assertEqual(self.graph.edges[idx].switch_id, 0)
        self.assertEqual(self.graph.edges[idx].metadata, None)

    def test_add_switch(self):
        """add_switch appends the switch and registers its name."""
        idx = self.graph.add_switch(
            Switch(
                id=None,
                name='mux',
                type=SwitchType.MUX,
                timing=None,
                sizing=SwitchSizing(mux_trans_size=1, buf_size=0)
            )
        )
        self.assertEqual(idx, len(self.graph.switches) - 1)
        self.assertTrue('mux' in self.graph.switch_name_map.keys())

    def test_check_ptc(self):
        """check_ptc passes on an empty graph and fails on a track
        whose ptc was never assigned."""
        self.graph.check_ptc()

        trk = Track(direction='Y', x_low=2, x_high=2, y_low=1, y_high=3)
        segment_id = -1
        self.graph.add_track(trk, segment_id)

        with self.assertRaises(AssertionError):
            self.graph.check_ptc()

    def test_set_track_ptc(self):
        """Assigning a ptc to a new track makes check_ptc pass again."""
        trk = Track(direction='Y', x_low=2, x_high=2, y_low=1, y_high=3)
        segment_id = -1
        node_id = self.graph.add_track(trk, segment_id)

        with self.assertRaises(AssertionError):
            self.graph.check_ptc()

        self.graph.set_track_ptc(node_id, 0)
        self.graph.check_ptc()

    def test_block_type_at_loc_asserts(self):
        """Looking up a grid location absent from the (empty) grid raises."""
        loc = (0, 0)
        with self.assertRaises(KeyError):
            self.graph.block_type_at_loc(loc)

    def test_get_switch_id(self):
        """Unknown switch names raise; added switches resolve to their id."""
        with self.assertRaises(KeyError):
            self.graph.get_switch_id('mux')

        idx = self.graph.add_switch(
            Switch(
                id=None,
                name='mux',
                type=SwitchType.MUX,
                timing=None,
                sizing=SwitchSizing(mux_trans_size=1, buf_size=0)
            )
        )

        lu_idx = self.graph.get_switch_id('mux')
        self.assertEqual(idx, lu_idx)
class Graph2MediumTests(unittest.TestCase):
    """Tests for Graph queries that need a populated fixture.

    Unlike Graph2Tests, setUp here builds a full graph (two switches,
    two block types, a grid cell and four nodes) so location/pin lookups
    can be exercised.
    """

    def setUp(self):
        switch_timing = SwitchTiming(
            r=0, c_in=1, c_out=2, t_del=0, c_internal=0, p_cost=0
        )
        switch_sizing = SwitchSizing(mux_trans_size=0, buf_size=1)
        self.switches = [
            Switch(
                id=0,
                name='mux',
                type=SwitchType.MUX,
                timing=switch_timing,
                sizing=switch_sizing
            ),
            Switch(
                id=1,
                name='__vpr_delayless_switch__',
                type=SwitchType.SHORT,
                timing=switch_timing,
                sizing=switch_sizing
            ),
        ]
        seg_timing = SegmentTiming(r_per_meter=1, c_per_meter=1)
        self.segments = [Segment(id=0, name='s0', timing=seg_timing)]
        # b0 has input p1 / output p2; b1 only output p3 (b1 is not placed
        # on the grid, so its pins have no backing nodes).
        pin_classes0 = [
            PinClass(type=PinType.INPUT, pin=[Pin(ptc=0, name='p1')]),
            PinClass(type=PinType.OUTPUT, pin=[Pin(ptc=1, name='p2')]),
        ]
        pin_classes1 = [
            PinClass(type=PinType.OUTPUT, pin=[Pin(ptc=1, name='p3')]),
        ]
        self.block_types = [
            BlockType(
                id=0, name='b0', width=1, height=1, pin_class=pin_classes0
            ),
            BlockType(
                id=1, name='b1', width=1, height=1, pin_class=pin_classes1
            ),
        ]
        self.grid = [
            GridLoc(
                x=0, y=0, block_type_id=0, width_offset=0, height_offset=0
            ),
        ]
        node_timing = NodeTiming(r=0, c=0)
        # IPIN/SINK share ptc 0 (pin p1); OPIN/SOURCE share ptc 1 (pin p2).
        self.nodes = [
            Node(
                id=0,
                type=NodeType.IPIN,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.LEFT,
                    ptc=0
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
            Node(
                id=1,
                type=NodeType.SINK,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.NO_SIDE,
                    ptc=0
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
            Node(
                id=2,
                type=NodeType.OPIN,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.LEFT,
                    ptc=1
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
            Node(
                id=3,
                type=NodeType.SOURCE,
                direction=NodeDirection.NO_DIR,
                capacity=1,
                loc=NodeLoc(
                    x_low=0,
                    x_high=0,
                    y_low=0,
                    y_high=0,
                    side=Direction.NO_SIDE,
                    ptc=1
                ),
                timing=node_timing,
                metadata=None,
                segment=NodeSegment(segment_id=0),
                canonical_loc=None,
                connection_box=None,
            ),
        ]
        # deepcopy so the Graph cannot mutate the reference node list.
        self.graph = Graph(
            self.switches, self.segments, self.block_types, self.grid,
            deepcopy(self.nodes)
        )

    def test_block_type_at_loc(self):
        """Grid cell (0, 0) resolves to block type 'b0'."""
        loc = (0, 0)
        name = self.graph.block_type_at_loc(loc)
        self.assertEqual(name, 'b0')

    def test_get_nodes_for_pin(self):
        """Pin p1 at (0, 0) maps to IPIN node 0 on the LEFT side; unknown
        pins raise KeyError and pins without nodes raise AssertionError."""
        nodes = self.graph.get_nodes_for_pin((0, 0), 'p1')
        self.assertEqual(nodes, [
            (0, Direction.LEFT),
        ])

        with self.assertRaises(KeyError):
            self.graph.get_nodes_for_pin((0, 0), 'd1')

        with self.assertRaises(AssertionError):
            self.graph.get_nodes_for_pin((0, 0), 'p3')

    def test_create_channels(self):
        # TODO: placeholder — channel creation is not yet covered.
        pass
| 31.257576 | 77 | 0.479722 | 1,336 | 12,378 | 4.228293 | 0.099551 | 0.054169 | 0.017702 | 0.038237 | 0.818021 | 0.803151 | 0.764383 | 0.727208 | 0.713932 | 0.707028 | 0 | 0.028567 | 0.420262 | 12,378 | 395 | 78 | 31.336709 | 0.75864 | 0 | 0 | 0.771186 | 0 | 0 | 0.01026 | 0.005817 | 0 | 0 | 0 | 0 | 0.062147 | 1 | 0.036723 | false | 0.002825 | 0.011299 | 0 | 0.053672 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
82d27648eff53519e402b8b4de9f64da76d998e0 | 360,344 | py | Python | embyapi/api/image_service_api.py | stanionascu/python-embyapi | a3f7aa49aea4052277cc43605c0d89bc6ff21913 | [
"BSD-3-Clause"
] | null | null | null | embyapi/api/image_service_api.py | stanionascu/python-embyapi | a3f7aa49aea4052277cc43605c0d89bc6ff21913 | [
"BSD-3-Clause"
] | null | null | null | embyapi/api/image_service_api.py | stanionascu/python-embyapi | a3f7aa49aea4052277cc43605c0d89bc6ff21913 | [
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from embyapi.api_client import ApiClient
class ImageServiceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_items_by_id_images_by_type(self, id, type, **kwargs): # noqa: E501
"""delete_items_by_id_images_by_type # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_items_by_id_images_by_type(id, type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Item Id (required)
:param str type: Image Type (required)
:param int index: Image Index
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_items_by_id_images_by_type_with_http_info(id, type, **kwargs) # noqa: E501
else:
(data) = self.delete_items_by_id_images_by_type_with_http_info(id, type, **kwargs) # noqa: E501
return data
    def delete_items_by_id_images_by_type_with_http_info(self, id, type, **kwargs):  # noqa: E501
        """delete_items_by_id_images_by_type  # noqa: E501

        Requires authentication as administrator  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_items_by_id_images_by_type_with_http_info(id, type, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id: Item Id (required)
        :param str type: Image Type (required)
        :param int index: Image Index
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id', 'type', 'index']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Snapshot of the named arguments; unknown kwargs are rejected,
        # known ones are folded into the same dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_items_by_id_images_by_type" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `delete_items_by_id_images_by_type`")  # noqa: E501
        # verify the required parameter 'type' is set
        if ('type' not in params or
                params['type'] is None):
            raise ValueError("Missing the required parameter `type` when calling `delete_items_by_id_images_by_type`")  # noqa: E501

        collection_formats = {}

        # Id/Type are substituted into the URL path; Index goes on the
        # query string when supplied.
        path_params = {}
        if 'id' in params:
            path_params['Id'] = params['id']  # noqa: E501
        if 'type' in params:
            path_params['Type'] = params['type']  # noqa: E501

        query_params = []
        if 'index' in params:
            query_params.append(('Index', params['index']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

        return self.api_client.call_api(
            '/Items/{Id}/Images/{Type}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_items_by_id_images_by_type_by_index(self, id, type, index, **kwargs): # noqa: E501
"""delete_items_by_id_images_by_type_by_index # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_items_by_id_images_by_type_by_index(id, type, index, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Item Id (required)
:param str type: Image Type (required)
:param int index: Image Index (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_items_by_id_images_by_type_by_index_with_http_info(id, type, index, **kwargs) # noqa: E501
else:
(data) = self.delete_items_by_id_images_by_type_by_index_with_http_info(id, type, index, **kwargs) # noqa: E501
return data
def delete_items_by_id_images_by_type_by_index_with_http_info(self, id, type, index, **kwargs):  # noqa: E501
    """delete_items_by_id_images_by_type_by_index  # noqa: E501

    Requires authentication as administrator.  Synchronous by default;
    pass async_req=True to perform the request asynchronously instead:

    >>> thread = api.delete_items_by_id_images_by_type_by_index_with_http_info(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted on top of the positional parameters.
    recognized = frozenset([
        'id', 'type', 'index',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'id': id, 'type': type, 'index': index}
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_items_by_id_images_by_type_by_index" % key
            )
        params[key] = val

    # All three path parameters are mandatory.
    for required in ('id', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `delete_items_by_id_images_by_type_by_index`" % required)  # noqa: E501

    path_params = {
        'Id': params['id'],
        'Type': params['type'],
        'Index': params['index'],
    }

    # This endpoint takes no query string, headers, form fields or body.
    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}/{Index}', 'DELETE',
        path_params,
        [],   # query_params
        {},   # header_params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_users_by_id_images_by_type(self, id, type, **kwargs):  # noqa: E501
    """delete_users_by_id_images_by_type  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.delete_users_by_id_images_by_type(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever wants the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.delete_users_by_id_images_by_type_with_http_info
    # With async_req the delegate hands back the request thread; without
    # it, the deserialized response data -- both are returned unchanged.
    return delegate(id, type, **kwargs)  # noqa: E501
def delete_users_by_id_images_by_type_with_http_info(self, id, type, **kwargs):  # noqa: E501
    """delete_users_by_id_images_by_type  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.delete_users_by_id_images_by_type_with_http_info(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted on top of the positional parameters.
    recognized = frozenset([
        'id', 'type', 'index',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'id': id, 'type': type}
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_users_by_id_images_by_type" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    for required in ('id', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `delete_users_by_id_images_by_type`" % required)  # noqa: E501

    path_params = {'Id': params['id'], 'Type': params['type']}
    # Index is the only (optional) query parameter; forward it only when
    # the caller actually supplied it.
    query_params = []
    if 'index' in params:
        query_params.append(('Index', params['index']))  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}', 'DELETE',
        path_params,
        query_params,
        {},   # header_params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def delete_users_by_id_images_by_type_by_index(self, id, type, index, **kwargs):  # noqa: E501
    """delete_users_by_id_images_by_type_by_index  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.delete_users_by_id_images_by_type_by_index(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever wants the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.delete_users_by_id_images_by_type_by_index_with_http_info
    # With async_req the delegate hands back the request thread; without
    # it, the deserialized response data -- both are returned unchanged.
    return delegate(id, type, index, **kwargs)  # noqa: E501
def delete_users_by_id_images_by_type_by_index_with_http_info(self, id, type, index, **kwargs):  # noqa: E501
    """delete_users_by_id_images_by_type_by_index  # noqa: E501

    Requires authentication as user.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.delete_users_by_id_images_by_type_by_index_with_http_info(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted on top of the positional parameters.
    recognized = frozenset([
        'id', 'type', 'index',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ])
    params = {'id': id, 'type': type, 'index': index}
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_users_by_id_images_by_type_by_index" % key
            )
        params[key] = val

    # All three path parameters are mandatory.
    for required in ('id', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `delete_users_by_id_images_by_type_by_index`" % required)  # noqa: E501

    path_params = {
        'Id': params['id'],
        'Type': params['type'],
        'Index': params['index'],
    }

    # This endpoint takes no query string, headers, form fields or body.
    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}/{Index}', 'DELETE',
        path_params,
        [],   # query_params
        {},   # header_params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['apikeyauth', 'embyauth'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_artists_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """get_artists_by_name_images_by_type  # noqa: E501

    No authentication required.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.get_artists_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever wants the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.get_artists_by_name_images_by_type_with_http_info
    # With async_req the delegate hands back the request thread; without
    # it, the deserialized response data -- both are returned unchanged.
    return delegate(name, type, **kwargs)  # noqa: E501
def get_artists_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """get_artists_by_name_images_by_type  # noqa: E501

    No authentication required.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.get_artists_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # (python_name, wire_name) pairs for every optional query parameter,
    # in the order the client sends them on the wire.
    query_map = (
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    )
    recognized = {'name', 'type', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout'}
    recognized.update(py_name for py_name, _ in query_map)

    params = {'name': name, 'type': type}
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artists_by_name_images_by_type" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_artists_by_name_images_by_type`" % required)  # noqa: E501

    path_params = {'Name': params['name'], 'Type': params['type']}
    # Only forward query parameters the caller actually supplied.
    query_params = [(wire, params[py_name])
                    for py_name, wire in query_map if py_name in params]

    return self.api_client.call_api(
        '/Artists/{Name}/Images/{Type}', 'GET',
        path_params,
        query_params,
        {},   # header_params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],  # no authentication required
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_artists_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """get_artists_by_name_images_by_type_by_index  # noqa: E501

    No authentication required.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.get_artists_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever wants the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.get_artists_by_name_images_by_type_by_index_with_http_info
    # With async_req the delegate hands back the request thread; without
    # it, the deserialized response data -- both are returned unchanged.
    return delegate(name, type, index, **kwargs)  # noqa: E501
def get_artists_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """get_artists_by_name_images_by_type_by_index  # noqa: E501

    No authentication required.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.get_artists_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # (python_name, wire_name) pairs for every optional query parameter,
    # in the order the client sends them on the wire.  Index is a path
    # parameter on this endpoint, so it is not part of this table.
    query_map = (
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
    )
    recognized = {'name', 'type', 'index', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout'}
    recognized.update(py_name for py_name, _ in query_map)

    params = {'name': name, 'type': type, 'index': index}
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_artists_by_name_images_by_type_by_index" % key
            )
        params[key] = val

    # All three path parameters are mandatory.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_artists_by_name_images_by_type_by_index`" % required)  # noqa: E501

    path_params = {
        'Name': params['name'],
        'Type': params['type'],
        'Index': params['index'],
    }
    # Only forward query parameters the caller actually supplied.
    query_params = [(wire, params[py_name])
                    for py_name, wire in query_map if py_name in params]

    return self.api_client.call_api(
        '/Artists/{Name}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        {},   # header_params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],  # no authentication required
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_gamegenres_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """get_gamegenres_by_name_images_by_type  # noqa: E501

    No authentication required.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.get_gamegenres_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever wants the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.get_gamegenres_by_name_images_by_type_with_http_info
    # With async_req the delegate hands back the request thread; without
    # it, the deserialized response data -- both are returned unchanged.
    return delegate(name, type, **kwargs)  # noqa: E501
def get_gamegenres_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """get_gamegenres_by_name_images_by_type  # noqa: E501

    No authentication required.  Synchronous by default; pass
    async_req=True to perform the request asynchronously instead:

    >>> thread = api.get_gamegenres_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # (python_name, wire_name) pairs for every optional query parameter,
    # in the order the client sends them on the wire.
    query_map = (
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    )
    recognized = {'name', 'type', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout'}
    recognized.update(py_name for py_name, _ in query_map)

    params = {'name': name, 'type': type}
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_gamegenres_by_name_images_by_type" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_gamegenres_by_name_images_by_type`" % required)  # noqa: E501

    path_params = {'Name': params['name'], 'Type': params['type']}
    # Only forward query parameters the caller actually supplied.
    query_params = [(wire, params[py_name])
                    for py_name, wire in query_map if py_name in params]

    return self.api_client.call_api(
        '/GameGenres/{Name}/Images/{Type}', 'GET',
        path_params,
        query_params,
        {},   # header_params
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],  # no authentication required
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_gamegenres_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """Fetch a game-genre image by image type and index.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_gamegenres_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths invoke the identical call; when async, the
    # helper already returns the request thread.
    return self.get_gamegenres_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def get_gamegenres_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """Fetch a game-genre image by image type and index (full HTTP info variant).

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_gamegenres_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'name', 'type', 'index', 'max_width', 'max_height', 'width',
        'height', 'quality', 'tag', 'crop_whitespace',
        'enable_image_enhancers', 'format', 'add_played_indicator',
        'percent_played', 'unplayed_count', 'background_color',
        'foreground_layer',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_gamegenres_by_name_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']
    # All three path parameters are mandatory.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_gamegenres_by_name_images_by_type_by_index`" % required
            )
    collection_formats = {}
    path_params = {
        target: params[source]
        for target, source in (('Name', 'name'), ('Type', 'type'),
                               ('Index', 'index'))
        if source in params
    }
    # Table-driven query marshaling: order matches the API's
    # CamelCase parameter listing.
    query_params = [
        (target, params[source])
        for target, source in (
            ('MaxWidth', 'max_width'),
            ('MaxHeight', 'max_height'),
            ('Width', 'width'),
            ('Height', 'height'),
            ('Quality', 'quality'),
            ('Tag', 'tag'),
            ('CropWhitespace', 'crop_whitespace'),
            ('EnableImageEnhancers', 'enable_image_enhancers'),
            ('Format', 'format'),
            ('AddPlayedIndicator', 'add_played_indicator'),
            ('PercentPlayed', 'percent_played'),
            ('UnplayedCount', 'unplayed_count'),
            ('BackgroundColor', 'background_color'),
            ('ForegroundLayer', 'foreground_layer'),
        )
        if source in params
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Anonymous endpoint - no authentication schemes apply.
    auth_settings = []
    return self.api_client.call_api(
        '/GameGenres/{Name}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_genres_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """Fetch a genre image by image type.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_genres_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths invoke the identical call; when async, the
    # helper already returns the request thread.
    return self.get_genres_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def get_genres_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """Fetch a genre image by image type (full HTTP info variant).

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_genres_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'name', 'type', 'max_width', 'max_height', 'width', 'height',
        'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers',
        'format', 'add_played_indicator', 'percent_played',
        'unplayed_count', 'background_color', 'foreground_layer',
        'index',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_genres_by_name_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']
    # Both path parameters are mandatory.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_genres_by_name_images_by_type`" % required
            )
    collection_formats = {}
    path_params = {
        target: params[source]
        for target, source in (('Name', 'name'), ('Type', 'type'))
        if source in params
    }
    # Table-driven query marshaling: order matches the API's
    # CamelCase parameter listing.
    query_params = [
        (target, params[source])
        for target, source in (
            ('MaxWidth', 'max_width'),
            ('MaxHeight', 'max_height'),
            ('Width', 'width'),
            ('Height', 'height'),
            ('Quality', 'quality'),
            ('Tag', 'tag'),
            ('CropWhitespace', 'crop_whitespace'),
            ('EnableImageEnhancers', 'enable_image_enhancers'),
            ('Format', 'format'),
            ('AddPlayedIndicator', 'add_played_indicator'),
            ('PercentPlayed', 'percent_played'),
            ('UnplayedCount', 'unplayed_count'),
            ('BackgroundColor', 'background_color'),
            ('ForegroundLayer', 'foreground_layer'),
            ('Index', 'index'),
        )
        if source in params
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Anonymous endpoint - no authentication schemes apply.
    auth_settings = []
    return self.api_client.call_api(
        '/Genres/{Name}/Images/{Type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_genres_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """Fetch a genre image by image type and index.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_genres_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths invoke the identical call; when async, the
    # helper already returns the request thread.
    return self.get_genres_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def get_genres_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """Fetch a genre image by image type and index (full HTTP info variant).

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_genres_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'name', 'type', 'index', 'max_width', 'max_height', 'width',
        'height', 'quality', 'tag', 'crop_whitespace',
        'enable_image_enhancers', 'format', 'add_played_indicator',
        'percent_played', 'unplayed_count', 'background_color',
        'foreground_layer',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_genres_by_name_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']
    # All three path parameters are mandatory.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_genres_by_name_images_by_type_by_index`" % required
            )
    collection_formats = {}
    path_params = {
        target: params[source]
        for target, source in (('Name', 'name'), ('Type', 'type'),
                               ('Index', 'index'))
        if source in params
    }
    # Table-driven query marshaling: order matches the API's
    # CamelCase parameter listing.
    query_params = [
        (target, params[source])
        for target, source in (
            ('MaxWidth', 'max_width'),
            ('MaxHeight', 'max_height'),
            ('Width', 'width'),
            ('Height', 'height'),
            ('Quality', 'quality'),
            ('Tag', 'tag'),
            ('CropWhitespace', 'crop_whitespace'),
            ('EnableImageEnhancers', 'enable_image_enhancers'),
            ('Format', 'format'),
            ('AddPlayedIndicator', 'add_played_indicator'),
            ('PercentPlayed', 'percent_played'),
            ('UnplayedCount', 'unplayed_count'),
            ('BackgroundColor', 'background_color'),
            ('ForegroundLayer', 'foreground_layer'),
        )
        if source in params
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Anonymous endpoint - no authentication schemes apply.
    auth_settings = []
    return self.api_client.call_api(
        '/Genres/{Name}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_by_id_images(self, id, **kwargs):  # noqa: E501
    """Gets information about an item's images.

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_items_by_id_images(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: list[ImageInfo]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths invoke the identical call; when async, the
    # helper already returns the request thread.
    return self.get_items_by_id_images_with_http_info(id, **kwargs)  # noqa: E501
def get_items_by_id_images_with_http_info(self, id, **kwargs):  # noqa: E501
    """Gets information about an item's images (full HTTP info variant).

    Requires authentication as user.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_items_by_id_images_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :return: list[ImageInfo]
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'id',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_images" % key
            )
        params[key] = val
    del params['kwargs']
    # The item id is mandatory.
    if params.get('id') is None:
        raise ValueError(
            "Missing the required parameter `%s` when calling "
            "`get_items_by_id_images`" % 'id'
        )
    collection_formats = {}
    path_params = {
        target: params[source]
        for target, source in (('Id', 'id'),)
        if source in params
    }
    query_params = []
    # Negotiate a structured response body.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/xml']),
    }
    form_params = []
    local_var_files = {}
    body_params = None
    # This endpoint requires either an API key or a user session.
    auth_settings = ['apikeyauth', 'embyauth']
    return self.api_client.call_api(
        '/Items/{Id}/Images', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ImageInfo]',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_by_id_images_by_type(self, id, type, **kwargs):  # noqa: E501
    """Fetch an item image by image type.

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_items_by_id_images_by_type(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths invoke the identical call; when async, the
    # helper already returns the request thread.
    return self.get_items_by_id_images_by_type_with_http_info(id, type, **kwargs)  # noqa: E501
def get_items_by_id_images_by_type_with_http_info(self, id, type, **kwargs):  # noqa: E501
    """Fetch an item image by image type (full HTTP info variant).

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to get the request thread back instead:

    >>> thread = api.get_items_by_id_images_by_type_with_http_info(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int max_width: Maximum image width to return.
    :param int max_height: Maximum image height to return.
    :param int width: Fixed image width to return.
    :param int height: Fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (default 90).
    :param str tag: Optional cache tag from the item object, enabling strong caching headers.
    :param bool crop_whitespace: Whether to crop whitespace out of the image; when unspecified, logos and clear art are cropped.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed-count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer drawn on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'id', 'type', 'max_width', 'max_height', 'width', 'height',
        'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers',
        'format', 'add_played_indicator', 'percent_played',
        'unplayed_count', 'background_color', 'foreground_layer',
        'index',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]
    params = locals()
    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']
    # Both path parameters are mandatory.
    for required in ('id', 'type'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`get_items_by_id_images_by_type`" % required
            )
    collection_formats = {}
    path_params = {
        target: params[source]
        for target, source in (('Id', 'id'), ('Type', 'type'))
        if source in params
    }
    # Table-driven query marshaling: order matches the API's
    # CamelCase parameter listing.
    query_params = [
        (target, params[source])
        for target, source in (
            ('MaxWidth', 'max_width'),
            ('MaxHeight', 'max_height'),
            ('Width', 'width'),
            ('Height', 'height'),
            ('Quality', 'quality'),
            ('Tag', 'tag'),
            ('CropWhitespace', 'crop_whitespace'),
            ('EnableImageEnhancers', 'enable_image_enhancers'),
            ('Format', 'format'),
            ('AddPlayedIndicator', 'add_played_indicator'),
            ('PercentPlayed', 'percent_played'),
            ('UnplayedCount', 'unplayed_count'),
            ('BackgroundColor', 'background_color'),
            ('ForegroundLayer', 'foreground_layer'),
            ('Index', 'index'),
        )
        if source in params
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Anonymous endpoint - no authentication schemes apply.
    auth_settings = []
    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_by_id_images_by_type_by_index(self, id, type, index, **kwargs):  # noqa: E501
    """Fetch the image of the given type and index for an item.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_items_by_id_images_by_type_by_index(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param str tag: Optional cache tag from the item object, enables strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; the request thread when called asynchronously.
    """
    # Convenience wrapper: callers always get the bare payload (or the
    # async thread), never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate honours async_req itself, so the sync and async
    # branches collapse into a single call.
    return self.get_items_by_id_images_by_type_by_index_with_http_info(
        id, type, index, **kwargs)  # noqa: E501
def get_items_by_id_images_by_type_by_index_with_http_info(self, id, type, index, **kwargs):  # noqa: E501
    """Fetch the image of the given type and index for an item.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_items_by_id_images_by_type_by_index_with_http_info(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param str tag: Optional cache tag from the item object, enables strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; the request thread when called asynchronously.
    :raises TypeError: on an unrecognised keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Every keyword this endpoint accepts; anything else is a caller typo.
    all_params = ['id', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']

    # All path components are mandatory; fail fast before building the request.
    for required in ('id', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_items_by_id_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}

    # URL path placeholders: snake_case local -> PascalCase template key.
    path_params = {}
    for attr, placeholder in (('id', 'Id'), ('type', 'Type'), ('index', 'Index')):
        if attr in params:
            path_params[placeholder] = params[attr]

    # Optional query-string parameters, emitted only when supplied.
    query_params = [
        (qs_key, params[attr])
        for attr, qs_key in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: this endpoint is anonymous.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount(self, id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, **kwargs):  # noqa: E501
    """Fetch an item image addressed entirely by path segments.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount(id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param int max_width: The maximum image width to return. (required)
    :param int max_height: The maximum image height to return. (required)
    :param str tag: Cache tag from the item object, enables strong caching headers. (required)
    :param str format: Output format of the image - original,gif,jpg,png (required)
    :param float percent_played: Percent to render for the percent played overlay. (required)
    :param int unplayed_count: Unplayed count overlay to render. (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; the request thread when called asynchronously.
    """
    # Convenience wrapper: callers always get the bare payload (or the
    # async thread), never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate honours async_req itself, so the sync and async
    # branches collapse into a single call.
    return self.get_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount_with_http_info(
        id, max_width, max_height, tag, format, percent_played,
        unplayed_count, type, index, **kwargs)  # noqa: E501
def get_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount_with_http_info(self, id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, **kwargs):  # noqa: E501
    """Fetch an item image addressed entirely by path segments.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount_with_http_info(id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param int max_width: The maximum image width to return. (required)
    :param int max_height: The maximum image height to return. (required)
    :param str tag: Cache tag from the item object, enables strong caching headers. (required)
    :param str format: Output format of the image - original,gif,jpg,png (required)
    :param float percent_played: Percent to render for the percent played overlay. (required)
    :param int unplayed_count: Unplayed count overlay to render. (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; the request thread when called asynchronously.
    :raises TypeError: on an unrecognised keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Every keyword this endpoint accepts; anything else is a caller typo.
    all_params = ['id', 'max_width', 'max_height', 'tag', 'format', 'percent_played', 'unplayed_count', 'type', 'index', 'width', 'height', 'quality', 'crop_whitespace', 'enable_image_enhancers', 'add_played_indicator', 'background_color', 'foreground_layer']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount" % key
            )
        params[key] = val
    del params['kwargs']

    # All path components are mandatory; fail fast before building the request.
    for required in ('id', 'max_width', 'max_height', 'tag', 'format',
                     'percent_played', 'unplayed_count', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount`" % required)  # noqa: E501

    collection_formats = {}

    # URL path placeholders: snake_case local -> PascalCase template key.
    path_params = {}
    for attr, placeholder in (
            ('id', 'Id'),
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('tag', 'Tag'),
            ('format', 'Format'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('type', 'Type'),
            ('index', 'Index')):
        if attr in params:
            path_params[placeholder] = params[attr]

    # Optional query-string parameters, emitted only when supplied.
    query_params = [
        (qs_key, params[attr])
        for attr, qs_key in (
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: this endpoint is anonymous.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}/{Index}/{Tag}/{Format}/{MaxWidth}/{MaxHeight}/{PercentPlayed}/{UnplayedCount}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_musicgenres_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """Fetch the image of the given type for a music genre.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_musicgenres_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param str tag: Optional cache tag from the item object, enables strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :param int index: Image Index
    :return: None; the request thread when called asynchronously.
    """
    # Convenience wrapper: callers always get the bare payload (or the
    # async thread), never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate honours async_req itself, so the sync and async
    # branches collapse into a single call.
    return self.get_musicgenres_by_name_images_by_type_with_http_info(
        name, type, **kwargs)  # noqa: E501
def get_musicgenres_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """Fetch the image of the given type for a music genre.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_musicgenres_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param str tag: Optional cache tag from the item object, enables strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :param int index: Image Index
    :return: None; the request thread when called asynchronously.
    :raises TypeError: on an unrecognised keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Every keyword this endpoint accepts; anything else is a caller typo.
    all_params = ['name', 'type', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'index']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_musicgenres_by_name_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path components are mandatory; fail fast before building the request.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_musicgenres_by_name_images_by_type`" % required)  # noqa: E501

    collection_formats = {}

    # URL path placeholders: snake_case local -> PascalCase template key.
    path_params = {}
    for attr, placeholder in (('name', 'Name'), ('type', 'Type')):
        if attr in params:
            path_params[placeholder] = params[attr]

    # Optional query-string parameters, emitted only when supplied.
    query_params = [
        (qs_key, params[attr])
        for attr, qs_key in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
            ('index', 'Index'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: this endpoint is anonymous.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/MusicGenres/{Name}/Images/{Type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_musicgenres_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """Fetch the image of the given type and index for a music genre.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_musicgenres_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param str tag: Optional cache tag from the item object, enables strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; the request thread when called asynchronously.
    """
    # Convenience wrapper: callers always get the bare payload (or the
    # async thread), never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate honours async_req itself, so the sync and async
    # branches collapse into a single call.
    return self.get_musicgenres_by_name_images_by_type_by_index_with_http_info(
        name, type, index, **kwargs)  # noqa: E501
def get_musicgenres_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """Fetch the image of the given type and index for a music genre.  # noqa: E501

    No authentication required.  Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.get_musicgenres_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90.
    :param str tag: Optional cache tag from the item object, enables strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image (True/False).
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; the request thread when called asynchronously.
    :raises TypeError: on an unrecognised keyword argument.
    :raises ValueError: when a required parameter is None.
    """
    # Every keyword this endpoint accepts; anything else is a caller typo.
    all_params = ['name', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_musicgenres_by_name_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']

    # All path components are mandatory; fail fast before building the request.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `get_musicgenres_by_name_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}

    # URL path placeholders: snake_case local -> PascalCase template key.
    path_params = {}
    for attr, placeholder in (('name', 'Name'), ('type', 'Type'), ('index', 'Index')):
        if attr in params:
            path_params[placeholder] = params[attr]

    # Optional query-string parameters, emitted only when supplied.
    query_params = [
        (qs_key, params[attr])
        for attr, qs_key in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: this endpoint is anonymous.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/MusicGenres/{Name}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_persons_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """get_persons_by_name_images_by_type  # noqa: E501

    Fetch a person's image of the given type. No authentication
    required. Synchronous by default; pass async_req=True to receive
    the request thread instead of the response.

    >>> thread = api.get_persons_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the HTTP payload,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the *_with_http_info variant already hands back
    # the request thread; otherwise it returns the unwrapped data — either
    # way its result is exactly what this wrapper should return.
    return self.get_persons_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def get_persons_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """get_persons_by_name_images_by_type  # noqa: E501

    Perform the GET request against ``/Persons/{Name}/Images/{Type}``.
    No authentication required. Synchronous by default; pass
    async_req=True for an asynchronous request.

    >>> thread = api.get_persons_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client's standard request-control options.
    accepted = ['name', 'type', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'index', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']  # noqa: E501

    params = {'name': name, 'type': type}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_persons_by_name_images_by_type" % key
            )
        params[key] = val

    # Required path parameters must be supplied and non-None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `get_persons_by_name_images_by_type`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `get_persons_by_name_images_by_type`")  # noqa: E501

    collection_formats = {}
    path_params = {'Name': params['name'], 'Type': params['type']}

    # (python_name, wire_name) pairs, in the order the query string is built.
    query_spec = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    ]
    query_params = [(wire, params[py]) for py, wire in query_spec if py in params]

    # No body, form fields, file uploads or auth settings for this endpoint.
    return self.api_client.call_api(
        '/Persons/{Name}/Images/{Type}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_persons_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """get_persons_by_name_images_by_type_by_index  # noqa: E501

    Fetch a person's image of the given type at a specific index. No
    authentication required. Synchronous by default; pass
    async_req=True to receive the request thread instead of the
    response.

    >>> thread = api.get_persons_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the HTTP payload,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the *_with_http_info variant already hands back
    # the request thread; otherwise it returns the unwrapped data — either
    # way its result is exactly what this wrapper should return.
    return self.get_persons_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def get_persons_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """get_persons_by_name_images_by_type_by_index  # noqa: E501

    Perform the GET request against
    ``/Persons/{Name}/Images/{Type}/{Index}``. No authentication
    required. Synchronous by default; pass async_req=True for an
    asynchronous request.

    >>> thread = api.get_persons_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client's standard request-control options.
    accepted = ['name', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']  # noqa: E501

    params = {'name': name, 'type': type, 'index': index}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_persons_by_name_images_by_type_by_index" % key
            )
        params[key] = val

    # Required path parameters must be supplied and non-None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `get_persons_by_name_images_by_type_by_index`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `get_persons_by_name_images_by_type_by_index`")  # noqa: E501
    if params.get('index') is None:
        raise ValueError("Missing the required parameter `index` when calling `get_persons_by_name_images_by_type_by_index`")  # noqa: E501

    collection_formats = {}
    path_params = {
        'Name': params['name'],
        'Type': params['type'],
        'Index': params['index'],
    }

    # (python_name, wire_name) pairs, in the order the query string is built.
    query_spec = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
    ]
    query_params = [(wire, params[py]) for py, wire in query_spec if py in params]

    # No body, form fields, file uploads or auth settings for this endpoint.
    return self.api_client.call_api(
        '/Persons/{Name}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_studios_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """get_studios_by_name_images_by_type  # noqa: E501

    Fetch a studio's image of the given type. No authentication
    required. Synchronous by default; pass async_req=True to receive
    the request thread instead of the response.

    >>> thread = api.get_studios_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the HTTP payload,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the *_with_http_info variant already hands back
    # the request thread; otherwise it returns the unwrapped data — either
    # way its result is exactly what this wrapper should return.
    return self.get_studios_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def get_studios_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """get_studios_by_name_images_by_type  # noqa: E501

    Perform the GET request against ``/Studios/{Name}/Images/{Type}``.
    No authentication required. Synchronous by default; pass
    async_req=True for an asynchronous request.

    >>> thread = api.get_studios_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client's standard request-control options.
    accepted = ['name', 'type', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'index', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']  # noqa: E501

    params = {'name': name, 'type': type}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_studios_by_name_images_by_type" % key
            )
        params[key] = val

    # Required path parameters must be supplied and non-None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `get_studios_by_name_images_by_type`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `get_studios_by_name_images_by_type`")  # noqa: E501

    collection_formats = {}
    path_params = {'Name': params['name'], 'Type': params['type']}

    # (python_name, wire_name) pairs, in the order the query string is built.
    query_spec = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    ]
    query_params = [(wire, params[py]) for py, wire in query_spec if py in params]

    # No body, form fields, file uploads or auth settings for this endpoint.
    return self.api_client.call_api(
        '/Studios/{Name}/Images/{Type}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_studios_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """get_studios_by_name_images_by_type_by_index  # noqa: E501

    Fetch a studio's image of the given type at a specific index. No
    authentication required. Synchronous by default; pass
    async_req=True to receive the request thread instead of the
    response.

    >>> thread = api.get_studios_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the HTTP payload,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the *_with_http_info variant already hands back
    # the request thread; otherwise it returns the unwrapped data — either
    # way its result is exactly what this wrapper should return.
    return self.get_studios_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def get_studios_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """get_studios_by_name_images_by_type_by_index  # noqa: E501

    Perform the GET request against
    ``/Studios/{Name}/Images/{Type}/{Index}``. No authentication
    required. Synchronous by default; pass async_req=True for an
    asynchronous request.

    >>> thread = api.get_studios_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the client's standard request-control options.
    accepted = ['name', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']  # noqa: E501

    params = {'name': name, 'type': type, 'index': index}
    for key, val in kwargs.items():
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_studios_by_name_images_by_type_by_index" % key
            )
        params[key] = val

    # Required path parameters must be supplied and non-None.
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `get_studios_by_name_images_by_type_by_index`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `get_studios_by_name_images_by_type_by_index`")  # noqa: E501
    if params.get('index') is None:
        raise ValueError("Missing the required parameter `index` when calling `get_studios_by_name_images_by_type_by_index`")  # noqa: E501

    collection_formats = {}
    path_params = {
        'Name': params['name'],
        'Type': params['type'],
        'Index': params['index'],
    }

    # (python_name, wire_name) pairs, in the order the query string is built.
    query_spec = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
    ]
    query_params = [(wire, params[py]) for py, wire in query_spec if py in params]

    # No body, form fields, file uploads or auth settings for this endpoint.
    return self.api_client.call_api(
        '/Studios/{Name}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        {},
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_users_by_id_images_by_type(self, id, type, **kwargs):  # noqa: E501
    """get_users_by_id_images_by_type  # noqa: E501

    Fetch a user's image of the given type. No authentication
    required. Synchronous by default; pass async_req=True to receive
    the request thread instead of the response.

    >>> thread = api.get_users_by_id_images_by_type(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper only want the HTTP payload,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the *_with_http_info variant already hands back
    # the request thread; otherwise it returns the unwrapped data — either
    # way its result is exactly what this wrapper should return.
    return self.get_users_by_id_images_by_type_with_http_info(id, type, **kwargs)  # noqa: E501
def get_users_by_id_images_by_type_with_http_info(self, id, type, **kwargs):  # noqa: E501
    """get_users_by_id_images_by_type  # noqa: E501

    Fetch an image of the given type for a user. No authentication required.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_users_by_id_images_by_type_with_http_info(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['id', 'type', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'index']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Collect arguments explicitly (instead of a fragile locals() snapshot)
    # and reject any keyword this endpoint does not recognise.
    # dict.items() replaces the six.iteritems py2 shim; identical on py3.
    params = {'id': id, 'type': type}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_users_by_id_images_by_type" % key
            )
        params[key] = val

    # verify the required parameters are set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_users_by_id_images_by_type`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `get_users_by_id_images_by_type`")  # noqa: E501

    collection_formats = {}

    path_params = {'Id': params['id'], 'Type': params['type']}

    # Map python-style kwarg names to their PascalCase query-string names;
    # only parameters the caller actually supplied are forwarded.
    query_name_map = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    ]
    query_params = [(api_name, params[py_name])
                    for py_name, api_name in query_name_map
                    if py_name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_users_by_id_images_by_type_by_index(self, id, type, index, **kwargs):  # noqa: E501
    """get_users_by_id_images_by_type_by_index  # noqa: E501

    Fetch a specific indexed image of the given type for a user. No authentication required.  # noqa: E501

    By default the request is made synchronously; pass ``async_req=True``
    to receive the request thread instead.
    >>> thread = api.get_users_by_id_images_by_type_by_index(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the transport layer for the payload only; the *_with_http_info
    # variant returns either the data (sync) or the request thread (async),
    # so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.get_users_by_id_images_by_type_by_index_with_http_info(id, type, index, **kwargs)  # noqa: E501
def get_users_by_id_images_by_type_by_index_with_http_info(self, id, type, index, **kwargs):  # noqa: E501
    """get_users_by_id_images_by_type_by_index  # noqa: E501

    Fetch a specific indexed image of the given type for a user. No authentication required.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_users_by_id_images_by_type_by_index_with_http_info(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['id', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Collect arguments explicitly (instead of a fragile locals() snapshot)
    # and reject any keyword this endpoint does not recognise.
    # dict.items() replaces the six.iteritems py2 shim; identical on py3.
    params = {'id': id, 'type': type, 'index': index}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_users_by_id_images_by_type_by_index" % key
            )
        params[key] = val

    # verify the required parameters are set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `get_users_by_id_images_by_type_by_index`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `get_users_by_id_images_by_type_by_index`")  # noqa: E501
    if params.get('index') is None:
        raise ValueError("Missing the required parameter `index` when calling `get_users_by_id_images_by_type_by_index`")  # noqa: E501

    collection_formats = {}

    path_params = {'Id': params['id'], 'Type': params['type'], 'Index': params['index']}

    # Map python-style kwarg names to their PascalCase query-string names;
    # only parameters the caller actually supplied are forwarded.
    query_name_map = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
    ]
    query_params = [(api_name, params[py_name])
                    for py_name, api_name in query_name_map
                    if py_name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}/{Index}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_artists_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """head_artists_by_name_images_by_type  # noqa: E501

    HEAD request for an artist image of the given type. No authentication required.  # noqa: E501

    By default the request is made synchronously; pass ``async_req=True``
    to receive the request thread instead.
    >>> thread = api.head_artists_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the transport layer for the payload only; the *_with_http_info
    # variant returns either the data (sync) or the request thread (async),
    # so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_artists_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def head_artists_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """head_artists_by_name_images_by_type  # noqa: E501

    HEAD request for an artist image of the given type. No authentication required.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.head_artists_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['name', 'type', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'index']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Collect arguments explicitly (instead of a fragile locals() snapshot)
    # and reject any keyword this endpoint does not recognise.
    # dict.items() replaces the six.iteritems py2 shim; identical on py3.
    params = {'name': name, 'type': type}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_artists_by_name_images_by_type" % key
            )
        params[key] = val

    # verify the required parameters are set
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `head_artists_by_name_images_by_type`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `head_artists_by_name_images_by_type`")  # noqa: E501

    collection_formats = {}

    path_params = {'Name': params['name'], 'Type': params['type']}

    # Map python-style kwarg names to their PascalCase query-string names;
    # only parameters the caller actually supplied are forwarded.
    query_name_map = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    ]
    query_params = [(api_name, params[py_name])
                    for py_name, api_name in query_name_map
                    if py_name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Artists/{Name}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_artists_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """head_artists_by_name_images_by_type_by_index  # noqa: E501

    HEAD request for a specific indexed artist image of the given type. No authentication required.  # noqa: E501

    By default the request is made synchronously; pass ``async_req=True``
    to receive the request thread instead.
    >>> thread = api.head_artists_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the transport layer for the payload only; the *_with_http_info
    # variant returns either the data (sync) or the request thread (async),
    # so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_artists_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def head_artists_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """head_artists_by_name_images_by_type_by_index  # noqa: E501

    HEAD request for a specific indexed artist image of the given type. No authentication required.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.head_artists_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['name', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Collect arguments explicitly (instead of a fragile locals() snapshot)
    # and reject any keyword this endpoint does not recognise.
    # dict.items() replaces the six.iteritems py2 shim; identical on py3.
    params = {'name': name, 'type': type, 'index': index}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_artists_by_name_images_by_type_by_index" % key
            )
        params[key] = val

    # verify the required parameters are set
    if params.get('name') is None:
        raise ValueError("Missing the required parameter `name` when calling `head_artists_by_name_images_by_type_by_index`")  # noqa: E501
    if params.get('type') is None:
        raise ValueError("Missing the required parameter `type` when calling `head_artists_by_name_images_by_type_by_index`")  # noqa: E501
    if params.get('index') is None:
        raise ValueError("Missing the required parameter `index` when calling `head_artists_by_name_images_by_type_by_index`")  # noqa: E501

    collection_formats = {}

    path_params = {'Name': params['name'], 'Type': params['type'], 'Index': params['index']}

    # Map python-style kwarg names to their PascalCase query-string names;
    # only parameters the caller actually supplied are forwarded.
    query_name_map = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
    ]
    query_params = [(api_name, params[py_name])
                    for py_name, api_name in query_name_map
                    if py_name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Artists/{Name}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_gamegenres_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """head_gamegenres_by_name_images_by_type  # noqa: E501

    HEAD request for a game-genre image of the given type. No authentication required.  # noqa: E501

    By default the request is made synchronously; pass ``async_req=True``
    to receive the request thread instead.
    >>> thread = api.head_gamegenres_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the transport layer for the payload only; the *_with_http_info
    # variant returns either the data (sync) or the request thread (async),
    # so a single return covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_gamegenres_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def head_gamegenres_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """HEAD /GameGenres/{Name}/Images/{Type} (full HTTP info)  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_gamegenres_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :param int index: Image Index
    :return: None; if called asynchronously, returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the
    # api_client control knobs.
    all_params = [
        'name', 'type', 'max_width', 'max_height', 'width', 'height',
        'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers',
        'format', 'add_played_indicator', 'percent_played',
        'unplayed_count', 'background_color', 'foreground_layer', 'index',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_gamegenres_by_name_images_by_type" % arg
            )
        params[arg] = value
    del params['kwargs']

    # Both path parameters are mandatory and may not be None.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`head_gamegenres_by_name_images_by_type`" % required
            )  # noqa: E501

    collection_formats = {}

    path_params = {}
    for wire_name, local_name in (('Name', 'name'), ('Type', 'type')):
        if local_name in params:
            path_params[wire_name] = params[local_name]  # noqa: E501

    # Wire-name -> local-name table for the optional query string
    # parameters; only those actually supplied are sent.
    optional_query = (
        ('MaxWidth', 'max_width'),
        ('MaxHeight', 'max_height'),
        ('Width', 'width'),
        ('Height', 'height'),
        ('Quality', 'quality'),
        ('Tag', 'tag'),
        ('CropWhitespace', 'crop_whitespace'),
        ('EnableImageEnhancers', 'enable_image_enhancers'),
        ('Format', 'format'),
        ('AddPlayedIndicator', 'add_played_indicator'),
        ('PercentPlayed', 'percent_played'),
        ('UnplayedCount', 'unplayed_count'),
        ('BackgroundColor', 'background_color'),
        ('ForegroundLayer', 'foreground_layer'),
        ('Index', 'index'),
    )
    query_params = [(wire_name, params[local_name])
                    for wire_name, local_name in optional_query
                    if local_name in params]  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/GameGenres/{Name}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_gamegenres_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """HEAD /GameGenres/{Name}/Images/{Type}/{Index}  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_gamegenres_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; if called asynchronously, returns the request thread.
    """
    # This convenience wrapper always unwraps the response so callers
    # get only the payload, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the data for sync
    # calls and the request thread for async ones, so one delegating
    # return covers both paths.
    return self.head_gamegenres_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def head_gamegenres_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """HEAD /GameGenres/{Name}/Images/{Type}/{Index} (full HTTP info)  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_gamegenres_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; if called asynchronously, returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the
    # api_client control knobs.
    all_params = [
        'name', 'type', 'index', 'max_width', 'max_height', 'width',
        'height', 'quality', 'tag', 'crop_whitespace',
        'enable_image_enhancers', 'format', 'add_played_indicator',
        'percent_played', 'unplayed_count', 'background_color',
        'foreground_layer',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_gamegenres_by_name_images_by_type_by_index" % arg
            )
        params[arg] = value
    del params['kwargs']

    # All three path parameters are mandatory and may not be None.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`head_gamegenres_by_name_images_by_type_by_index`" % required
            )  # noqa: E501

    collection_formats = {}

    path_params = {}
    for wire_name, local_name in (('Name', 'name'), ('Type', 'type'),
                                  ('Index', 'index')):
        if local_name in params:
            path_params[wire_name] = params[local_name]  # noqa: E501

    # Wire-name -> local-name table for the optional query string
    # parameters; only those actually supplied are sent.
    optional_query = (
        ('MaxWidth', 'max_width'),
        ('MaxHeight', 'max_height'),
        ('Width', 'width'),
        ('Height', 'height'),
        ('Quality', 'quality'),
        ('Tag', 'tag'),
        ('CropWhitespace', 'crop_whitespace'),
        ('EnableImageEnhancers', 'enable_image_enhancers'),
        ('Format', 'format'),
        ('AddPlayedIndicator', 'add_played_indicator'),
        ('PercentPlayed', 'percent_played'),
        ('UnplayedCount', 'unplayed_count'),
        ('BackgroundColor', 'background_color'),
        ('ForegroundLayer', 'foreground_layer'),
    )
    query_params = [(wire_name, params[local_name])
                    for wire_name, local_name in optional_query
                    if local_name in params]  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/GameGenres/{Name}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_genres_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """HEAD /Genres/{Name}/Images/{Type}  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_genres_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :param int index: Image Index
    :return: None; if called asynchronously, returns the request thread.
    """
    # This convenience wrapper always unwraps the response so callers
    # get only the payload, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the data for sync
    # calls and the request thread for async ones, so one delegating
    # return covers both paths.
    return self.head_genres_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def head_genres_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """HEAD /Genres/{Name}/Images/{Type} (full HTTP info)  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_genres_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :param int index: Image Index
    :return: None; if called asynchronously, returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the
    # api_client control knobs.
    all_params = [
        'name', 'type', 'max_width', 'max_height', 'width', 'height',
        'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers',
        'format', 'add_played_indicator', 'percent_played',
        'unplayed_count', 'background_color', 'foreground_layer', 'index',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_genres_by_name_images_by_type" % arg
            )
        params[arg] = value
    del params['kwargs']

    # Both path parameters are mandatory and may not be None.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`head_genres_by_name_images_by_type`" % required
            )  # noqa: E501

    collection_formats = {}

    path_params = {}
    for wire_name, local_name in (('Name', 'name'), ('Type', 'type')):
        if local_name in params:
            path_params[wire_name] = params[local_name]  # noqa: E501

    # Wire-name -> local-name table for the optional query string
    # parameters; only those actually supplied are sent.
    optional_query = (
        ('MaxWidth', 'max_width'),
        ('MaxHeight', 'max_height'),
        ('Width', 'width'),
        ('Height', 'height'),
        ('Quality', 'quality'),
        ('Tag', 'tag'),
        ('CropWhitespace', 'crop_whitespace'),
        ('EnableImageEnhancers', 'enable_image_enhancers'),
        ('Format', 'format'),
        ('AddPlayedIndicator', 'add_played_indicator'),
        ('PercentPlayed', 'percent_played'),
        ('UnplayedCount', 'unplayed_count'),
        ('BackgroundColor', 'background_color'),
        ('ForegroundLayer', 'foreground_layer'),
        ('Index', 'index'),
    )
    query_params = [(wire_name, params[local_name])
                    for wire_name, local_name in optional_query
                    if local_name in params]  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Genres/{Name}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_genres_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """HEAD /Genres/{Name}/Images/{Type}/{Index}  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_genres_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; if called asynchronously, returns the request thread.
    """
    # This convenience wrapper always unwraps the response so callers
    # get only the payload, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the data for sync
    # calls and the request thread for async ones, so one delegating
    # return covers both paths.
    return self.head_genres_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def head_genres_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """HEAD /Genres/{Name}/Images/{Type}/{Index} (full HTTP info)  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_genres_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :return: None; if called asynchronously, returns the request thread.
    """
    # Accepted keyword arguments: endpoint parameters plus the
    # api_client control knobs.
    all_params = [
        'name', 'type', 'index', 'max_width', 'max_height', 'width',
        'height', 'quality', 'tag', 'crop_whitespace',
        'enable_image_enhancers', 'format', 'add_played_indicator',
        'percent_played', 'unplayed_count', 'background_color',
        'foreground_layer',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]  # noqa: E501

    # Snapshot the positional args, then fold validated kwargs in.
    params = locals()
    for arg, value in six.iteritems(params['kwargs']):
        if arg not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_genres_by_name_images_by_type_by_index" % arg
            )
        params[arg] = value
    del params['kwargs']

    # All three path parameters are mandatory and may not be None.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`head_genres_by_name_images_by_type_by_index`" % required
            )  # noqa: E501

    collection_formats = {}

    path_params = {}
    for wire_name, local_name in (('Name', 'name'), ('Type', 'type'),
                                  ('Index', 'index')):
        if local_name in params:
            path_params[wire_name] = params[local_name]  # noqa: E501

    # Wire-name -> local-name table for the optional query string
    # parameters; only those actually supplied are sent.
    optional_query = (
        ('MaxWidth', 'max_width'),
        ('MaxHeight', 'max_height'),
        ('Width', 'width'),
        ('Height', 'height'),
        ('Quality', 'quality'),
        ('Tag', 'tag'),
        ('CropWhitespace', 'crop_whitespace'),
        ('EnableImageEnhancers', 'enable_image_enhancers'),
        ('Format', 'format'),
        ('AddPlayedIndicator', 'add_played_indicator'),
        ('PercentPlayed', 'percent_played'),
        ('UnplayedCount', 'unplayed_count'),
        ('BackgroundColor', 'background_color'),
        ('ForegroundLayer', 'foreground_layer'),
    )
    query_params = [(wire_name, params[local_name])
                    for wire_name, local_name in optional_query
                    if local_name in params]  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Genres/{Name}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_items_by_id_images_by_type(self, id, type, **kwargs):  # noqa: E501
    """HEAD /Items/{Id}/Images/{Type}  # noqa: E501

    No authentication required. Synchronous by default; pass
    ``async_req=True`` to receive the request thread instead.
    >>> thread = api.head_items_by_id_images_by_type(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, 0-100 (defaults to 90).
    :param str tag: Optional cache tag from the item object, to receive strong caching headers.
    :param bool crop_whitespace: Crop whitespace out of the image; if unspecified, whitespace is cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator.
    :param float percent_played: Optional percent to render for the percent-played overlay.
    :param int unplayed_count: Optional unplayed count overlay to render.
    :param str background_color: Optional background color for transparent images.
    :param str foreground_layer: Optional foreground layer on top of the image.
    :param int index: Image Index
    :return: None; if called asynchronously, returns the request thread.
    """
    # This convenience wrapper always unwraps the response so callers
    # get only the payload, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the data for sync
    # calls and the request thread for async ones, so one delegating
    # return covers both paths.
    return self.head_items_by_id_images_by_type_with_http_info(id, type, **kwargs)  # noqa: E501
def head_items_by_id_images_by_type_with_http_info(self, id, type, **kwargs):  # noqa: E501
    """head_items_by_id_images_by_type  # noqa: E501

    HEAD /Items/{Id}/Images/{Type} -- probe an item image (headers only).
    No authentication required. Synchronous by default; pass
    async_req=True for an asynchronous request.

    >>> thread = api.head_items_by_id_images_by_type_with_http_info(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # (python_name, wire_name) for every query parameter, in the order the
    # generated API contract emits them.
    query_param_map = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    ]
    all_params = ['id', 'type'] + [name for name, _ in query_param_map]
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'id': id, 'type': type}
    # dict.items() iterates identically on Python 2 and 3; the six shim is
    # unnecessary here.
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_items_by_id_images_by_type" % key
            )
        params[key] = val

    # Both path parameters are required and may not be None.
    for required in ('id', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_items_by_id_images_by_type`" % required)  # noqa: E501

    collection_formats = {}
    path_params = {'Id': params['id'], 'Type': params['type']}
    # Only parameters the caller actually supplied go on the query string.
    query_params = [(wire, params[name])
                    for name, wire in query_param_map if name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_items_by_id_images_by_type_by_index(self, id, type, index, **kwargs):  # noqa: E501
    """head_items_by_id_images_by_type_by_index  # noqa: E501

    HEAD request for an item's image at a specific index. No authentication
    required. Synchronous by default; pass async_req=True for an
    asynchronous request.

    >>> thread = api.head_items_by_id_images_by_type_by_index(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Return only the payload, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.head_items_by_id_images_by_type_by_index_with_http_info(id, type, index, **kwargs)  # noqa: E501
    # Synchronous mode: block until the request completes.
    response = self.head_items_by_id_images_by_type_by_index_with_http_info(id, type, index, **kwargs)  # noqa: E501
    return response
def head_items_by_id_images_by_type_by_index_with_http_info(self, id, type, index, **kwargs):  # noqa: E501
    """head_items_by_id_images_by_type_by_index  # noqa: E501

    HEAD /Items/{Id}/Images/{Type}/{Index} -- probe a specific item image
    (headers only). No authentication required. Synchronous by default;
    pass async_req=True for an asynchronous request.

    >>> thread = api.head_items_by_id_images_by_type_by_index_with_http_info(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # (python_name, wire_name) for every query parameter, in the order the
    # generated API contract emits them.
    query_param_map = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
    ]
    all_params = ['id', 'type', 'index'] + [name for name, _ in query_param_map]
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'id': id, 'type': type, 'index': index}
    # dict.items() iterates identically on Python 2 and 3; the six shim is
    # unnecessary here.
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_items_by_id_images_by_type_by_index" % key
            )
        params[key] = val

    # All three path parameters are required and may not be None.
    for required in ('id', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_items_by_id_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}
    path_params = {
        'Id': params['id'],
        'Type': params['type'],
        'Index': params['index'],
    }
    # Only parameters the caller actually supplied go on the query string.
    query_params = [(wire, params[name])
                    for name, wire in query_param_map if name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount(self, id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, **kwargs):  # noqa: E501
    """head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount  # noqa: E501

    HEAD request for an item image addressed entirely by path segments.
    No authentication required. Synchronous by default; pass
    async_req=True for an asynchronous request.

    >>> thread = api.head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount(id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param int max_width: The maximum image width to return. (required)
    :param int max_height: The maximum image height to return. (required)
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers. (required)
    :param str format: Determines the output format of the image - original,gif,jpg,png (required)
    :param float percent_played: Optional percent to render for the percent played overlay (required)
    :param int unplayed_count: Optional unplayed count overlay to render (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param bool add_played_indicator: Optional. Add a played indicator
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Return only the payload, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount_with_http_info(id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, **kwargs)  # noqa: E501
    # Synchronous mode: block until the request completes.
    response = self.head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount_with_http_info(id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, **kwargs)  # noqa: E501
    return response
def head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount_with_http_info(self, id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, **kwargs):  # noqa: E501
    """head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount  # noqa: E501

    HEAD /Items/{Id}/Images/{Type}/{Index}/{Tag}/{Format}/{MaxWidth}/{MaxHeight}/{PercentPlayed}/{UnplayedCount}
    -- probe an item image addressed entirely by path segments (headers
    only). No authentication required. Synchronous by default; pass
    async_req=True for an asynchronous request.

    >>> thread = api.head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount_with_http_info(id, max_width, max_height, tag, format, percent_played, unplayed_count, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param int max_width: The maximum image width to return. (required)
    :param int max_height: The maximum image height to return. (required)
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers. (required)
    :param str format: Determines the output format of the image - original,gif,jpg,png (required)
    :param float percent_played: Optional percent to render for the percent played overlay (required)
    :param int unplayed_count: Optional unplayed count overlay to render (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param bool add_played_indicator: Optional. Add a played indicator
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # (python_name, wire_name) pairs, in the order the generated API
    # contract emits them.  All path parameters here are required.
    path_param_map = [
        ('id', 'Id'),
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('tag', 'Tag'),
        ('format', 'Format'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('type', 'Type'),
        ('index', 'Index'),
    ]
    query_param_map = [
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
    ]
    all_params = [name for name, _ in path_param_map]
    all_params += [name for name, _ in query_param_map]
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {
        'id': id,
        'max_width': max_width,
        'max_height': max_height,
        'tag': tag,
        'format': format,
        'percent_played': percent_played,
        'unplayed_count': unplayed_count,
        'type': type,
        'index': index,
    }
    # dict.items() iterates identically on Python 2 and 3; the six shim is
    # unnecessary here.
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount" % key
            )
        params[key] = val

    # Every path parameter is required and may not be None; validate in the
    # documented order so the first missing one is reported.
    for required, _ in path_param_map:
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_items_by_id_images_by_type_by_index_by_tag_by_format_by_maxwidth_by_maxheight_by_percentplayed_by_unplayedcount`" % required)  # noqa: E501

    collection_formats = {}
    path_params = {wire: params[name] for name, wire in path_param_map}
    # Only parameters the caller actually supplied go on the query string.
    query_params = [(wire, params[name])
                    for name, wire in query_param_map if name in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}/{Index}/{Tag}/{Format}/{MaxWidth}/{MaxHeight}/{PercentPlayed}/{UnplayedCount}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_musicgenres_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """head_musicgenres_by_name_images_by_type  # noqa: E501

    HEAD request for a music genre's image. No authentication required.
    Synchronous by default; pass async_req=True for an asynchronous
    request.

    >>> thread = api.head_musicgenres_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Return only the payload, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.head_musicgenres_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
    # Synchronous mode: block until the request completes.
    response = self.head_musicgenres_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
    return response
def head_musicgenres_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """head_musicgenres_by_name_images_by_type  # noqa: E501

    HEAD /MusicGenres/{Name}/Images/{Type} -- probe a music genre image
    (headers only). No authentication required. Synchronous by default;
    pass async_req=True for an asynchronous request.

    >>> thread = api.head_musicgenres_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # (python_name, wire_name) for every query parameter, in the order the
    # generated API contract emits them.
    query_param_map = [
        ('max_width', 'MaxWidth'),
        ('max_height', 'MaxHeight'),
        ('width', 'Width'),
        ('height', 'Height'),
        ('quality', 'Quality'),
        ('tag', 'Tag'),
        ('crop_whitespace', 'CropWhitespace'),
        ('enable_image_enhancers', 'EnableImageEnhancers'),
        ('format', 'Format'),
        ('add_played_indicator', 'AddPlayedIndicator'),
        ('percent_played', 'PercentPlayed'),
        ('unplayed_count', 'UnplayedCount'),
        ('background_color', 'BackgroundColor'),
        ('foreground_layer', 'ForegroundLayer'),
        ('index', 'Index'),
    ]
    all_params = ['name', 'type'] + [n for n, _ in query_param_map]
    all_params += ['async_req', '_return_http_data_only',
                   '_preload_content', '_request_timeout']

    params = {'name': name, 'type': type}
    # dict.items() iterates identically on Python 2 and 3; the six shim is
    # unnecessary here.
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_musicgenres_by_name_images_by_type" % key
            )
        params[key] = val

    # Both path parameters are required and may not be None.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_musicgenres_by_name_images_by_type`" % required)  # noqa: E501

    collection_formats = {}
    path_params = {'Name': params['name'], 'Type': params['type']}
    # Only parameters the caller actually supplied go on the query string.
    query_params = [(wire, params[n])
                    for n, wire in query_param_map if n in params]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/MusicGenres/{Name}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_musicgenres_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """head_musicgenres_by_name_images_by_type_by_index  # noqa: E501

    Issue a HEAD request for a specific music-genre image. No
    authentication required. Delegates to the ``_with_http_info``
    variant with ``_return_http_data_only`` forced on.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_musicgenres_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap to the payload; with async_req the delegate already
    # returns the request thread, so a single call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_musicgenres_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def head_musicgenres_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """head_musicgenres_by_name_images_by_type_by_index  # noqa: E501

    HEAD /MusicGenres/{Name}/Images/{Type}/{Index}. No authentication
    required.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_musicgenres_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'name', 'type', 'index', 'max_width', 'max_height', 'width',
        'height', 'quality', 'tag', 'crop_whitespace',
        'enable_image_enhancers', 'format', 'add_played_indicator',
        'percent_played', 'unplayed_count', 'background_color',
        'foreground_layer',
        # framework-level options accepted by every endpoint
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Fold keyword arguments into the locals() snapshot, rejecting
    # anything the endpoint does not declare.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_musicgenres_by_name_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']

    # Required path parameters must be present and non-None.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_musicgenres_by_name_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}

    path_params = {}
    for attr, wire_name in (('name', 'Name'), ('type', 'Type'), ('index', 'Index')):
        if attr in params:
            path_params[wire_name] = params[attr]

    # Optional query parameters, preserving the documented order.
    query_params = [
        (wire_name, params[attr])
        for attr, wire_name in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/MusicGenres/{Name}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_persons_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """head_persons_by_name_images_by_type  # noqa: E501

    Issue a HEAD request for a person image. No authentication
    required. Delegates to the ``_with_http_info`` variant with
    ``_return_http_data_only`` forced on.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_persons_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap to the payload; with async_req the delegate already
    # returns the request thread, so a single call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_persons_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def head_persons_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """head_persons_by_name_images_by_type  # noqa: E501

    HEAD /Persons/{Name}/Images/{Type}. No authentication required.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_persons_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'name', 'type', 'max_width', 'max_height', 'width', 'height',
        'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers',
        'format', 'add_played_indicator', 'percent_played',
        'unplayed_count', 'background_color', 'foreground_layer',
        'index',
        # framework-level options accepted by every endpoint
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Fold keyword arguments into the locals() snapshot, rejecting
    # anything the endpoint does not declare.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_persons_by_name_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']

    # Required path parameters must be present and non-None.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_persons_by_name_images_by_type`" % required)  # noqa: E501

    collection_formats = {}

    path_params = {}
    for attr, wire_name in (('name', 'Name'), ('type', 'Type')):
        if attr in params:
            path_params[wire_name] = params[attr]

    # Optional query parameters, preserving the documented order.
    query_params = [
        (wire_name, params[attr])
        for attr, wire_name in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
            ('index', 'Index'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Persons/{Name}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_persons_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """head_persons_by_name_images_by_type_by_index  # noqa: E501

    Issue a HEAD request for a specific person image. No authentication
    required. Delegates to the ``_with_http_info`` variant with
    ``_return_http_data_only`` forced on.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_persons_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap to the payload; with async_req the delegate already
    # returns the request thread, so a single call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_persons_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def head_persons_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """head_persons_by_name_images_by_type_by_index  # noqa: E501

    HEAD /Persons/{Name}/Images/{Type}/{Index}. No authentication
    required.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_persons_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'name', 'type', 'index', 'max_width', 'max_height', 'width',
        'height', 'quality', 'tag', 'crop_whitespace',
        'enable_image_enhancers', 'format', 'add_played_indicator',
        'percent_played', 'unplayed_count', 'background_color',
        'foreground_layer',
        # framework-level options accepted by every endpoint
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Fold keyword arguments into the locals() snapshot, rejecting
    # anything the endpoint does not declare.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_persons_by_name_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']

    # Required path parameters must be present and non-None.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_persons_by_name_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}

    path_params = {}
    for attr, wire_name in (('name', 'Name'), ('type', 'Type'), ('index', 'Index')):
        if attr in params:
            path_params[wire_name] = params[attr]

    # Optional query parameters, preserving the documented order.
    query_params = [
        (wire_name, params[attr])
        for attr, wire_name in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Persons/{Name}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_studios_by_name_images_by_type(self, name, type, **kwargs):  # noqa: E501
    """head_studios_by_name_images_by_type  # noqa: E501

    Issue a HEAD request for a studio image. No authentication
    required. Delegates to the ``_with_http_info`` variant with
    ``_return_http_data_only`` forced on.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_studios_by_name_images_by_type(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap to the payload; with async_req the delegate already
    # returns the request thread, so a single call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_studios_by_name_images_by_type_with_http_info(name, type, **kwargs)  # noqa: E501
def head_studios_by_name_images_by_type_with_http_info(self, name, type, **kwargs):  # noqa: E501
    """head_studios_by_name_images_by_type  # noqa: E501

    HEAD /Studios/{Name}/Images/{Type}. No authentication required.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_studios_by_name_images_by_type_with_http_info(name, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = [
        'name', 'type', 'max_width', 'max_height', 'width', 'height',
        'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers',
        'format', 'add_played_indicator', 'percent_played',
        'unplayed_count', 'background_color', 'foreground_layer',
        'index',
        # framework-level options accepted by every endpoint
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Fold keyword arguments into the locals() snapshot, rejecting
    # anything the endpoint does not declare.
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_studios_by_name_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']

    # Required path parameters must be present and non-None.
    for required in ('name', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_studios_by_name_images_by_type`" % required)  # noqa: E501

    collection_formats = {}

    path_params = {}
    for attr, wire_name in (('name', 'Name'), ('type', 'Type')):
        if attr in params:
            path_params[wire_name] = params[attr]

    # Optional query parameters, preserving the documented order.
    query_params = [
        (wire_name, params[attr])
        for attr, wire_name in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
            ('index', 'Index'),
        )
        if attr in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting: none required for this endpoint.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Studios/{Name}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_studios_by_name_images_by_type_by_index(self, name, type, index, **kwargs):  # noqa: E501
    """head_studios_by_name_images_by_type_by_index  # noqa: E501

    Issue a HEAD request for a specific studio image. No authentication
    required. Delegates to the ``_with_http_info`` variant with
    ``_return_http_data_only`` forced on.

    Synchronous by default; pass async_req=True to get the request
    thread instead:

    >>> thread = api.head_studios_by_name_images_by_type_by_index(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always unwrap to the payload; with async_req the delegate already
    # returns the request thread, so a single call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.head_studios_by_name_images_by_type_by_index_with_http_info(name, type, index, **kwargs)  # noqa: E501
def head_studios_by_name_images_by_type_by_index_with_http_info(self, name, type, index, **kwargs):  # noqa: E501
    """head_studios_by_name_images_by_type_by_index  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.head_studios_by_name_images_by_type_by_index_with_http_info(name, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: Item name (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer']  # noqa: E501
    # Framework-level keyword arguments accepted by every endpoint method.
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    # Reject unknown keyword arguments, merge known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_studios_by_name_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be present and not None.
    for required in ('name', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_studios_by_name_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}

    # Path placeholders for /Studios/{Name}/Images/{Type}/{Index}.
    path_params = {}
    for py_name, api_name in (('name', 'Name'), ('type', 'Type'), ('index', 'Index')):
        if py_name in params:
            path_params[api_name] = params[py_name]

    # Optional query-string parameters, in the order the generator emits them.
    query_params = [
        (api_name, params[py_name])
        for py_name, api_name in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
        )
        if py_name in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Studios/{Name}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_users_by_id_images_by_type(self, id, type, **kwargs):  # noqa: E501
    """head_users_by_id_images_by_type  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.head_users_by_id_images_by_type(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the response data only; both the sync and async paths
    # simply return the *_with_http_info helper's result, so no branch needed.
    kwargs['_return_http_data_only'] = True
    return self.head_users_by_id_images_by_type_with_http_info(id, type, **kwargs)  # noqa: E501
def head_users_by_id_images_by_type_with_http_info(self, id, type, **kwargs):  # noqa: E501
    """head_users_by_id_images_by_type  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.head_users_by_id_images_by_type_with_http_info(id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'type', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer', 'index']  # noqa: E501
    # Framework-level keyword arguments accepted by every endpoint method.
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    # Reject unknown keyword arguments, merge known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_users_by_id_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be present and not None.
    for required in ('id', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_users_by_id_images_by_type`" % required)  # noqa: E501

    collection_formats = {}

    # Path placeholders for /Users/{Id}/Images/{Type}.
    path_params = {}
    for py_name, api_name in (('id', 'Id'), ('type', 'Type')):
        if py_name in params:
            path_params[api_name] = params[py_name]

    # Optional query-string parameters, in the order the generator emits them.
    query_params = [
        (api_name, params[py_name])
        for py_name, api_name in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
            ('index', 'Index'),
        )
        if py_name in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def head_users_by_id_images_by_type_by_index(self, id, type, index, **kwargs):  # noqa: E501
    """head_users_by_id_images_by_type_by_index  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.head_users_by_id_images_by_type_by_index(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the response data only; both the sync and async paths
    # simply return the *_with_http_info helper's result, so no branch needed.
    kwargs['_return_http_data_only'] = True
    return self.head_users_by_id_images_by_type_by_index_with_http_info(id, type, index, **kwargs)  # noqa: E501
def head_users_by_id_images_by_type_by_index_with_http_info(self, id, type, index, **kwargs):  # noqa: E501
    """head_users_by_id_images_by_type_by_index  # noqa: E501

    No authentication required  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.head_users_by_id_images_by_type_by_index_with_http_info(id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int max_width: The maximum image width to return.
    :param int max_height: The maximum image height to return.
    :param int width: The fixed image width to return.
    :param int height: The fixed image height to return.
    :param int quality: Optional quality setting, from 0-100. Defaults to 90 and should suffice in most cases.
    :param str tag: Optional. Supply the cache tag from the item object to receive strong caching headers.
    :param bool crop_whitespace: Specify if whitespace should be cropped out of the image. True/False. If unspecified, whitespace will be cropped from logos and clear art.
    :param bool enable_image_enhancers: Enable or disable image enhancers such as cover art.
    :param str format: Determines the output format of the image - original,gif,jpg,png
    :param bool add_played_indicator: Optional. Add a played indicator
    :param float percent_played: Optional percent to render for the percent played overlay
    :param int unplayed_count: Optional unplayed count overlay to render
    :param str background_color: Optional. Apply a background color for transparent images.
    :param str foreground_layer: Optional. Apply a foreground layer on top of the image.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'type', 'index', 'max_width', 'max_height', 'width', 'height', 'quality', 'tag', 'crop_whitespace', 'enable_image_enhancers', 'format', 'add_played_indicator', 'percent_played', 'unplayed_count', 'background_color', 'foreground_layer']  # noqa: E501
    # Framework-level keyword arguments accepted by every endpoint method.
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    # Reject unknown keyword arguments, merge known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method head_users_by_id_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be present and not None.
    for required in ('id', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `head_users_by_id_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}

    # Path placeholders for /Users/{Id}/Images/{Type}/{Index}.
    path_params = {}
    for py_name, api_name in (('id', 'Id'), ('type', 'Type'), ('index', 'Index')):
        if py_name in params:
            path_params[api_name] = params[py_name]

    # Optional query-string parameters, in the order the generator emits them.
    query_params = [
        (api_name, params[py_name])
        for py_name, api_name in (
            ('max_width', 'MaxWidth'),
            ('max_height', 'MaxHeight'),
            ('width', 'Width'),
            ('height', 'Height'),
            ('quality', 'Quality'),
            ('tag', 'Tag'),
            ('crop_whitespace', 'CropWhitespace'),
            ('enable_image_enhancers', 'EnableImageEnhancers'),
            ('format', 'Format'),
            ('add_played_indicator', 'AddPlayedIndicator'),
            ('percent_played', 'PercentPlayed'),
            ('unplayed_count', 'UnplayedCount'),
            ('background_color', 'BackgroundColor'),
            ('foreground_layer', 'ForegroundLayer'),
        )
        if py_name in params
    ]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: this endpoint requires none.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}/{Index}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_items_by_id_images_by_type(self, body, id, type, **kwargs):  # noqa: E501
    """post_items_by_id_images_by_type  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_items_by_id_images_by_type(body, id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the response data only; both the sync and async paths
    # simply return the *_with_http_info helper's result, so no branch needed.
    kwargs['_return_http_data_only'] = True
    return self.post_items_by_id_images_by_type_with_http_info(body, id, type, **kwargs)  # noqa: E501
def post_items_by_id_images_by_type_with_http_info(self, body, id, type, **kwargs):  # noqa: E501
    """post_items_by_id_images_by_type  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_items_by_id_images_by_type_with_http_info(body, id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['body', 'id', 'type', 'index']  # noqa: E501
    # Framework-level keyword arguments accepted by every endpoint method.
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    # Reject unknown keyword arguments, merge known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_items_by_id_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be present and not None.
    for required in ('body', 'id', 'type'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `post_items_by_id_images_by_type`" % required)  # noqa: E501

    collection_formats = {}

    # Path placeholders for /Items/{Id}/Images/{Type}.
    path_params = {}
    for py_name, api_name in (('id', 'Id'), ('type', 'Type')):
        if py_name in params:
            path_params[api_name] = params[py_name]

    # Only the optional image index goes on the query string.
    query_params = []
    if 'index' in params:
        query_params.append(('Index', params['index']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}

    # The binary image payload is sent as the raw request body.
    body_params = params.get('body')
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/octet-stream'])  # noqa: E501

    # Authentication setting: admin credentials required.
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_items_by_id_images_by_type_by_index(self, body, id, type, index, **kwargs):  # noqa: E501
    """post_items_by_id_images_by_type_by_index  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_items_by_id_images_by_type_by_index(body, id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the response data only; both the sync and async paths
    # simply return the *_with_http_info helper's result, so no branch needed.
    kwargs['_return_http_data_only'] = True
    return self.post_items_by_id_images_by_type_by_index_with_http_info(body, id, type, index, **kwargs)  # noqa: E501
def post_items_by_id_images_by_type_by_index_with_http_info(self, body, id, type, index, **kwargs):  # noqa: E501
    """post_items_by_id_images_by_type_by_index  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_items_by_id_images_by_type_by_index_with_http_info(body, id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['body', 'id', 'type', 'index']  # noqa: E501
    # Framework-level keyword arguments accepted by every endpoint method.
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    # Reject unknown keyword arguments, merge known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_items_by_id_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be present and not None.
    for required in ('body', 'id', 'type', 'index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `post_items_by_id_images_by_type_by_index`" % required)  # noqa: E501

    collection_formats = {}

    # Path placeholders for /Items/{Id}/Images/{Type}/{Index}.
    path_params = {}
    for py_name, api_name in (('id', 'Id'), ('type', 'Type'), ('index', 'Index')):
        if py_name in params:
            path_params[api_name] = params[py_name]

    # This endpoint takes no query-string parameters.
    query_params = []

    header_params = {}
    form_params = []
    local_var_files = {}

    # The binary image payload is sent as the raw request body.
    body_params = params.get('body')
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/octet-stream'])  # noqa: E501

    # Authentication setting: admin credentials required.
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}/{Index}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_items_by_id_images_by_type_by_index_index(self, id, type, index, new_index, **kwargs):  # noqa: E501
    """Updates the index for an item image  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_items_by_id_images_by_type_by_index_index(id, type, index, new_index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int new_index: The new image index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request the response data only; both the sync and async paths
    # simply return the *_with_http_info helper's result, so no branch needed.
    kwargs['_return_http_data_only'] = True
    return self.post_items_by_id_images_by_type_by_index_index_with_http_info(id, type, index, new_index, **kwargs)  # noqa: E501
def post_items_by_id_images_by_type_by_index_index_with_http_info(self, id, type, index, new_index, **kwargs):  # noqa: E501
    """Updates the index for an item image  # noqa: E501

    Requires authentication as administrator  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.post_items_by_id_images_by_type_by_index_index_with_http_info(id, type, index, new_index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: Item Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :param int new_index: The new image index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'type', 'index', 'new_index']  # noqa: E501
    # Framework-level keyword arguments accepted by every endpoint method.
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    # Reject unknown keyword arguments, merge known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_items_by_id_images_by_type_by_index_index" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be present and not None.
    for required in ('id', 'type', 'index', 'new_index'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `post_items_by_id_images_by_type_by_index_index`" % required)  # noqa: E501

    collection_formats = {}

    # Path placeholders for /Items/{Id}/Images/{Type}/{Index}/Index.
    path_params = {}
    for py_name, api_name in (('id', 'Id'), ('type', 'Type'), ('index', 'Index')):
        if py_name in params:
            path_params[api_name] = params[py_name]

    # The target position is passed on the query string.
    query_params = []
    if 'new_index' in params:
        query_params.append(('NewIndex', params['new_index']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting: admin credentials required.
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Items/{Id}/Images/{Type}/{Index}/Index', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id_images_by_type(self, body, id, type, **kwargs):  # noqa: E501
    """post_users_by_id_images_by_type  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_images_by_type(body, id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the delegate for only the response data (see `_return_http_data_only`).
    kwargs['_return_http_data_only'] = True
    # Async callers receive the request thread, sync callers the data; in
    # either case the delegate's return value is passed straight through.
    return self.post_users_by_id_images_by_type_with_http_info(
        body, id, type, **kwargs)  # noqa: E501
def post_users_by_id_images_by_type_with_http_info(self, body, id, type, **kwargs):  # noqa: E501
    """post_users_by_id_images_by_type  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_images_by_type_with_http_info(body, id, type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the standard request-control kwargs.
    all_params = ['body', 'id', 'type', 'index']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the locals (self/body/id/type/kwargs), then fold the caller's
    # extra keyword arguments in, rejecting any name not in all_params.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_images_by_type" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_images_by_type`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_images_by_type`")  # noqa: E501
    # verify the required parameter 'type' is set
    if ('type' not in params or
            params['type'] is None):
        raise ValueError("Missing the required parameter `type` when calling `post_users_by_id_images_by_type`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: {Id} and {Type} in the URL template below.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501
    if 'type' in params:
        path_params['Type'] = params['type']  # noqa: E501

    # 'index' is optional and travels as the `Index` query parameter.
    query_params = []
    if 'index' in params:
        query_params.append(('Index', params['index']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # The image payload is sent as the raw request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/octet-stream'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def post_users_by_id_images_by_type_by_index(self, body, id, type, index, **kwargs):  # noqa: E501
    """post_users_by_id_images_by_type_by_index  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_images_by_type_by_index(body, id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the delegate for only the response data (see `_return_http_data_only`).
    kwargs['_return_http_data_only'] = True
    # Async callers receive the request thread, sync callers the data; in
    # either case the delegate's return value is passed straight through.
    return self.post_users_by_id_images_by_type_by_index_with_http_info(
        body, id, type, index, **kwargs)  # noqa: E501
def post_users_by_id_images_by_type_by_index_with_http_info(self, body, id, type, index, **kwargs):  # noqa: E501
    """post_users_by_id_images_by_type_by_index  # noqa: E501

    Requires authentication as user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.post_users_by_id_images_by_type_by_index_with_http_info(body, id, type, index, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Object body: Binary stream (required)
    :param str id: User Id (required)
    :param str type: Image Type (required)
    :param int index: Image Index (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the standard request-control kwargs.
    all_params = ['body', 'id', 'type', 'index']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the locals, then fold the caller's extra keyword arguments in,
    # rejecting any name not in all_params.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method post_users_by_id_images_by_type_by_index" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `post_users_by_id_images_by_type_by_index`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `post_users_by_id_images_by_type_by_index`")  # noqa: E501
    # verify the required parameter 'type' is set
    if ('type' not in params or
            params['type'] is None):
        raise ValueError("Missing the required parameter `type` when calling `post_users_by_id_images_by_type_by_index`")  # noqa: E501
    # verify the required parameter 'index' is set
    if ('index' not in params or
            params['index'] is None):
        raise ValueError("Missing the required parameter `index` when calling `post_users_by_id_images_by_type_by_index`")  # noqa: E501

    collection_formats = {}

    # Path placeholders: {Id}, {Type} and {Index}; this endpoint sends no
    # query parameters.
    path_params = {}
    if 'id' in params:
        path_params['Id'] = params['id']  # noqa: E501
    if 'type' in params:
        path_params['Type'] = params['type']  # noqa: E501
    if 'index' in params:
        path_params['Index'] = params['index']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The image payload is sent as the raw request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/octet-stream'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apikeyauth', 'embyauth']  # noqa: E501

    return self.api_client.call_api(
        '/Users/{Id}/Images/{Type}/{Index}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 54.121959 | 279 | 0.651228 | 45,772 | 360,344 | 4.902823 | 0.005506 | 0.043777 | 0.040074 | 0.041063 | 0.997652 | 0.997478 | 0.997237 | 0.996899 | 0.996805 | 0.996515 | 0 | 0.015411 | 0.261353 | 360,344 | 6,657 | 280 | 54.130089 | 0.827713 | 0.416624 | 0 | 0.874237 | 1 | 0.000555 | 0.270208 | 0.079338 | 0 | 0 | 0 | 0 | 0 | 1 | 0.024709 | false | 0 | 0.00111 | 0 | 0.062743 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7dc81f4fd866125b39e8c8eb10c7d31bcf83f927 | 239 | py | Python | nmigen/compat/genlib/fsm.py | psumesh/nmigen | 7d611b8fc1d9e58853ff268ec38ff8f4131a9774 | [
"BSD-2-Clause"
] | 528 | 2020-01-28T18:21:00.000Z | 2021-12-09T06:27:51.000Z | nmigen/compat/genlib/fsm.py | DX-MON/nmigen | a6a13dd612ee1c9215719c70a5aa410a8775ffdb | [
"BSD-2-Clause"
] | 360 | 2020-01-28T18:34:30.000Z | 2021-12-10T08:03:32.000Z | nmigen/compat/genlib/fsm.py | DX-MON/nmigen | a6a13dd612ee1c9215719c70a5aa410a8775ffdb | [
"BSD-2-Clause"
] | 100 | 2020-02-06T21:55:46.000Z | 2021-11-25T19:20:44.000Z | from amaranth.compat.genlib.fsm import *
from amaranth.compat.genlib.fsm import __all__
import warnings
# Compatibility shim: the nmigen project was renamed to Amaranth.  This module
# re-exports amaranth.compat.genlib.fsm (see the star import above) and warns
# importers to switch; stacklevel=2 attributes the warning to the importing
# module rather than to this shim.
warnings.warn("instead of nmigen.compat.genlib.fsm, use amaranth.compat.genlib.fsm",
              DeprecationWarning, stacklevel=2)
| 29.875 | 84 | 0.769874 | 31 | 239 | 5.806452 | 0.516129 | 0.266667 | 0.333333 | 0.383333 | 0.366667 | 0.366667 | 0 | 0 | 0 | 0 | 0 | 0.004854 | 0.138075 | 239 | 7 | 85 | 34.142857 | 0.868932 | 0 | 0 | 0 | 0 | 0 | 0.280335 | 0.213389 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7dcda2740360d1e631250629f85973dc81e41783 | 9,664 | py | Python | tests/test_parenthesize.py | sk-/craftier | c35c6c18b43c28cf7b0c457d00e94e0abeb47f15 | [
"MIT"
] | 3 | 2021-01-11T11:25:30.000Z | 2021-04-16T07:30:34.000Z | tests/test_parenthesize.py | sk-/craftier | c35c6c18b43c28cf7b0c457d00e94e0abeb47f15 | [
"MIT"
] | null | null | null | tests/test_parenthesize.py | sk-/craftier | c35c6c18b43c28cf7b0c457d00e94e0abeb47f15 | [
"MIT"
] | 1 | 2021-05-27T18:46:27.000Z | 2021-05-27T18:46:27.000Z | import itertools
import unittest
import libcst
import parameterized
from craftier import parenthesize
# Sample expressions grouped by precedence, ordered from lowest-binding to
# highest-binding: entries inside one inner list are treated as the same
# precedence level, so EXAMPLES[i + 1] binds tighter than EXAMPLES[i].
# (Some groups merge adjacent levels — e.g. `|`/`^`/`&` — which is the
# granularity the precedence tests below rely on.)
EXAMPLES = (
    # Named expression (walrus); parsed from an `if` statement because a
    # bare walrus is not a valid top-level expression.
    [
        libcst.ensure_type(
            libcst.parse_statement(
                "if a := 1:\n pass",
                config=libcst.PartialParserConfig(python_version="3.8"),
            ),
            libcst.If,
        ).test,
    ],
    # lambda
    [
        libcst.parse_expression("lambda x: x + 1"),
    ],
    # conditional (ternary) expression
    [
        libcst.parse_expression("x if x else y"),
    ],
    # boolean or
    [
        libcst.parse_expression("x or y"),
    ],
    # boolean and
    [
        libcst.parse_expression("x and y"),
    ],
    # boolean not
    [
        libcst.parse_expression("not x"),
    ],
    # comparisons, membership and identity tests (all one level in Python)
    [
        libcst.parse_expression("x in y"),
        libcst.parse_expression("x not in y"),
        libcst.parse_expression("x is y"),
        libcst.parse_expression("x is not y"),
        libcst.parse_expression("x < y"),
        libcst.parse_expression("x <= y"),
        libcst.parse_expression("x > y"),
        libcst.parse_expression("x >= y"),
        libcst.parse_expression("x != y"),
        libcst.parse_expression("x == y"),
    ],
    # bitwise |, ^, & (grouped as one level here)
    [
        libcst.parse_expression("x | y"),
        libcst.parse_expression("x ^ y"),
        libcst.parse_expression("x & y"),
    ],
    # shifts
    [
        libcst.parse_expression("x << y"),
        libcst.parse_expression("x >> y"),
    ],
    # additive
    [
        libcst.parse_expression("x + y"),
        libcst.parse_expression("x - y"),
    ],
    # multiplicative
    [
        libcst.parse_expression("x * y"),
        libcst.parse_expression("x @ y"),
        libcst.parse_expression("x / y"),
        libcst.parse_expression("x // y"),
        libcst.parse_expression("x % y"),
    ],
    # unary arithmetic / bitwise-not
    [
        libcst.parse_expression("+x"),
        libcst.parse_expression("-x"),
        libcst.parse_expression("~x"),
    ],
    # power
    [
        libcst.parse_expression("x ** y"),
    ],
    # await
    [
        libcst.parse_expression("await x"),
    ],
    # trailers: subscription, slicing, call, attribute access
    [
        libcst.parse_expression("x[a]"),
        libcst.parse_expression("x[a:b]"),
        libcst.parse_expression("x[a:b:c]"),
        libcst.parse_expression("x(a, b, c)"),
        libcst.parse_expression("x.a"),
    ],
    # displays: tuple/list/set/dict literals and comprehensions (atoms)
    [
        libcst.parse_expression("(x, y)"),
        libcst.parse_expression("[x, y]"),
        libcst.parse_expression("[x for x in y]"),
        libcst.parse_expression("{x: 1 for x in y}"),
        libcst.parse_expression("{x, y}"),
        libcst.parse_expression("{x for x in y}"),
    ],
)
# (node, parent) pairs in which both expressions come from the same
# precedence group of EXAMPLES.
SAME_PRECEDENCE = [
    pair
    for group in EXAMPLES
    for pair in itertools.product(group, repeat=2)
]
# (node, parent) pairs in which the parent binds less tightly than the node
# (node drawn from EXAMPLES[i + 1], parent from EXAMPLES[i]).
LOWER_PRECEDENCE = [
    pair
    for tighter_group, looser_group in zip(EXAMPLES[1:], EXAMPLES)
    for pair in itertools.product(tighter_group, looser_group)
]
# (node, parent) pairs in which the parent binds more tightly than the node
# (node drawn from EXAMPLES[i], parent from EXAMPLES[i + 1]).
HIGHER_PRECEDENCE = [
    pair
    for looser_group, tighter_group in zip(EXAMPLES, EXAMPLES[1:])
    for pair in itertools.product(looser_group, tighter_group)
]
class ParenthesizeTestCase(unittest.TestCase):
    """Mixin supplying parenthesization assertions for CST nodes."""

    def assert_has_parentheses(self, node: libcst.CSTNode) -> None:
        """Fail the test unless *node* carries at least one parenthesis."""
        parens = getattr(node, "lpar")
        if parens:
            return
        self.fail(f"Node {type(node).__name__} is not parenthesized")
class ParenthesizeUsingParentTest(ParenthesizeTestCase):
    """Tests for ``parenthesize.parenthesize_using_parent``.

    Each test passes a ``node`` and the ``parent`` it will be placed under,
    and checks whether parentheses are added (a new node) or not (the very
    same node object is returned, asserted via ``assertIs``).
    """

    def test_expression_already_parenthesized(self) -> None:
        # Already-parenthesized nodes are returned unchanged.
        node = libcst.parse_expression("(a + b)")
        new_node = parenthesize.parenthesize_using_parent(
            node, libcst.parse_expression("a * (a + b)")
        )
        self.assertIs(new_node, node)

    def test_not_parenthesizable(self) -> None:
        # Statements cannot carry parentheses, so they come back unchanged.
        node = libcst.parse_statement("return foo")
        new_node = parenthesize.parenthesize_using_parent(
            node, libcst.parse_expression("a * (a + b)")
        )
        self.assertIs(new_node, node)

    def test_tuple_requires_paren(self) -> None:
        # A bare tuple placed under a call must gain parentheses.
        node = libcst.parse_expression("1, 2, 3")
        new_node = parenthesize.parenthesize_using_parent(
            node, libcst.Call(func=libcst.Name("func"))
        )
        self.assert_has_parentheses(new_node)

    def test_tuple_return(self) -> None:
        # ``return 1, 2, 3`` is valid, so no parentheses are needed.
        node = libcst.parse_expression("1, 2, 3")
        new_node = parenthesize.parenthesize_using_parent(node, libcst.Return())
        self.assertIs(new_node, node)

    def test_generator_only_argument_function_call(self) -> None:
        # A generator that is the sole call argument may remain bare.
        node = libcst.parse_expression("(x for x in foo)").with_changes(
            lpar=[], rpar=[]
        )
        new_node = parenthesize.parenthesize_using_parent(
            node, libcst.parse_expression("max(x for x in foo)")
        )
        self.assertIs(new_node, node)

    def test_generator_many_argument_function_call(self) -> None:
        # With additional arguments the generator must be parenthesized.
        node = libcst.parse_expression("(x for x in foo)").with_changes(
            lpar=[], rpar=[]
        )
        new_node = parenthesize.parenthesize_using_parent(
            node, libcst.parse_expression("max((x for x in foo), foo)")
        )
        self.assert_has_parentheses(new_node)

    def test_generator_return(self) -> None:
        # A generator expression in a return must be parenthesized.
        node = libcst.parse_expression("(x for x in foo)").with_changes(
            lpar=[], rpar=[]
        )
        new_node = parenthesize.parenthesize_using_parent(
            node, libcst.parse_statement("return (x for x in foo)")
        )
        self.assert_has_parentheses(new_node)

    @parameterized.parameterized.expand(HIGHER_PRECEDENCE)
    def test_expression_higher_precedence(
        self, node: libcst.CSTNode, parent: libcst.CSTNode
    ) -> None:
        # Parent binds tighter than node: parentheses are required.
        new_node = parenthesize.parenthesize_using_parent(node, parent)
        self.assert_has_parentheses(new_node)

    @parameterized.parameterized.expand(LOWER_PRECEDENCE)
    def test_expression_lower_precedence(
        self, node: libcst.CSTNode, parent: libcst.CSTNode
    ) -> None:
        # Node binds tighter than parent: no parentheses needed.
        new_node = parenthesize.parenthesize_using_parent(node, parent)
        self.assertIs(new_node, node)

    # BUG FIX: this test previously expanded LOWER_PRECEDENCE, which merely
    # duplicated test_expression_lower_precedence and left SAME_PRECEDENCE
    # unused.  NOTE(review): assumes equal-precedence placement needs no
    # parentheses — confirm against the parenthesize module's behavior.
    @parameterized.parameterized.expand(SAME_PRECEDENCE)
    def test_expression_same_precedence(
        self, node: libcst.CSTNode, parent: libcst.CSTNode
    ) -> None:
        new_node = parenthesize.parenthesize_using_parent(node, parent)
        self.assertIs(new_node, node)
class ParenthesizeUsingPreviousTest(ParenthesizeTestCase):
    """Tests for ``parenthesize.parenthesize_using_previous``.

    Each test passes a ``node`` and the ``previous`` node it replaces, and
    checks whether parentheses are added (a new node) or not (the very same
    node object is returned, asserted via ``assertIs``).
    """

    def test_expression_previous_parenthesized(self) -> None:
        # Parentheses on the previous node are carried over to the new one.
        node = libcst.parse_expression("a + b")
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.parse_expression("(a + b)")
        )
        self.assert_has_parentheses(new_node)

    def test_expression_already_parenthesized(self) -> None:
        # Already-parenthesized nodes are returned unchanged.
        node = libcst.parse_expression("(a + b)")
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.parse_expression("a * (a + b)")
        )
        self.assertIs(new_node, node)

    def test_not_parenthesizable(self) -> None:
        # Statements cannot carry parentheses, so they come back unchanged.
        node = libcst.parse_statement("return foo")
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.parse_expression("a * (a + b)")
        )
        self.assertIs(new_node, node)

    def test_tuple_requires_paren(self) -> None:
        # A bare tuple replacing a call node must gain parentheses.
        node = libcst.parse_expression("1, 2, 3")
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.Call(func=libcst.Name("func"))
        )
        self.assert_has_parentheses(new_node)

    def test_tuple_return(self) -> None:
        # Unlike the parent-based variant, the previous-based variant
        # parenthesizes a tuple replacing a Return node.
        node = libcst.parse_expression("1, 2, 3")
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.Return()
        )
        self.assert_has_parentheses(new_node)

    def test_generator_only_argument_function_call(self) -> None:
        # Unlike the parent-based variant, the generator is parenthesized
        # here even as a sole call argument.
        node = libcst.parse_expression("(x for x in foo)").with_changes(
            lpar=[], rpar=[]
        )
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.parse_expression("max(x for x in foo)")
        )
        self.assert_has_parentheses(new_node)

    def test_generator_many_argument_function_call(self) -> None:
        # With additional arguments the generator must be parenthesized.
        node = libcst.parse_expression("(x for x in foo)").with_changes(
            lpar=[], rpar=[]
        )
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.parse_expression("max((x for x in foo), foo)")
        )
        self.assert_has_parentheses(new_node)

    def test_generator_return(self) -> None:
        # A generator expression replacing a returned one stays parenthesized.
        node = libcst.parse_expression("(x for x in foo)").with_changes(
            lpar=[], rpar=[]
        )
        new_node = parenthesize.parenthesize_using_previous(
            node, libcst.parse_statement("return (x for x in foo)")
        )
        self.assert_has_parentheses(new_node)

    @parameterized.parameterized.expand(HIGHER_PRECEDENCE)
    def test_expression_higher_precedence(
        self, node: libcst.CSTNode, parent: libcst.CSTNode
    ) -> None:
        # Previous node binds tighter than node: parentheses are required.
        new_node = parenthesize.parenthesize_using_previous(node, parent)
        self.assert_has_parentheses(new_node)

    @parameterized.parameterized.expand(LOWER_PRECEDENCE)
    def test_expression_lower_precedence(
        self, node: libcst.CSTNode, parent: libcst.CSTNode
    ) -> None:
        # Node binds tighter than the previous node: no parentheses needed.
        new_node = parenthesize.parenthesize_using_previous(node, parent)
        self.assertIs(new_node, node)

    # BUG FIX: this test previously expanded LOWER_PRECEDENCE, which merely
    # duplicated test_expression_lower_precedence and left SAME_PRECEDENCE
    # unused.  NOTE(review): assumes equal-precedence replacement needs no
    # parentheses — confirm against the parenthesize module's behavior.
    @parameterized.parameterized.expand(SAME_PRECEDENCE)
    def test_expression_same_precedence(
        self, node: libcst.CSTNode, parent: libcst.CSTNode
    ) -> None:
        new_node = parenthesize.parenthesize_using_previous(node, parent)
        self.assertIs(new_node, node)
| 33.672474 | 80 | 0.628311 | 1,099 | 9,664 | 5.278435 | 0.090992 | 0.132736 | 0.235304 | 0.174453 | 0.862265 | 0.850371 | 0.84089 | 0.836408 | 0.816583 | 0.793829 | 0 | 0.002772 | 0.253518 | 9,664 | 286 | 81 | 33.79021 | 0.801358 | 0.009106 | 0 | 0.515873 | 0 | 0 | 0.075065 | 0.002196 | 0 | 0 | 0 | 0 | 0.087302 | 1 | 0.087302 | false | 0.003968 | 0.019841 | 0 | 0.119048 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7dd2b079b4e1da87786800f1ac4a68cdfb4227e7 | 55,864 | py | Python | octopus_deploy_swagger_client/octopus_deploy_client/variables_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | [
"Apache-2.0"
] | null | null | null | octopus_deploy_swagger_client/octopus_deploy_client/variables_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | [
"Apache-2.0"
] | null | null | null | octopus_deploy_swagger_client/octopus_deploy_client/variables_api.py | cvent/octopus-deploy-api-client | 0e03e842e1beb29b132776aee077df570b88366a | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from octopus_deploy_swagger_client.api_client import ApiClient
class VariablesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the client used for HTTP calls, creating a default if omitted."""
    # Use the supplied client when given; otherwise fall back to a
    # default-constructed ApiClient.
    self.api_client = ApiClient() if api_client is None else api_client
def custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action(self, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action  # noqa: E501

    Get list of non-printable variable characters. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[int]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the delegate for only the response data (see `_return_http_data_only`).
    kwargs['_return_http_data_only'] = True
    # Async callers receive the request thread, sync callers the data; in
    # either case the delegate's return value is passed straight through.
    return self.custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_with_http_info(**kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_with_http_info(self, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action  # noqa: E501

    Get list of non-printable variable characters. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[int]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no endpoint-specific parameters, only the
    # standard request-control kwargs.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the locals, then fold the caller's extra keyword arguments in,
    # rejecting any name not in all_params.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # No path, query, form or body parameters for this endpoint.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/variables/nonPrintableChars', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[int]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces(self, base_space_id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces  # noqa: E501

    Get list of non-printable variable characters. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[int]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the delegate for only the response data (see `_return_http_data_only`).
    kwargs['_return_http_data_only'] = True
    # Async callers receive the request thread, sync callers the data; in
    # either case the delegate's return value is passed straight through.
    return self.custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces_with_http_info(
        base_space_id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces  # noqa: E501

    Get list of non-printable variable characters. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[int]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Names this endpoint accepts, plus the standard request-control kwargs.
    all_params = ['base_space_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the locals, then fold the caller's extra keyword arguments in,
    # rejecting any name not in all_params.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if ('base_space_id' not in params or
            params['base_space_id'] is None):
        raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_check_for_non_printable_chars_action_spaces`")  # noqa: E501

    collection_formats = {}

    # Path placeholder: {baseSpaceId} in the URL template below.
    path_params = {}
    if 'base_space_id' in params:
        path_params['baseSpaceId'] = params['base_space_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/variables/nonPrintableChars', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[int]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action(self, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action  # noqa: E501

    List the names of variables that can be used in deployment actions. If a project is specified, this will include variables in that project. If a project environments filter is specified, project variables only scoped to the environment will be excluded. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the delegate for only the response data (see `_return_http_data_only`).
    kwargs['_return_http_data_only'] = True
    # Async callers receive the request thread, sync callers the data; in
    # either case the delegate's return value is passed straight through.
    return self.custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_with_http_info(**kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_with_http_info(self, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action  # noqa: E501

    List the names of variables that can be used in deployment actions. If a project is specified, this will include variables in that project. If a project environments filter is specified, project variables only scoped to the environment will be excluded. NOTE: This definition is not complete. We will be adding more detail in future releases of Octopus.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no endpoint-specific parameters, only the
    # standard request-control kwargs.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the locals, then fold the caller's extra keyword arguments in,
    # rejecting any name not in all_params.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # No path, query, form or body parameters for this endpoint.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/variables/names', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces(self, base_space_id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces  # noqa: E501

    List the names of variables that can be used in deployment actions for
    the given space. If a project is specified, this will include variables
    in that project. If a project environments filter is specified, project
    variables only scoped to the environment will be excluded.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always ask the delegate for the payload only,
    # without the surrounding HTTP status/headers tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(base_space_id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(base_space_id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces  # noqa: E501

    List the names of variables that can be used in deployment actions for
    the given space. If a project is specified, this will include variables
    in that project. If a project environments filter is specified, project
    variables only scoped to the environment will be excluded.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``base_space_id`` is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['base_space_id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if params.get('base_space_id') is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_variable_names_list_action_spaces`")  # noqa: E501

    collection_formats = {}

    # The space id fills the {baseSpaceId} placeholder in the URL template.
    path_params = {'baseSpaceId': params['base_space_id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/variables/names', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action(self, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action  # noqa: E501

    List the evaluated variables for a deployment.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(**kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(**kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_with_http_info(self, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action  # noqa: E501

    List the evaluated variables for a deployment.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/variables/preview', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VariableSetResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces(self, base_space_id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces  # noqa: E501

    List the evaluated variables for a deployment in the given space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(base_space_id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(base_space_id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces  # noqa: E501

    List the evaluated variables for a deployment in the given space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``base_space_id`` is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['base_space_id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if params.get('base_space_id') is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_variable_preview_list_action_spaces`")  # noqa: E501

    collection_formats = {}

    # The space id fills the {baseSpaceId} placeholder in the URL template.
    path_params = {'baseSpaceId': params['base_space_id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/variables/preview', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VariableSetResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action(self, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action  # noqa: E501

    Updates a variable set.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the resource (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_with_http_info(self, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action  # noqa: E501

    Updates a variable set.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the resource (required)
    :param library_variable_set_resource: optional resource sent as the
        PUT request body (previously accepted but undocumented).
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``id`` is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['id', 'library_variable_set_resource', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action`")  # noqa: E501

    collection_formats = {}

    # The resource id fills the {id} placeholder in the URL template.
    path_params = {'id': params['id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional resource keyword argument becomes the PUT body.
    body_params = params.get('library_variable_set_resource')

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/variables/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VariableSetResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces(self, base_space_id, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces  # noqa: E501

    Updates a variable set in the given space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the resource (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(base_space_id, id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(base_space_id, id, **kwargs)  # noqa: E501
def custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces  # noqa: E501

    Updates a variable set in the given space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the resource (required)
    :param library_variable_set_resource: optional resource sent as the
        PUT request body (previously accepted but undocumented).
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``base_space_id`` or ``id`` is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['base_space_id', 'id', 'library_variable_set_resource',
                  'async_req', '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if params.get('base_space_id') is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces`")  # noqa: E501
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `custom_action_response_descriptor_octopus_server_web_api_actions_variable_set_update_action_spaces`")  # noqa: E501

    collection_formats = {}

    # Both ids fill their placeholders in the URL template below.
    path_params = {'baseSpaceId': params['base_space_id'],
                   'id': params['id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional resource keyword argument becomes the PUT body.
    body_params = params.get('library_variable_set_resource')

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/variables/{id}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VariableSetResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_all_response_descriptor_variables_variable_set_variable_set_resource(self, **kwargs):  # noqa: E501
    """Get a list of VariableSetResources  # noqa: E501

    Lists all the variable sets in the supplied Octopus Deploy Space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_variables_variable_set_variable_set_resource(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[VariableSetResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_all_response_descriptor_variables_variable_set_variable_set_resource_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(**kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(**kwargs)  # noqa: E501
def list_all_response_descriptor_variables_variable_set_variable_set_resource_with_http_info(self, **kwargs):  # noqa: E501
    """Get a list of VariableSetResources  # noqa: E501

    Lists all the variable sets in the supplied Octopus Deploy Space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_variables_variable_set_variable_set_resource_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[VariableSetResource]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_all_response_descriptor_variables_variable_set_variable_set_resource" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/variables/all', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[VariableSetResource]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_all_response_descriptor_variables_variable_set_variable_set_resource_spaces(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of VariableSetResources  # noqa: E501

    Lists all the variable sets in the supplied Octopus Deploy Space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_variables_variable_set_variable_set_resource_spaces(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[VariableSetResource]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_all_response_descriptor_variables_variable_set_variable_set_resource_spaces_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(base_space_id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(base_space_id, **kwargs)  # noqa: E501
def list_all_response_descriptor_variables_variable_set_variable_set_resource_spaces_with_http_info(self, base_space_id, **kwargs):  # noqa: E501
    """Get a list of VariableSetResources  # noqa: E501

    Lists all the variable sets in the supplied Octopus Deploy Space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_all_response_descriptor_variables_variable_set_variable_set_resource_spaces_with_http_info(base_space_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :return: list[VariableSetResource]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``base_space_id`` is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['base_space_id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_all_response_descriptor_variables_variable_set_variable_set_resource_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if params.get('base_space_id') is None:
        raise ValueError("Missing the required parameter `base_space_id` when calling `list_all_response_descriptor_variables_variable_set_variable_set_resource_spaces`")  # noqa: E501

    collection_formats = {}

    # The space id fills the {baseSpaceId} placeholder in the URL template.
    path_params = {'baseSpaceId': params['base_space_id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/variables/all', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[VariableSetResource]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def load_response_descriptor_variables_variable_set_variable_set_resource(self, id, **kwargs):  # noqa: E501
    """Get a VariableSetResource by ID  # noqa: E501

    Gets a variable set by Id.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.load_response_descriptor_variables_variable_set_variable_set_resource(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the VariableSetResource to load (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.load_response_descriptor_variables_variable_set_variable_set_resource_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(id, **kwargs)  # noqa: E501
def load_response_descriptor_variables_variable_set_variable_set_resource_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get a VariableSetResource by ID  # noqa: E501

    Gets a variable set by Id.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.load_response_descriptor_variables_variable_set_variable_set_resource_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: ID of the VariableSetResource to load (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if ``id`` is missing or None.
    """
    # Keyword arguments accepted by this endpoint; anything else is an error.
    all_params = ['id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # dict.items() iterates identically to the generated six.iteritems()
    # call on both Python 2 and 3, removing this block's six dependency.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method load_response_descriptor_variables_variable_set_variable_set_resource" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_variables_variable_set_variable_set_resource`")  # noqa: E501

    collection_formats = {}

    # The resource id fills the {id} placeholder in the URL template.
    path_params = {'id': params['id']}  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`: this endpoint responds with JSON.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: any of the Octopus API-key mechanisms.
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/variables/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VariableSetResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def load_response_descriptor_variables_variable_set_variable_set_resource_spaces(self, base_space_id, id, **kwargs):  # noqa: E501
    """Get a VariableSetResource by ID  # noqa: E501

    Gets a variable set by Id.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.load_response_descriptor_variables_variable_set_variable_set_resource_spaces(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the VariableSetResource to load (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: strip the HTTP status/headers from the response.
    kwargs['_return_http_data_only'] = True
    delegate = self.load_response_descriptor_variables_variable_set_variable_set_resource_spaces_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread untouched.
        return delegate(base_space_id, id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return delegate(base_space_id, id, **kwargs)  # noqa: E501
def load_response_descriptor_variables_variable_set_variable_set_resource_spaces_with_http_info(self, base_space_id, id, **kwargs):  # noqa: E501
    """Get a VariableSetResource by ID  # noqa: E501

    Gets a variable set by Id.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.load_response_descriptor_variables_variable_set_variable_set_resource_spaces_with_http_info(base_space_id, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str base_space_id: ID of the space (required)
    :param str id: ID of the VariableSetResource to load (required)
    :return: VariableSetResource
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else passed in
    # **kwargs is a caller error and is rejected with TypeError below.
    all_params = ['base_space_id', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() is captured deliberately -- the resulting dict is
    # keyed by the exact local names defined so far ('base_space_id',
    # 'id', 'kwargs', 'all_params'), so renaming any local above would
    # silently break the parameter handling that follows.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method load_response_descriptor_variables_variable_set_variable_set_resource_spaces" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'base_space_id' is set
    if ('base_space_id' not in params or
            params['base_space_id'] is None):
        raise ValueError("Missing the required parameter `base_space_id` when calling `load_response_descriptor_variables_variable_set_variable_set_resource_spaces`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `load_response_descriptor_variables_variable_set_variable_set_resource_spaces`")  # noqa: E501

    collection_formats = {}

    # Map the snake_case Python arguments onto the camelCase URL
    # template placeholders.
    path_params = {}
    if 'base_space_id' in params:
        path_params['baseSpaceId'] = params['base_space_id']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['APIKeyHeader', 'APIKeyQuery', 'NugetApiKeyHeader']  # noqa: E501

    return self.api_client.call_api(
        '/api/{baseSpaceId}/variables/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VariableSetResource',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 48.034394 | 378 | 0.675086 | 6,743 | 55,864 | 5.216373 | 0.030847 | 0.037528 | 0.028146 | 0.066526 | 0.985359 | 0.985359 | 0.985359 | 0.982117 | 0.982117 | 0.982117 | 0 | 0.012892 | 0.251611 | 55,864 | 1,162 | 379 | 48.075732 | 0.82843 | 0.359265 | 0 | 0.829346 | 1 | 0 | 0.214611 | 0.098166 | 0 | 0 | 0 | 0 | 0 | 1 | 0.039872 | false | 0 | 0.00638 | 0 | 0.105263 | 0.017544 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7dd985832c2c806351be5a04d831c08530f3d255 | 2,501 | py | Python | tests/test_language_detection.py | leowmjw/Malaya | 33f39835eca08c238d2dd68aeca3b09c5d0a45ab | [
"MIT"
] | 2 | 2019-06-23T20:19:22.000Z | 2020-04-16T13:02:32.000Z | tests/test_language_detection.py | aizatrosli/Malaya | d326384d2c0925c139a7224b77ac20f0ad57f237 | [
"MIT"
] | null | null | null | tests/test_language_detection.py | aizatrosli/Malaya | d326384d2c0925c139a7224b77ac20f0ad57f237 | [
"MIT"
] | 1 | 2020-03-03T02:04:06.000Z | 2020-03-03T02:04:06.000Z | import malaya
def test_lang_labels():
    """The first language label exposed by malaya is 'OTHER'."""
    labels = malaya.get_language_labels()
    assert labels[0] == 'OTHER'
def test_multinomial_lang_sentence():
    """Multinomial detector labels a single Malay sentence as MALAY."""
    detector = malaya.multinomial_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    label = detector.predict(sentence)
    assert label == 'MALAY'
def test_multinomial_lang_sentence_proba():
    """Multinomial detector gives MALAY non-zero probability for Malay text."""
    detector = malaya.multinomial_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    probabilities = detector.predict(sentence, get_proba=True)
    assert probabilities['MALAY'] > 0
def test_multinomial_lang_sentences():
    """Multinomial batch prediction labels Malay sentences as MALAY."""
    detector = malaya.multinomial_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    predictions = detector.predict_batch([sentence, sentence])
    assert predictions[0] == 'MALAY'
def test_multinomial_lang_sentences_proba():
    """Multinomial batch probabilities give MALAY non-zero mass."""
    detector = malaya.multinomial_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    probabilities = detector.predict_batch([sentence, sentence], get_proba=True)
    assert probabilities[0]['MALAY'] > 0
def test_xgb_lang_sentence():
    """XGBoost detector labels a single Malay sentence as MALAY."""
    detector = malaya.xgb_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    label = detector.predict(sentence)
    assert label == 'MALAY'
def test_xgb_lang_sentence_proba():
    """XGBoost detector gives MALAY non-zero probability for Malay text."""
    detector = malaya.xgb_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    probabilities = detector.predict(sentence, get_proba=True)
    assert probabilities['MALAY'] > 0
def test_xgb_lang_sentences():
    """XGBoost batch prediction labels Malay sentences as MALAY."""
    detector = malaya.xgb_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    predictions = detector.predict_batch([sentence, sentence])
    assert predictions[0] == 'MALAY'
def test_xgb_lang_sentences_proba():
    """XGBoost batch probabilities give MALAY non-zero mass."""
    detector = malaya.xgb_detect_languages()
    sentence = 'beliau berkata program Inisitif Peduli Rakyat (IPR) yang diperkenalkan oleh kerajaan negeri Selangor lebih besar sumbangannya'
    probabilities = detector.predict_batch([sentence, sentence], get_proba=True)
    assert probabilities[0]['MALAY'] > 0
| 55.577778 | 144 | 0.788485 | 321 | 2,501 | 5.906542 | 0.121495 | 0.094937 | 0.084388 | 0.101266 | 0.966245 | 0.887658 | 0.863924 | 0.863924 | 0.863924 | 0.863924 | 0 | 0.004151 | 0.133147 | 2,501 | 44 | 145 | 56.840909 | 0.870387 | 0 | 0 | 0.457143 | 0 | 0 | 0.417833 | 0 | 0 | 0 | 0 | 0 | 0.257143 | 1 | 0.257143 | false | 0 | 0.028571 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7de5778322894808138351d4f2fdcc71b83bc990 | 150 | py | Python | schema_registry/serializers/__init__.py | fabiencelier/python-schema-registry-client | acca4c19559d79bf8b95d765d8b1af6708e18be8 | [
"MIT"
] | null | null | null | schema_registry/serializers/__init__.py | fabiencelier/python-schema-registry-client | acca4c19559d79bf8b95d765d8b1af6708e18be8 | [
"MIT"
] | null | null | null | schema_registry/serializers/__init__.py | fabiencelier/python-schema-registry-client | acca4c19559d79bf8b95d765d8b1af6708e18be8 | [
"MIT"
] | null | null | null | from schema_registry.serializers.message_serializer import MessageSerializer
from schema_registry.serializers.faust_serializer import FaustSerializer
| 50 | 76 | 0.92 | 16 | 150 | 8.375 | 0.625 | 0.149254 | 0.268657 | 0.432836 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.053333 | 150 | 2 | 77 | 75 | 0.943662 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
81b64e6b212defb4c571bde3a2c5431fada7ae25 | 21,928 | py | Python | keystone_tempest_plugin/tests/rbac/v3/test_project.py | openstack/keystone-tempest-plugin | 32e48a7ea5db99e31dae916f7964c8219025f257 | [
"Apache-2.0"
] | 8 | 2017-06-20T10:42:19.000Z | 2019-01-28T22:03:43.000Z | keystone_tempest_plugin/tests/rbac/v3/test_project.py | openstack/keystone-tempest-plugin | 32e48a7ea5db99e31dae916f7964c8219025f257 | [
"Apache-2.0"
] | null | null | null | keystone_tempest_plugin/tests/rbac/v3/test_project.py | openstack/keystone-tempest-plugin | 32e48a7ea5db99e31dae916f7964c8219025f257 | [
"Apache-2.0"
] | 2 | 2018-06-15T06:45:32.000Z | 2019-09-27T00:01:35.000Z | # Copyright 2020 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from tempest.api.identity import base
from tempest.lib.common.utils import data_utils
from tempest.lib import exceptions
from keystone_tempest_plugin.tests.rbac.v3 import base as rbac_base
class IdentityV3RbacProjectsTests(rbac_base.IdentityV3RbacBaseTests,
                                  metaclass=abc.ABCMeta):
    """Abstract contract for keystone project RBAC tests.

    Each concrete subclass sets ``credentials`` to a persona (e.g.
    ``system_admin``, ``domain_member``) and implements the abstract
    test methods below to assert what that persona may or may not do
    against the project API.
    """

    @classmethod
    def setup_clients(cls):
        super(IdentityV3RbacProjectsTests, cls).setup_clients()
        # The persona under test is the credential manager named by the
        # first entry of the subclass's ``credentials`` list.
        cls.persona = getattr(cls, 'os_%s' % cls.credentials[0])
        cls.client = cls.persona.projects_client
        cls.users_client = cls.persona.users_v3_client
        # Privileged clients used only for fixture setup/teardown.
        cls.admin_client = cls.os_system_admin
        cls.admin_projects_client = cls.admin_client.projects_client

    @abc.abstractmethod
    def test_identity_create_project(self):
        """Test identity:create_project policy.

        This test must check:
          * whether the persona can create a project
          * whether the persona can create a project in their own domain
          * whether the persona can create a project in another domain
        """
        pass

    @abc.abstractmethod
    def test_identity_get_project(self):
        """Test identity:get_project policy.

        This test must check:
          * whether the persona can get a project
          * whether the persona can get a project in their own domain
          * whether the persona can get a project in another domain
          * whether the persona can get a project that does not exist
          * whether the persona can get their own project
        """
        pass

    @abc.abstractmethod
    def test_identity_list_projects(self):
        """Test identity:list_projects policy.

        This test must check:
          * whether the persona can list all projects
          * whether the persona can list all projects in their own domain
          * whether the persona can list all projects in another domain
        """
        pass

    @abc.abstractmethod
    def test_identity_list_user_projects(self):
        """Test identity:list_user_projects policy.

        This test must check:
          * whether the persona can list projects of a user
          * whether the persona can list projects of a user in their own domain
          * whether the persona can list projects of a user in another domain
          * whether the persona can list projects for themself
        """
        pass

    @abc.abstractmethod
    def test_identity_update_project(self):
        """Test identity:update_project policy.

        This test must check:
          * whether the persona can update a project
          * whether the persona can update a project in their own domain
          * whether the persona can update a project in another domain
          * whether the persona can update a project that does not exist
        """
        pass

    @abc.abstractmethod
    def test_identity_delete_project(self):
        """Test identity:delete_project policy.

        This test must check
          * whether the persona can delete a project
          * whether the persona can delete a project in their own domain
          * whether the persona can delete a project in another domain
          * whether the persona can delete a project that does not exist
        """
        pass
class SystemAdminTests(IdentityV3RbacProjectsTests, base.BaseIdentityTest):
    """system-admin persona: full access to every project operation."""

    credentials = ['system_admin']

    def test_identity_create_project(self):
        # Persona may create a project directly; clean up via the admin
        # client after the test.
        project_id = self.do_request(
            'create_project', expected_status=201, name=data_utils.rand_name()
        )['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)

    def test_identity_get_project(self):
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('show_project', project_id=project_id)
        # user gets a 404 for nonexistent project
        self.do_request('show_project', expected_status=exceptions.NotFound,
                        project_id=data_utils.rand_uuid_hex())

    def test_identity_list_projects(self):
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        resp = self.do_request('list_projects')
        self.assertIn(project_id, [p['id'] for p in resp['projects']])

    def test_identity_list_user_projects(self):
        # Fixture: a fresh user with a role assignment on a fresh project.
        user_id = self.admin_client.users_v3_client.create_user(
            name=data_utils.rand_name())['user']['id']
        self.addCleanup(self.admin_client.users_v3_client.delete_user, user_id)
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        role_id = self.admin_client.roles_v3_client.create_role(
            name=data_utils.rand_name())['role']['id']
        self.addCleanup(self.admin_client.roles_v3_client.delete_role,
                        role_id)
        self.admin_client.roles_v3_client.create_user_role_on_project(
            project_id, user_id, role_id)
        # user can list projects for arbitrary user
        resp = self.do_request('list_user_projects', client=self.users_client,
                               user_id=user_id)
        self.assertIn(project_id, [p['id'] for p in resp['projects']])
        # user can list projects for self (system persona has no
        # project assignments, so the list is empty)
        resp = self.do_request('list_user_projects', client=self.users_client,
                               user_id=self.persona.credentials.user_id)
        self.assertEqual(0, len([p['id'] for p in resp['projects']]))

    def test_identity_update_project(self):
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('update_project',
                        project_id=project_id,
                        description=data_utils.arbitrary_string())
        # user gets a 404 for nonexistent domain
        self.do_request('update_project', expected_status=exceptions.NotFound,
                        project_id=data_utils.rand_uuid_hex(),
                        description=data_utils.arbitrary_string())

    def test_identity_delete_project(self):
        # No cleanup needed: the delete under test removes the project.
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.do_request('delete_project', expected_status=204,
                        project_id=project_id)
class SystemMemberTests(SystemAdminTests, base.BaseIdentityTest):
    """system-member persona: read access only.

    Inherits the read cases from SystemAdminTests and overrides the
    write cases to expect 403 Forbidden.
    """

    credentials = ['system_member', 'system_admin']

    def test_identity_create_project(self):
        self.do_request('create_project', expected_status=exceptions.Forbidden,
                        name=data_utils.rand_name())

    def test_identity_update_project(self):
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('update_project', expected_status=exceptions.Forbidden,
                        project_id=project_id,
                        description=data_utils.arbitrary_string())
        # user gets a 403 for nonexistent domain
        self.do_request('update_project', expected_status=exceptions.Forbidden,
                        project_id=data_utils.rand_uuid_hex())

    def test_identity_delete_project(self):
        # Delete is forbidden, so the project survives and must be
        # cleaned up by the admin client.
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('delete_project', expected_status=exceptions.Forbidden,
                        project_id=project_id)
class SystemReaderTests(SystemMemberTests):
    """system-reader persona: same expectations as system-member."""

    credentials = ['system_reader', 'system_admin']
class DomainAdminTests(IdentityV3RbacProjectsTests, base.BaseIdentityTest):
    """domain-admin persona: full access within its own domain only."""

    credentials = ['domain_admin', 'system_admin']

    def setUp(self):
        super(DomainAdminTests, self).setUp()
        self.own_domain = self.persona.credentials.domain_id
        self.other_domain = self.admin_client.domains_client.create_domain(
            name=data_utils.rand_name())['domain']['id']
        # addCleanup runs LIFO, so the domain is disabled first and
        # then deleted (keystone requires a domain to be disabled
        # before deletion).
        self.addCleanup(self.admin_client.domains_client.delete_domain,
                        self.other_domain)
        self.addCleanup(self.admin_client.domains_client.update_domain,
                        domain_id=self.other_domain, enabled=False)

    def test_identity_create_project(self):
        # user can create project in own domain
        project_id = self.do_request(
            'create_project', expected_status=201, name=data_utils.rand_name(),
            domain_id=self.own_domain
        )['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        # user cannot create project in other domain
        self.do_request(
            'create_project', expected_status=exceptions.Forbidden,
            name=data_utils.rand_name(), domain_id=self.other_domain
        )

    def test_identity_get_project(self):
        # user can get project in own domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('show_project', project_id=project_id)
        # user cannot get project in other domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.other_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('show_project', expected_status=exceptions.Forbidden,
                        project_id=project_id)
        # user gets a 403 for nonexistent project
        self.do_request('show_project', expected_status=exceptions.Forbidden,
                        project_id=data_utils.rand_uuid_hex())

    def test_identity_list_projects(self):
        # user can list projects but cannot see project in other domain
        own_project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project,
                        own_project_id)
        other_project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.other_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project,
                        other_project_id)
        resp = self.do_request('list_projects')
        self.assertIn(own_project_id, [d['id'] for d in resp['projects']])
        self.assertNotIn(other_project_id, [d['id'] for d in resp['projects']])

    def test_identity_list_user_projects(self):
        # user can list projects for user in own domain
        user_id = self.admin_client.users_v3_client.create_user(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['user']['id']
        self.addCleanup(self.admin_client.users_v3_client.delete_user, user_id)
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        role_id = self.admin_client.roles_v3_client.create_role(
            name=data_utils.rand_name())['role']['id']
        self.addCleanup(self.admin_client.roles_v3_client.delete_role,
                        role_id)
        self.admin_client.roles_v3_client.create_user_role_on_project(
            project_id, user_id, role_id)
        resp = self.do_request('list_user_projects', client=self.users_client,
                               user_id=user_id)
        self.assertIn(project_id, [p['id'] for p in resp['projects']])
        # user cannot list projects for user in other domain
        user_id = self.admin_client.users_v3_client.create_user(
            name=data_utils.rand_name(),
            domain_id=self.other_domain)['user']['id']
        self.addCleanup(self.admin_client.users_v3_client.delete_user, user_id)
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        role_id = self.admin_client.roles_v3_client.create_role(
            name=data_utils.rand_name())['role']['id']
        self.addCleanup(self.admin_client.roles_v3_client.delete_role,
                        role_id)
        self.admin_client.roles_v3_client.create_user_role_on_project(
            project_id, user_id, role_id)
        self.do_request('list_user_projects', client=self.users_client,
                        expected_status=exceptions.Forbidden,
                        user_id=user_id)
        # user can list projects for self (persona has no project
        # assignments, so the list is empty)
        resp = self.do_request('list_user_projects', client=self.users_client,
                               user_id=self.persona.credentials.user_id)
        self.assertEqual(0, len([p['id'] for p in resp['projects']]))

    def test_identity_update_project(self):
        # user can update project in own domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('update_project',
                        project_id=project_id,
                        description=data_utils.arbitrary_string())
        # user cannot update project in other domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.other_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('update_project',
                        expected_status=exceptions.Forbidden,
                        project_id=project_id,
                        description=data_utils.arbitrary_string())
        # user gets a 403 for nonexistent domain
        self.do_request('update_project', expected_status=exceptions.Forbidden,
                        project_id=data_utils.rand_uuid_hex(),
                        description=data_utils.arbitrary_string())

    def test_identity_delete_project(self):
        # user can delete project in own domain (no cleanup needed:
        # the delete under test removes it)
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['project']['id']
        self.do_request('delete_project', expected_status=204,
                        project_id=project_id)
        # user cannot delete project in other domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.other_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('delete_project', expected_status=exceptions.Forbidden,
                        project_id=project_id)
class DomainMemberTests(DomainAdminTests, base.BaseIdentityTest):
    """domain-member persona: read-only within its own domain.

    Inherits the read cases from DomainAdminTests and overrides the
    write cases to expect 403 Forbidden in both domains.
    """

    credentials = ['domain_member', 'system_admin']

    def test_identity_create_project(self):
        # user cannot create project in own domain
        self.do_request(
            'create_project', expected_status=exceptions.Forbidden,
            name=data_utils.rand_name(),
            domain_id=self.own_domain
        )
        # user cannot create project in other domain
        self.do_request(
            'create_project', expected_status=exceptions.Forbidden,
            name=data_utils.rand_name(), domain_id=self.other_domain
        )

    def test_identity_update_project(self):
        # user cannot update project in own domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('update_project',
                        expected_status=exceptions.Forbidden,
                        project_id=project_id,
                        description=data_utils.arbitrary_string())
        # user cannot update project in other domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.other_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('update_project',
                        expected_status=exceptions.Forbidden,
                        project_id=project_id,
                        description=data_utils.arbitrary_string())
        # user gets a 403 for nonexistent domain
        self.do_request('update_project', expected_status=exceptions.Forbidden,
                        project_id=data_utils.rand_uuid_hex(),
                        description=data_utils.arbitrary_string())

    def test_identity_delete_project(self):
        # user cannot delete project in own domain.
        # BUGFIX: the delete is expected to be Forbidden, so the project
        # survives the test and must be cleaned up by the admin client
        # (the original registered no cleanup here and leaked the
        # project, unlike the other-domain case below).
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('delete_project', expected_status=exceptions.Forbidden,
                        project_id=project_id)
        # user cannot delete project in other domain
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.other_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('delete_project', expected_status=exceptions.Forbidden,
                        project_id=project_id)
class DomainReaderTests(DomainMemberTests):
    """domain-reader persona: same expectations as domain-member."""

    credentials = ['domain_reader', 'system_admin']
class ProjectAdminTests(DomainReaderTests, base.BaseIdentityTest):
    """project-admin persona: access scoped to its own project only."""

    credentials = ['project_admin', 'system_admin']

    def test_identity_get_project(self):
        # user cannot get arbitrary project
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name(),
            domain_id=self.own_domain)['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        self.do_request('show_project', expected_status=exceptions.Forbidden,
                        project_id=project_id)
        # user gets a 403 for nonexistent project
        self.do_request('show_project', expected_status=exceptions.Forbidden,
                        project_id=data_utils.rand_uuid_hex())
        # user can get own project
        self.do_request('show_project',
                        project_id=self.persona.credentials.project_id)

    def test_identity_list_projects(self):
        # user cannot list projects
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project,
                        project_id)
        self.do_request('list_projects', expected_status=exceptions.Forbidden)

    def test_identity_list_user_projects(self):
        # user cannot list projects for another user (Forbidden below)
        user_id = self.admin_client.users_v3_client.create_user(
            name=data_utils.rand_name())['user']['id']
        self.addCleanup(self.admin_client.users_v3_client.delete_user, user_id)
        project_id = self.admin_projects_client.create_project(
            name=data_utils.rand_name())['project']['id']
        self.addCleanup(self.admin_projects_client.delete_project, project_id)
        role_id = self.admin_client.roles_v3_client.create_role(
            name=data_utils.rand_name())['role']['id']
        self.addCleanup(self.admin_client.roles_v3_client.delete_role,
                        role_id)
        self.admin_client.roles_v3_client.create_user_role_on_project(
            project_id, user_id, role_id)
        self.do_request('list_user_projects', client=self.users_client,
                        expected_status=exceptions.Forbidden,
                        user_id=user_id)
        # user can list projects for self
        resp = self.do_request('list_user_projects', client=self.users_client,
                               user_id=self.persona.credentials.user_id)
        self.assertIn(self.persona.credentials.project_id,
                      [p['id'] for p in resp['projects']])
class ProjectMemberTests(ProjectAdminTests):
    """project-member persona: same expectations as project-admin."""

    credentials = ['project_member', 'system_admin']
class ProjectReaderTests(ProjectAdminTests):
    """project-reader persona: same expectations as project-admin."""

    credentials = ['project_reader', 'system_admin']
| 46.854701 | 79 | 0.670239 | 2,676 | 21,928 | 5.19432 | 0.065396 | 0.081583 | 0.062662 | 0.07777 | 0.849281 | 0.827122 | 0.809137 | 0.773165 | 0.75223 | 0.71813 | 0 | 0.004279 | 0.243342 | 21,928 | 467 | 80 | 46.955032 | 0.833474 | 0.160617 | 0 | 0.792994 | 0 | 0 | 0.064117 | 0 | 0 | 0 | 0 | 0 | 0.025478 | 1 | 0.092357 | false | 0.019108 | 0.015924 | 0 | 0.16879 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
81d42f21cb748573c4118dca954389228e63cbcb | 179 | py | Python | fonduer/snorkel/parser/__init__.py | leewaymay/839_fonduer | 1692f018ef113d88dca4ede69cc2ead55b7b1003 | [
"Apache-2.0"
] | 1 | 2018-05-31T02:44:00.000Z | 2018-05-31T02:44:00.000Z | fonduer/snorkel/parser/__init__.py | leewaymay/839_fonduer | 1692f018ef113d88dca4ede69cc2ead55b7b1003 | [
"Apache-2.0"
] | null | null | null | fonduer/snorkel/parser/__init__.py | leewaymay/839_fonduer | 1692f018ef113d88dca4ede69cc2ead55b7b1003 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from .corpus_parser import *
from .doc_preprocessors import *
from .parser import *
from .spacy_parser import *
from .rule_parser import *
| 22.375 | 38 | 0.804469 | 24 | 179 | 5.625 | 0.416667 | 0.37037 | 0.355556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139665 | 179 | 7 | 39 | 25.571429 | 0.876623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c4de3aa8b1aca816f00e8596e6ff23799ff59baf | 5,250 | py | Python | t/08-tgroups.py | jeffa/Spreadsheet-HTML-python | 9bf28a0c9fcbdad28238ab0adbb3cdd327d572a9 | [
"MIT"
] | null | null | null | t/08-tgroups.py | jeffa/Spreadsheet-HTML-python | 9bf28a0c9fcbdad28238ab0adbb3cdd327d572a9 | [
"MIT"
] | null | null | null | t/08-tgroups.py | jeffa/Spreadsheet-HTML-python | 9bf28a0c9fcbdad28238ab0adbb3cdd327d572a9 | [
"MIT"
] | null | null | null | import unittest
from Spreadsheet.HTML import Table
class TestTGroups(unittest.TestCase):
def test_orientations(self):
data = [
['header1', 'header2', 'header3', 'header4'],
['foo1', 'bar1', 'baz1', 'qux1'],
['foo2', 'bar2', 'baz2', 'qux2'],
['foo3', 'bar3', 'baz3', 'qux3'],
['foo4', 'bar4', 'baz4', 'qux4']
]
gen = Table( { 'data': data, 'tgroups': 2 } )
self.assertEqual(
'<table><thead><tr><th>header1</th><th>header2</th><th>header3</th><th>header4</th></tr></thead><tfoot><tr><td>foo4</td><td>bar4</td><td>baz4</td><td>qux4</td></tr></tfoot><tbody><tr><td>foo1</td><td>bar1</td><td>baz1</td><td>qux1</td></tr><tr><td>foo2</td><td>bar2</td><td>baz2</td><td>qux2</td></tr><tr><td>foo3</td><td>bar3</td><td>baz3</td><td>qux3</td></tr></tbody></table>',
gen.generate(),
"tgroup tags present from generate()"
)
self.assertEqual(
'<table><thead><tr><th>header1</th><th>header2</th><th>header3</th><th>header4</th></tr></thead><tfoot><tr><td>foo4</td><td>bar4</td><td>baz4</td><td>qux4</td></tr></tfoot><tbody><tr><td>foo1</td><td>bar1</td><td>baz1</td><td>qux1</td></tr><tr><td>foo2</td><td>bar2</td><td>baz2</td><td>qux2</td></tr><tr><td>foo3</td><td>bar3</td><td>baz3</td><td>qux3</td></tr></tbody></table>',
gen.north(),
"tgroup tags present from north()"
)
self.assertEqual(
'<table><tr><th>header1</th><td>foo1</td><td>foo2</td><td>foo3</td><td>foo4</td></tr><tr><th>header2</th><td>bar1</td><td>bar2</td><td>bar3</td><td>bar4</td></tr><tr><th>header3</th><td>baz1</td><td>baz2</td><td>baz3</td><td>baz4</td></tr><tr><th>header4</th><td>qux1</td><td>qux2</td><td>qux3</td><td>qux4</td></tr></table>',
gen.landscape(),
"tgroup tags never present from landscape()"
)
self.assertEqual(
'<table><tr><th>header1</th><td>foo1</td><td>foo2</td><td>foo3</td><td>foo4</td></tr><tr><th>header2</th><td>bar1</td><td>bar2</td><td>bar3</td><td>bar4</td></tr><tr><th>header3</th><td>baz1</td><td>baz2</td><td>baz3</td><td>baz4</td></tr><tr><th>header4</th><td>qux1</td><td>qux2</td><td>qux3</td><td>qux4</td></tr></table>',
gen.west(),
"tgroup tags never present from west()"
)
self.assertEqual(
'<table><tr><td>foo1</td><td>bar1</td><td>baz1</td><td>qux1</td></tr><tr><td>foo2</td><td>bar2</td><td>baz2</td><td>qux2</td></tr><tr><td>foo3</td><td>bar3</td><td>baz3</td><td>qux3</td></tr><tr><td>foo4</td><td>bar4</td><td>baz4</td><td>qux4</td></tr><tr><th>header1</th><th>header2</th><th>header3</th><th>header4</th></tr></table>',
gen.south(),
"tgroup tags never present from south()"
)
self.assertEqual(
'<table><tr><td>foo1</td><td>foo2</td><td>foo3</td><td>foo4</td><th>header1</th></tr><tr><td>bar1</td><td>bar2</td><td>bar3</td><td>bar4</td><th>header2</th></tr><tr><td>baz1</td><td>baz2</td><td>baz3</td><td>baz4</td><th>header3</th></tr><tr><td>qux1</td><td>qux2</td><td>qux3</td><td>qux4</td><th>header4</th></tr></table>',
gen.east(),
"tgroup tags never present from east()"
)
def test_ommisions(self):
    """Verify thead/tfoot are omitted per the tgroups/matrix generate() options."""
    table_rows = [
        ['header1', 'header2', 'header3', 'header4'],
        ['foo1', 'bar1', 'baz1', 'qux1'],
        ['foo2', 'bar2', 'baz2', 'qux2'],
        ['foo3', 'bar3', 'baz3', 'qux3'],
        ['foo4', 'bar4', 'baz4', 'qux4']
    ]
    gen = Table({'data': table_rows, 'tgroups': 2})
    # Expected markup when matrix mode suppresses both thead and tfoot; the
    # same output is produced for tgroups 1 and 2 in that mode.
    matrix_html = '<table><tbody><tr><td>header1</td><td>header2</td><td>header3</td><td>header4</td></tr><tr><td>foo1</td><td>bar1</td><td>baz1</td><td>qux1</td></tr><tr><td>foo2</td><td>bar2</td><td>baz2</td><td>qux2</td></tr><tr><td>foo3</td><td>bar3</td><td>baz3</td><td>qux3</td></tr><tr><td>foo4</td><td>bar4</td><td>baz4</td><td>qux4</td></tr></tbody></table>'
    cases = [
        (
            {'tgroups': 1},
            '<table><thead><tr><th>header1</th><th>header2</th><th>header3</th><th>header4</th></tr></thead><tbody><tr><td>foo1</td><td>bar1</td><td>baz1</td><td>qux1</td></tr><tr><td>foo2</td><td>bar2</td><td>baz2</td><td>qux2</td></tr><tr><td>foo3</td><td>bar3</td><td>baz3</td><td>qux3</td></tr><tr><td>foo4</td><td>bar4</td><td>baz4</td><td>qux4</td></tr></tbody></table>',
            "tfoot ommited when tgroups is 1",
        ),
        (
            {'matrix': 1, 'tgroups': 1},
            matrix_html,
            "thead and tfoot ommited for matrix when tgroups is 1",
        ),
        (
            {'matrix': 1, 'tgroups': 2},
            matrix_html,
            "thead and tfoot ommited for matrix when tgroups is 2",
        ),
    ]
    for opts, expected, message in cases:
        self.assertEqual(expected, gen.generate(opts), message)
# Allow this test module to be executed directly (python <file>.py); the
# unittest runner discovers and runs every TestCase defined above.
if __name__ == '__main__':
    unittest.main()
| 60.344828 | 392 | 0.542667 | 846 | 5,250 | 3.355792 | 0.076832 | 0.16062 | 0.052836 | 0.050722 | 0.892568 | 0.855935 | 0.842902 | 0.842902 | 0.828108 | 0.8031 | 0 | 0.052464 | 0.164952 | 5,250 | 86 | 393 | 61.046512 | 0.595119 | 0 | 0 | 0.426471 | 0 | 0.132353 | 0.707429 | 0.592571 | 0 | 0 | 0 | 0 | 0.132353 | 1 | 0.029412 | false | 0 | 0.029412 | 0 | 0.073529 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
4887b3c61e0a9f12f6e7094604878d590f425655 | 2,862 | py | Python | tests/test_mkshapes_shape.py | hsolbrig/csv2shex | f7a195673d7650d0234db9c8bdb644c6866394a8 | [
"CC0-1.0"
] | null | null | null | tests/test_mkshapes_shape.py | hsolbrig/csv2shex | f7a195673d7650d0234db9c8bdb644c6866394a8 | [
"CC0-1.0"
] | null | null | null | tests/test_mkshapes_shape.py | hsolbrig/csv2shex | f7a195673d7650d0234db9c8bdb644c6866394a8 | [
"CC0-1.0"
] | null | null | null | """Shape object holds statements sharing a common shape_id."""
from csv2shex.mkshapes import Shape
# Canonical fully-populated Shape instance used as the expected value in the
# equality assertions below: a start shape "@a" with three statements.
SHAPE_OBJECT = Shape(
    start=True,
    shape_id="@a",
    shape_statements=[
        {"prop_id": "dct:creator", "value_type": "URI"},
        {"prop_id": "dct:subject", "value_type": "URI"},
        {"prop_id": "dct:date", "value_type": "String"},
    ],
)
def test_shape_fields_individually_addressable():
    """Each field of a Shape can be read directly as an attribute."""
    shape = SHAPE_OBJECT
    assert shape.shape_id == "@a"
    assert shape.shape_statements[1] == {"prop_id": "dct:subject", "value_type": "URI"}
    assert shape.start
def test_shape_initialized_by_assignment():
    """A Shape built field-by-field equals one built via the constructor."""
    built = Shape()
    built.start = True
    built.shape_id = "@a"
    built.shape_statements = [
        {"prop_id": "dct:creator", "value_type": "URI"},
        {"prop_id": "dct:subject", "value_type": "URI"},
        {"prop_id": "dct:date", "value_type": "String"},
    ]
    assert built == SHAPE_OBJECT
def test_shape_initialized_with_no_propertyvalues_field_should_pass_for_now():
    """Test should pass for now but this condition should raise exception."""
    partial = Shape()
    partial.shape_id = "@a"
    partial.start = True
    assert partial == Shape(start=True, shape_id="@a")
def test_shape_initialized_with_no_start_field_should_pass_for_now():
    """Test should pass for now but this condition should raise exception."""
    statements = [
        {"prop_id": "dct:creator", "value_type": "URI"},
        {"prop_id": "dct:subject", "value_type": "URI"},
        {"prop_id": "dct:date", "value_type": "String"},
    ]
    partial = Shape()
    partial.shape_id = "@a"
    partial.shape_statements = list(statements)
    assert partial == Shape(shape_id="@a", shape_statements=statements)
def test_shape_initialized_with_no_shapeid_field_should_pass_for_now():
    """Test should pass for now but this condition should raise exception."""
    statements = [
        {"prop_id": "dct:creator", "value_type": "URI"},
        {"prop_id": "dct:subject", "value_type": "URI"},
        {"prop_id": "dct:date", "value_type": "String"},
    ]
    partial = Shape()
    partial.start = True
    partial.shape_statements = list(statements)
    assert partial == Shape(start=True, shape_statements=statements)
| 36.692308 | 86 | 0.643606 | 364 | 2,862 | 4.777473 | 0.137363 | 0.13456 | 0.098332 | 0.129385 | 0.829787 | 0.829787 | 0.763657 | 0.723404 | 0.723404 | 0.709028 | 0 | 0.000863 | 0.190077 | 2,862 | 77 | 87 | 37.168831 | 0.749353 | 0.117051 | 0 | 0.728814 | 0 | 0 | 0.241587 | 0 | 0 | 0 | 0 | 0 | 0.118644 | 1 | 0.084746 | false | 0.050847 | 0.016949 | 0 | 0.101695 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
b509226bc25970440df2f5843d9de382dda7e00d | 15,519 | py | Python | tests/sentry/api/endpoints/test_organization_member_team_details.py | pombredanne/django-sentry | 4ad09417fb3cfa3aa4a0d4175ae49fe02837c567 | [
"BSD-3-Clause"
] | null | null | null | tests/sentry/api/endpoints/test_organization_member_team_details.py | pombredanne/django-sentry | 4ad09417fb3cfa3aa4a0d4175ae49fe02837c567 | [
"BSD-3-Clause"
] | null | null | null | tests/sentry/api/endpoints/test_organization_member_team_details.py | pombredanne/django-sentry | 4ad09417fb3cfa3aa4a0d4175ae49fe02837c567 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import
from sentry.models import Organization, OrganizationAccessRequest, OrganizationMemberTeam
from sentry.testutils import APITestCase
class CreateOrganizationMemberTeamTest(APITestCase):
    """POSTs to the member-team-details endpoint and asserts who may add
    which role to a team: 201 = added, 202 = access request queued instead,
    400 = forbidden."""

    # Endpoint slug and HTTP verb consumed by APITestCase.get_response.
    endpoint = "sentry-api-0-organization-member-team-details"
    method = "post"

    def test_can_join_as_owner_without_open_membership(self):
        # flags=0 disables open membership; owners may still join directly.
        organization = self.create_organization(name="foo", owner=self.user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        owner = self.create_member(
            organization=organization, user=self.create_user(), role="owner", teams=[]
        )

        self.login_as(owner.user)

        resp = self.get_response(organization.slug, owner.id, team.slug)
        assert resp.status_code == 201

    def test_cannot_join_as_member_without_open_membership(self):
        # With closed membership a plain member's join turns into an
        # OrganizationAccessRequest (202) rather than a direct add.
        organization = self.create_organization(name="foo", owner=self.user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[]
        )

        self.login_as(member.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 202

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

        assert OrganizationAccessRequest.objects.filter(team=team, member=member).exists()

    def test_can_join_as_member_with_open_membership(self):
        # allow_joinleave lets any member self-join a team.
        organization = self.create_organization(
            name="foo", owner=self.user, flags=Organization.flags.allow_joinleave
        )
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[]
        )

        self.login_as(member.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 201

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=member).exists()

    def test_member_can_add_member_with_open_membership(self):
        # Under open membership a member may add ANOTHER member, not just self.
        organization = self.create_organization(
            name="foo", owner=self.user, flags=Organization.flags.allow_joinleave
        )
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member"
        )
        target_member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[]
        )

        self.login_as(member.user)

        resp = self.get_response(organization.slug, target_member.id, team.slug)
        assert resp.status_code == 201

        assert OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=target_member
        ).exists()

    def test_owner_can_add_member(self):
        user = self.create_user()
        organization = self.create_organization(name="foo", owner=user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[]
        )

        self.login_as(user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 201

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=member).exists()

    def test_owner_can_add_manager(self):
        user = self.create_user()
        organization = self.create_organization(name="foo", owner=user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        manager = self.create_member(
            organization=organization, user=self.create_user(), role="manager", teams=[]
        )

        self.login_as(user)

        resp = self.get_response(organization.slug, manager.id, team.slug)
        assert resp.status_code == 201

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=manager).exists()

    def test_owner_can_add_other_owner(self):
        user = self.create_user()
        organization = self.create_organization(name="foo", owner=user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        owner = self.create_member(
            organization=organization, user=self.create_user(), role="owner", teams=[]
        )

        self.login_as(user)

        resp = self.get_response(organization.slug, owner.id, team.slug)
        assert resp.status_code == 201

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=owner).exists()

    def test_manager_can_add_member(self):
        organization = self.create_organization(name="foo", flags=0)
        team = self.create_team(name="foo", organization=organization)
        manager = self.create_member(
            organization=organization, user=self.create_user(), role="manager", teams=[team]
        )
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[]
        )

        self.login_as(manager.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 201

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=member).exists()

    def test_manager_cannot_add_owner(self):
        # Managers rank below owners, so adding an owner is rejected (400).
        organization = self.create_organization(name="foo", flags=0)
        team = self.create_team(name="foo", organization=organization)
        manager = self.create_member(
            organization=organization, user=self.create_user(), role="manager", teams=[team]
        )
        owner = self.create_member(
            organization=organization, user=self.create_user(), role="owner", teams=[]
        )

        self.login_as(manager.user)

        resp = self.get_response(organization.slug, owner.id, team.slug)
        assert resp.status_code == 400

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=owner
        ).exists()

    def test_admin_not_in_team_cannot_add_member(self):
        # Admin privileges are scoped to teams the admin belongs to.
        organization = self.create_organization(name="foo", owner=self.user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        admin = self.create_member(
            organization=organization, user=self.create_user(), role="admin", teams=[]
        )
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[]
        )

        self.login_as(admin.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 400

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

    def test_admin_in_team_can_add_member(self):
        organization = self.create_organization(name="foo", owner=self.user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        admin = self.create_member(
            organization=organization, user=self.create_user(), role="admin", teams=[team]
        )
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[]
        )

        self.login_as(admin.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 201

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=member).exists()
class DeleteOrganizationMemberTeamTest(APITestCase):
    """DELETEs to the member-team-details endpoint and asserts who may
    remove which role from a team: 200 = removed (leaving is idempotent even
    for non-members), 400 = forbidden."""

    # Endpoint slug and HTTP verb consumed by APITestCase.get_response.
    endpoint = "sentry-api-0-organization-member-team-details"
    method = "delete"

    def test_can_leave_as_member(self):
        organization = self.create_organization(name="foo", owner=self.user)
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[team]
        )

        self.login_as(member.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

    def test_can_leave_as_non_member(self):
        # Leaving a team you are not on still succeeds (idempotent DELETE).
        organization = self.create_organization(name="foo", owner=self.user)
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization,
            user=self.create_user(is_superuser=False),
            role="member",
            teams=[],
        )

        self.login_as(member.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

    def test_can_leave_as_superuser_without_membership(self):
        organization = self.create_organization(name="foo", owner=self.user)
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization,
            user=self.create_user(is_superuser=True),
            role="member",
            teams=[],
        )

        self.login_as(member.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

    def test_owner_can_remove_member(self):
        user = self.create_user()
        organization = self.create_organization(name="foo", owner=user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[team]
        )

        self.login_as(user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

    def test_owner_can_remove_manager(self):
        user = self.create_user()
        organization = self.create_organization(name="foo", owner=user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        manager = self.create_member(
            organization=organization, user=self.create_user(), role="manager", teams=[team]
        )

        self.login_as(user)

        resp = self.get_response(organization.slug, manager.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=manager
        ).exists()

    def test_owner_can_remove_other_owner(self):
        user = self.create_user()
        organization = self.create_organization(name="foo", owner=user, flags=0)
        team = self.create_team(name="foo", organization=organization)
        owner = self.create_member(
            organization=organization, user=self.create_user(), role="owner", teams=[team]
        )

        self.login_as(user)

        resp = self.get_response(organization.slug, owner.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=owner
        ).exists()

    def test_manager_can_remove_member(self):
        organization = self.create_organization(name="foo", flags=0)
        team = self.create_team(name="foo", organization=organization)
        manager = self.create_member(
            organization=organization, user=self.create_user(), role="manager", teams=[team]
        )
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[team]
        )

        self.login_as(manager.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

    def test_manager_cannot_remove_owner(self):
        # Managers rank below owners, so the removal is rejected (400) and
        # the owner's team membership survives.
        organization = self.create_organization(name="foo", flags=0)
        team = self.create_team(name="foo", organization=organization)
        manager = self.create_member(
            organization=organization, user=self.create_user(), role="manager", teams=[team]
        )
        owner = self.create_member(
            organization=organization, user=self.create_user(), role="owner", teams=[team]
        )

        self.login_as(manager.user)

        resp = self.get_response(organization.slug, owner.id, team.slug)
        assert resp.status_code == 400

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=owner).exists()

    def test_admin_in_team_can_remove_member(self):
        organization = self.create_organization(name="foo", flags=0)
        team = self.create_team(name="foo", organization=organization)
        admin = self.create_member(
            organization=organization, user=self.create_user(), role="admin", teams=[team]
        )
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[team]
        )

        self.login_as(admin.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 200

        assert not OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=member
        ).exists()

    def test_admin_not_in_team_cannot_remove_member(self):
        # Admin privileges are scoped to teams the admin belongs to.
        organization = self.create_organization(name="foo", flags=0)
        team = self.create_team(name="foo", organization=organization)
        admin = self.create_member(
            organization=organization, user=self.create_user(), role="admin", teams=[]
        )
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[team]
        )

        self.login_as(admin.user)

        resp = self.get_response(organization.slug, member.id, team.slug)
        assert resp.status_code == 400

        assert OrganizationMemberTeam.objects.filter(team=team, organizationmember=member).exists()

    def test_member_cannot_remove_member(self):
        # Even with open membership, plain members cannot remove others.
        organization = self.create_organization(
            name="foo", flags=Organization.flags.allow_joinleave
        )
        team = self.create_team(name="foo", organization=organization)
        member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[team]
        )
        target_member = self.create_member(
            organization=organization, user=self.create_user(), role="member", teams=[team]
        )

        self.login_as(member.user)

        resp = self.get_response(organization.slug, target_member.id, team.slug)
        assert resp.status_code == 400

        assert OrganizationMemberTeam.objects.filter(
            team=team, organizationmember=target_member
        ).exists()
| 41.494652 | 100 | 0.669373 | 1,728 | 15,519 | 5.831597 | 0.043403 | 0.113129 | 0.052793 | 0.067877 | 0.963283 | 0.957229 | 0.956336 | 0.953061 | 0.95068 | 0.95068 | 0 | 0.006948 | 0.221019 | 15,519 | 373 | 101 | 41.605898 | 0.826619 | 0 | 0 | 0.755776 | 0 | 0 | 0.027128 | 0.005799 | 0 | 0 | 0 | 0 | 0.145215 | 1 | 0.072607 | false | 0 | 0.009901 | 0 | 0.10231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d230a80c00f60dc783c9a7fa657c41e5fcdbc489 | 21,096 | py | Python | sdk/python/pulumi_vault/transform/transformation.py | pulumi/pulumi-vault | 1682875f4a5d7d508f36e166529ad2b8aec34090 | [
"ECL-2.0",
"Apache-2.0"
] | 10 | 2019-10-07T17:44:18.000Z | 2022-03-30T20:46:33.000Z | sdk/python/pulumi_vault/transform/transformation.py | pulumi/pulumi-vault | 1682875f4a5d7d508f36e166529ad2b8aec34090 | [
"ECL-2.0",
"Apache-2.0"
] | 79 | 2019-10-11T18:13:07.000Z | 2022-03-31T21:09:41.000Z | sdk/python/pulumi_vault/transform/transformation.py | pulumi/pulumi-vault | 1682875f4a5d7d508f36e166529ad2b8aec34090 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2019-10-28T10:08:40.000Z | 2020-03-17T14:20:55.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['TransformationArgs', 'Transformation']
# NOTE: generated by the Pulumi Terraform Bridge (tfgen); regenerate rather
# than hand-editing. Each property pair maps a Python snake_case field to the
# provider's camelCase wire name via pulumi.getter/pulumi.set.
@pulumi.input_type
class TransformationArgs:
    def __init__(__self__, *,
                 path: pulumi.Input[str],
                 allowed_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 masking_character: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 template: Optional[pulumi.Input[str]] = None,
                 templates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tweak_source: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Transformation resource.
        :param pulumi.Input[str] path: Path to where the back-end is mounted within Vault.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_roles: The set of roles allowed to perform this transformation.
        :param pulumi.Input[str] masking_character: The character used to replace data when in masking mode
        :param pulumi.Input[str] name: The name of the transformation.
        :param pulumi.Input[str] template: The name of the template to use.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] templates: Templates configured for transformation.
        :param pulumi.Input[str] tweak_source: The source of where the tweak value comes from. Only valid when in FPE mode.
        :param pulumi.Input[str] type: The type of transformation to perform.
        """
        # 'path' is the only required argument; optional args are only set
        # when provided so unset fields stay absent from the resource inputs.
        pulumi.set(__self__, "path", path)
        if allowed_roles is not None:
            pulumi.set(__self__, "allowed_roles", allowed_roles)
        if masking_character is not None:
            pulumi.set(__self__, "masking_character", masking_character)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if template is not None:
            pulumi.set(__self__, "template", template)
        if templates is not None:
            pulumi.set(__self__, "templates", templates)
        if tweak_source is not None:
            pulumi.set(__self__, "tweak_source", tweak_source)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def path(self) -> pulumi.Input[str]:
        """
        Path to where the back-end is mounted within Vault.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: pulumi.Input[str]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter(name="allowedRoles")
    def allowed_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The set of roles allowed to perform this transformation.
        """
        return pulumi.get(self, "allowed_roles")

    @allowed_roles.setter
    def allowed_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_roles", value)

    @property
    @pulumi.getter(name="maskingCharacter")
    def masking_character(self) -> Optional[pulumi.Input[str]]:
        """
        The character used to replace data when in masking mode
        """
        return pulumi.get(self, "masking_character")

    @masking_character.setter
    def masking_character(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "masking_character", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the transformation.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def template(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the template to use.
        """
        return pulumi.get(self, "template")

    @template.setter
    def template(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "template", value)

    @property
    @pulumi.getter
    def templates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Templates configured for transformation.
        """
        return pulumi.get(self, "templates")

    @templates.setter
    def templates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "templates", value)

    @property
    @pulumi.getter(name="tweakSource")
    def tweak_source(self) -> Optional[pulumi.Input[str]]:
        """
        The source of where the tweak value comes from. Only valid when in FPE mode.
        """
        return pulumi.get(self, "tweak_source")

    @tweak_source.setter
    def tweak_source(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tweak_source", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of transformation to perform.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
# NOTE: generated by the Pulumi Terraform Bridge (tfgen); regenerate rather
# than hand-editing. Mirrors TransformationArgs but with every field optional,
# since state lookups may supply any subset of properties.
@pulumi.input_type
class _TransformationState:
    def __init__(__self__, *,
                 allowed_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 masking_character: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 template: Optional[pulumi.Input[str]] = None,
                 templates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 tweak_source: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Transformation resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_roles: The set of roles allowed to perform this transformation.
        :param pulumi.Input[str] masking_character: The character used to replace data when in masking mode
        :param pulumi.Input[str] name: The name of the transformation.
        :param pulumi.Input[str] path: Path to where the back-end is mounted within Vault.
        :param pulumi.Input[str] template: The name of the template to use.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] templates: Templates configured for transformation.
        :param pulumi.Input[str] tweak_source: The source of where the tweak value comes from. Only valid when in FPE mode.
        :param pulumi.Input[str] type: The type of transformation to perform.
        """
        # Only set fields that were actually provided.
        if allowed_roles is not None:
            pulumi.set(__self__, "allowed_roles", allowed_roles)
        if masking_character is not None:
            pulumi.set(__self__, "masking_character", masking_character)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if template is not None:
            pulumi.set(__self__, "template", template)
        if templates is not None:
            pulumi.set(__self__, "templates", templates)
        if tweak_source is not None:
            pulumi.set(__self__, "tweak_source", tweak_source)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="allowedRoles")
    def allowed_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The set of roles allowed to perform this transformation.
        """
        return pulumi.get(self, "allowed_roles")

    @allowed_roles.setter
    def allowed_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_roles", value)

    @property
    @pulumi.getter(name="maskingCharacter")
    def masking_character(self) -> Optional[pulumi.Input[str]]:
        """
        The character used to replace data when in masking mode
        """
        return pulumi.get(self, "masking_character")

    @masking_character.setter
    def masking_character(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "masking_character", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the transformation.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """
        Path to where the back-end is mounted within Vault.
        """
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def template(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the template to use.
        """
        return pulumi.get(self, "template")

    @template.setter
    def template(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "template", value)

    @property
    @pulumi.getter
    def templates(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Templates configured for transformation.
        """
        return pulumi.get(self, "templates")

    @templates.setter
    def templates(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "templates", value)

    @property
    @pulumi.getter(name="tweakSource")
    def tweak_source(self) -> Optional[pulumi.Input[str]]:
        """
        The source of where the tweak value comes from. Only valid when in FPE mode.
        """
        return pulumi.get(self, "tweak_source")

    @tweak_source.setter
    def tweak_source(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tweak_source", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of transformation to perform.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
class Transformation(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allowed_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
masking_character: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path: Optional[pulumi.Input[str]] = None,
template: Optional[pulumi.Input[str]] = None,
templates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tweak_source: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
This resource supports the "/transform/transformation/{name}" Vault endpoint.
It creates or updates a transformation with the given name. If a transformation with the name does not exist,
it will be created. If the transformation exists, it will be updated with the new attributes.
## Example Usage
```python
import pulumi
import pulumi_vault as vault
mount_transform = vault.Mount("mountTransform",
path="transform",
type="transform")
test = vault.transform.Transformation("test",
path=mount_transform.path,
type="fpe",
template="ccn",
tweak_source="internal",
allowed_roles=["payments"])
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_roles: The set of roles allowed to perform this transformation.
:param pulumi.Input[str] masking_character: The character used to replace data when in masking mode
:param pulumi.Input[str] name: The name of the transformation.
:param pulumi.Input[str] path: Path to where the back-end is mounted within Vault.
:param pulumi.Input[str] template: The name of the template to use.
:param pulumi.Input[Sequence[pulumi.Input[str]]] templates: Templates configured for transformation.
:param pulumi.Input[str] tweak_source: The source of where the tweak value comes from. Only valid when in FPE mode.
:param pulumi.Input[str] type: The type of transformation to perform.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: TransformationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource supports the "/transform/transformation/{name}" Vault endpoint.
It creates or updates a transformation with the given name. If a transformation with the name does not exist,
it will be created. If the transformation exists, it will be updated with the new attributes.
## Example Usage
```python
import pulumi
import pulumi_vault as vault
mount_transform = vault.Mount("mountTransform",
path="transform",
type="transform")
test = vault.transform.Transformation("test",
path=mount_transform.path,
type="fpe",
template="ccn",
tweak_source="internal",
allowed_roles=["payments"])
```
:param str resource_name: The name of the resource.
:param TransformationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(TransformationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allowed_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
masking_character: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path: Optional[pulumi.Input[str]] = None,
template: Optional[pulumi.Input[str]] = None,
templates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tweak_source: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = TransformationArgs.__new__(TransformationArgs)
__props__.__dict__["allowed_roles"] = allowed_roles
__props__.__dict__["masking_character"] = masking_character
__props__.__dict__["name"] = name
if path is None and not opts.urn:
raise TypeError("Missing required property 'path'")
__props__.__dict__["path"] = path
__props__.__dict__["template"] = template
__props__.__dict__["templates"] = templates
__props__.__dict__["tweak_source"] = tweak_source
__props__.__dict__["type"] = type
super(Transformation, __self__).__init__(
'vault:transform/transformation:Transformation',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
allowed_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
masking_character: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
path: Optional[pulumi.Input[str]] = None,
template: Optional[pulumi.Input[str]] = None,
templates: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tweak_source: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None) -> 'Transformation':
"""
Get an existing Transformation resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_roles: The set of roles allowed to perform this transformation.
:param pulumi.Input[str] masking_character: The character used to replace data when in masking mode
:param pulumi.Input[str] name: The name of the transformation.
:param pulumi.Input[str] path: Path to where the back-end is mounted within Vault.
:param pulumi.Input[str] template: The name of the template to use.
:param pulumi.Input[Sequence[pulumi.Input[str]]] templates: Templates configured for transformation.
:param pulumi.Input[str] tweak_source: The source of where the tweak value comes from. Only valid when in FPE mode.
:param pulumi.Input[str] type: The type of transformation to perform.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _TransformationState.__new__(_TransformationState)
__props__.__dict__["allowed_roles"] = allowed_roles
__props__.__dict__["masking_character"] = masking_character
__props__.__dict__["name"] = name
__props__.__dict__["path"] = path
__props__.__dict__["template"] = template
__props__.__dict__["templates"] = templates
__props__.__dict__["tweak_source"] = tweak_source
__props__.__dict__["type"] = type
return Transformation(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allowedRoles")
def allowed_roles(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The set of roles allowed to perform this transformation.
"""
return pulumi.get(self, "allowed_roles")
@property
@pulumi.getter(name="maskingCharacter")
def masking_character(self) -> pulumi.Output[Optional[str]]:
"""
The character used to replace data when in masking mode
"""
return pulumi.get(self, "masking_character")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the transformation.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def path(self) -> pulumi.Output[str]:
"""
Path to where the back-end is mounted within Vault.
"""
return pulumi.get(self, "path")
@property
@pulumi.getter
def template(self) -> pulumi.Output[Optional[str]]:
"""
The name of the template to use.
"""
return pulumi.get(self, "template")
@property
@pulumi.getter
def templates(self) -> pulumi.Output[Sequence[str]]:
"""
Templates configured for transformation.
"""
return pulumi.get(self, "templates")
@property
@pulumi.getter(name="tweakSource")
def tweak_source(self) -> pulumi.Output[Optional[str]]:
"""
The source of where the tweak value comes from. Only valid when in FPE mode.
"""
return pulumi.get(self, "tweak_source")
@property
@pulumi.getter
def type(self) -> pulumi.Output[Optional[str]]:
"""
The type of transformation to perform.
"""
return pulumi.get(self, "type")
| 40.725869 | 134 | 0.63211 | 2,437 | 21,096 | 5.291342 | 0.069758 | 0.114308 | 0.115083 | 0.08701 | 0.864133 | 0.850097 | 0.829003 | 0.819155 | 0.8152 | 0.801318 | 0 | 0.000064 | 0.260286 | 21,096 | 517 | 135 | 40.804642 | 0.826274 | 0.297971 | 0 | 0.806897 | 1 | 0 | 0.076429 | 0.00332 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162069 | false | 0.003448 | 0.017241 | 0 | 0.275862 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d232504d3584a5ad684a3bcfaf4bdeb39587ce58 | 20,794 | py | Python | find_offset.py | ono7/security_and_bug_hunting | 972c79fbae00f1490a9e7fc597ddb8b1f624ecd4 | [
"Unlicense"
] | null | null | null | find_offset.py | ono7/security_and_bug_hunting | 972c79fbae00f1490a9e7fc597ddb8b1f624ecd4 | [
"Unlicense"
] | null | null | null | find_offset.py | ono7/security_and_bug_hunting | 972c79fbae00f1490a9e7fc597ddb8b1f624ecd4 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
"""
Sat Sep 21 3:47:58 AM 2017
find needle in haystack..
ono7
"""
import sys
if len(sys.argv) != 2:
print("feed me a pattern: po Aa0aA1")
sys.exit()
# Pattern of 20280 bytes :
# ------------------------
# ASCII:
pattern = "Aa0Aa1Aa2Aa3Aa4Aa5Aa6Aa7Aa8Aa9Ab0Ab1Ab2Ab3Ab4Ab5Ab6Ab7Ab8Ab9Ac0Ac1Ac2Ac3Ac4Ac5Ac6Ac7Ac8Ac9Ad0Ad1Ad2Ad3Ad4Ad5Ad6Ad7Ad8Ad9Ae0Ae1Ae2Ae3Ae4Ae5Ae6Ae7Ae8Ae9Af0Af1Af2Af3Af4Af5Af6Af7Af8Af9Ag0Ag1Ag2Ag3Ag4Ag5Ag6Ag7Ag8Ag9Ah0Ah1Ah2Ah3Ah4Ah5Ah6Ah7Ah8Ah9Ai0Ai1Ai2Ai3Ai4Ai5Ai6Ai7Ai8Ai9Aj0Aj1Aj2Aj3Aj4Aj5Aj6Aj7Aj8Aj9Ak0Ak1Ak2Ak3Ak4Ak5Ak6Ak7Ak8Ak9Al0Al1Al2Al3Al4Al5Al6Al7Al8Al9Am0Am1Am2Am3Am4Am5Am6Am7Am8Am9An0An1An2An3An4An5An6An7An8An9Ao0Ao1Ao2Ao3Ao4Ao5Ao6Ao7Ao8Ao9Ap0Ap1Ap2Ap3Ap4Ap5Ap6Ap7Ap8Ap9Aq0Aq1Aq2Aq3Aq4Aq5Aq6Aq7Aq8Aq9Ar0Ar1Ar2Ar3Ar4Ar5Ar6Ar7Ar8Ar9As0As1As2As3As4As5As6As7As8As9At0At1At2At3At4At5At6At7At8At9Au0Au1Au2Au3Au4Au5Au6Au7Au8Au9Av0Av1Av2Av3Av4Av5Av6Av7Av8Av9Aw0Aw1Aw2Aw3Aw4Aw5Aw6Aw7Aw8Aw9Ax0Ax1Ax2Ax3Ax4Ax5Ax6Ax7Ax8Ax9Ay0Ay1Ay2Ay3Ay4Ay5Ay6Ay7Ay8Ay9Az0Az1Az2Az3Az4Az5Az6Az7Az8Az9Ba0Ba1Ba2Ba3Ba4Ba5Ba6Ba7Ba8Ba9Bb0Bb1Bb2Bb3Bb4Bb5Bb6Bb7Bb8Bb9Bc0Bc1Bc2Bc3Bc4Bc5Bc6Bc7Bc8Bc9Bd0Bd1Bd2Bd3Bd4Bd5Bd6Bd7Bd8Bd9Be0Be1Be2Be3Be4Be5Be6Be7Be8Be9Bf0Bf1Bf2Bf3Bf4Bf5Bf6Bf7Bf8Bf9Bg0Bg1Bg2Bg3Bg4Bg5Bg6Bg7Bg8Bg9Bh0Bh1Bh2Bh3Bh4Bh5Bh6Bh7Bh8Bh9Bi0Bi1Bi2Bi3Bi4Bi5Bi6Bi7Bi8Bi9Bj0Bj1Bj2Bj3Bj4Bj5Bj6Bj7Bj8Bj9Bk0Bk1Bk2Bk3Bk4Bk5Bk6Bk7Bk8Bk9Bl0Bl1Bl2Bl3Bl4Bl5Bl6Bl7Bl8Bl9Bm0Bm1Bm2Bm3Bm4Bm5Bm6Bm7Bm8Bm9Bn0Bn1Bn2Bn3Bn4Bn5Bn6Bn7Bn8Bn9Bo0Bo1Bo2Bo3Bo4Bo5Bo6Bo7Bo8Bo9Bp0Bp1Bp2Bp3Bp4Bp5Bp6Bp7Bp8Bp9Bq0Bq1Bq2Bq3Bq4Bq5Bq6Bq7Bq8Bq9Br0Br1Br2Br3Br4Br5Br6Br7Br8Br9Bs0Bs1Bs2Bs3Bs4Bs5Bs6Bs7Bs8Bs9Bt0Bt1Bt2Bt3Bt4Bt5Bt6Bt7Bt8Bt9Bu0Bu1Bu2Bu3Bu4Bu5Bu6Bu7Bu8Bu9Bv0Bv1Bv2Bv3Bv4Bv5Bv6Bv7Bv8Bv9Bw0Bw1Bw2Bw3Bw4Bw5Bw6Bw7Bw8Bw9Bx0Bx1Bx2Bx3Bx4Bx5Bx6Bx7Bx8Bx9By0By1By2By3By4By5By6By7By8By9Bz0Bz1Bz2Bz3Bz4Bz5Bz6Bz7Bz8Bz9Ca0Ca1Ca2Ca3Ca4Ca5Ca6Ca7Ca8Ca9Cb0Cb1Cb2Cb3Cb4Cb5Cb6Cb7Cb8Cb9Cc0Cc1Cc2Cc3Cc4Cc5Cc6Cc7Cc8Cc9Cd0Cd1Cd2Cd3Cd4Cd5Cd6Cd7Cd8Cd9Ce0Ce1Ce2Ce3Ce4Ce5Ce6Ce7Ce8Ce9Cf0Cf1Cf2Cf3Cf4Cf5Cf6Cf7Cf8Cf9Cg0Cg1Cg2Cg3Cg4Cg5Cg6Cg7Cg8Cg9Ch0Ch1Ch2Ch3Ch4Ch5Ch6Ch7Ch8Ch9Ci0Ci1Ci2Ci3Ci4Ci5Ci6Ci7Ci8Ci9Cj0Cj1Cj2Cj3Cj4Cj5Cj6Cj7Cj8Cj9Ck0Ck1Ck2Ck3Ck4Ck5Ck6Ck7Ck8Ck9Cl0Cl1Cl2Cl3Cl4Cl5Cl6Cl7Cl8Cl9Cm0Cm1Cm2Cm3Cm4Cm5Cm6Cm7Cm8Cm9Cn0Cn1Cn2Cn3Cn4Cn5Cn6Cn7Cn8Cn9Co0Co1Co2
Co3Co4Co5Co6Co7Co8Co9Cp0Cp1Cp2Cp3Cp4Cp5Cp6Cp7Cp8Cp9Cq0Cq1Cq2Cq3Cq4Cq5Cq6Cq7Cq8Cq9Cr0Cr1Cr2Cr3Cr4Cr5Cr6Cr7Cr8Cr9Cs0Cs1Cs2Cs3Cs4Cs5Cs6Cs7Cs8Cs9Ct0Ct1Ct2Ct3Ct4Ct5Ct6Ct7Ct8Ct9Cu0Cu1Cu2Cu3Cu4Cu5Cu6Cu7Cu8Cu9Cv0Cv1Cv2Cv3Cv4Cv5Cv6Cv7Cv8Cv9Cw0Cw1Cw2Cw3Cw4Cw5Cw6Cw7Cw8Cw9Cx0Cx1Cx2Cx3Cx4Cx5Cx6Cx7Cx8Cx9Cy0Cy1Cy2Cy3Cy4Cy5Cy6Cy7Cy8Cy9Cz0Cz1Cz2Cz3Cz4Cz5Cz6Cz7Cz8Cz9Da0Da1Da2Da3Da4Da5Da6Da7Da8Da9Db0Db1Db2Db3Db4Db5Db6Db7Db8Db9Dc0Dc1Dc2Dc3Dc4Dc5Dc6Dc7Dc8Dc9Dd0Dd1Dd2Dd3Dd4Dd5Dd6Dd7Dd8Dd9De0De1De2De3De4De5De6De7De8De9Df0Df1Df2Df3Df4Df5Df6Df7Df8Df9Dg0Dg1Dg2Dg3Dg4Dg5Dg6Dg7Dg8Dg9Dh0Dh1Dh2Dh3Dh4Dh5Dh6Dh7Dh8Dh9Di0Di1Di2Di3Di4Di5Di6Di7Di8Di9Dj0Dj1Dj2Dj3Dj4Dj5Dj6Dj7Dj8Dj9Dk0Dk1Dk2Dk3Dk4Dk5Dk6Dk7Dk8Dk9Dl0Dl1Dl2Dl3Dl4Dl5Dl6Dl7Dl8Dl9Dm0Dm1Dm2Dm3Dm4Dm5Dm6Dm7Dm8Dm9Dn0Dn1Dn2Dn3Dn4Dn5Dn6Dn7Dn8Dn9Do0Do1Do2Do3Do4Do5Do6Do7Do8Do9Dp0Dp1Dp2Dp3Dp4Dp5Dp6Dp7Dp8Dp9Dq0Dq1Dq2Dq3Dq4Dq5Dq6Dq7Dq8Dq9Dr0Dr1Dr2Dr3Dr4Dr5Dr6Dr7Dr8Dr9Ds0Ds1Ds2Ds3Ds4Ds5Ds6Ds7Ds8Ds9Dt0Dt1Dt2Dt3Dt4Dt5Dt6Dt7Dt8Dt9Du0Du1Du2Du3Du4Du5Du6Du7Du8Du9Dv0Dv1Dv2Dv3Dv4Dv5Dv6Dv7Dv8Dv9Dw0Dw1Dw2Dw3Dw4Dw5Dw6Dw7Dw8Dw9Dx0Dx1Dx2Dx3Dx4Dx5Dx6Dx7Dx8Dx9Dy0Dy1Dy2Dy3Dy4Dy5Dy6Dy7Dy8Dy9Dz0Dz1Dz2Dz3Dz4Dz5Dz6Dz7Dz8Dz9Ea0Ea1Ea2Ea3Ea4Ea5Ea6Ea7Ea8Ea9Eb0Eb1Eb2Eb3Eb4Eb5Eb6Eb7Eb8Eb9Ec0Ec1Ec2Ec3Ec4Ec5Ec6Ec7Ec8Ec9Ed0Ed1Ed2Ed3Ed4Ed5Ed6Ed7Ed8Ed9Ee0Ee1Ee2Ee3Ee4Ee5Ee6Ee7Ee8Ee9Ef0Ef1Ef2Ef3Ef4Ef5Ef6Ef7Ef8Ef9Eg0Eg1Eg2Eg3Eg4Eg5Eg6Eg7Eg8Eg9Eh0Eh1Eh2Eh3Eh4Eh5Eh6Eh7Eh8Eh9Ei0Ei1Ei2Ei3Ei4Ei5Ei6Ei7Ei8Ei9Ej0Ej1Ej2Ej3Ej4Ej5Ej6Ej7Ej8Ej9Ek0Ek1Ek2Ek3Ek4Ek5Ek6Ek7Ek8Ek9El0El1El2El3El4El5El6El7El8El9Em0Em1Em2Em3Em4Em5Em6Em7Em8Em9En0En1En2En3En4En5En6En7En8En9Eo0Eo1Eo2Eo3Eo4Eo5Eo6Eo7Eo8Eo9Ep0Ep1Ep2Ep3Ep4Ep5Ep6Ep7Ep8Ep9Eq0Eq1Eq2Eq3Eq4Eq5Eq6Eq7Eq8Eq9Er0Er1Er2Er3Er4Er5Er6Er7Er8Er9Es0Es1Es2Es3Es4Es5Es6Es7Es8Es9Et0Et1Et2Et3Et4Et5Et6Et7Et8Et9Eu0Eu1Eu2Eu3Eu4Eu5Eu6Eu7Eu8Eu9Ev0Ev1Ev2Ev3Ev4Ev5Ev6Ev7Ev8Ev9Ew0Ew1Ew2Ew3Ew4Ew5Ew6Ew7Ew8Ew9Ex0Ex1Ex2Ex3Ex4Ex5Ex6Ex7Ex8Ex9Ey0Ey1Ey2Ey3Ey4Ey5Ey6Ey7Ey8Ey9Ez0Ez1Ez2Ez3Ez4Ez5Ez6Ez7Ez8Ez9Fa0Fa1Fa2Fa3Fa4Fa5Fa6Fa7Fa8Fa9Fb0Fb1Fb2Fb3Fb4Fb5Fb6Fb7Fb8Fb9Fc0Fc1Fc2Fc3Fc4Fc5Fc6Fc7Fc8Fc
9Fd0Fd1Fd2Fd3Fd4Fd5Fd6Fd7Fd8Fd9Fe0Fe1Fe2Fe3Fe4Fe5Fe6Fe7Fe8Fe9Ff0Ff1Ff2Ff3Ff4Ff5Ff6Ff7Ff8Ff9Fg0Fg1Fg2Fg3Fg4Fg5Fg6Fg7Fg8Fg9Fh0Fh1Fh2Fh3Fh4Fh5Fh6Fh7Fh8Fh9Fi0Fi1Fi2Fi3Fi4Fi5Fi6Fi7Fi8Fi9Fj0Fj1Fj2Fj3Fj4Fj5Fj6Fj7Fj8Fj9Fk0Fk1Fk2Fk3Fk4Fk5Fk6Fk7Fk8Fk9Fl0Fl1Fl2Fl3Fl4Fl5Fl6Fl7Fl8Fl9Fm0Fm1Fm2Fm3Fm4Fm5Fm6Fm7Fm8Fm9Fn0Fn1Fn2Fn3Fn4Fn5Fn6Fn7Fn8Fn9Fo0Fo1Fo2Fo3Fo4Fo5Fo6Fo7Fo8Fo9Fp0Fp1Fp2Fp3Fp4Fp5Fp6Fp7Fp8Fp9Fq0Fq1Fq2Fq3Fq4Fq5Fq6Fq7Fq8Fq9Fr0Fr1Fr2Fr3Fr4Fr5Fr6Fr7Fr8Fr9Fs0Fs1Fs2Fs3Fs4Fs5Fs6Fs7Fs8Fs9Ft0Ft1Ft2Ft3Ft4Ft5Ft6Ft7Ft8Ft9Fu0Fu1Fu2Fu3Fu4Fu5Fu6Fu7Fu8Fu9Fv0Fv1Fv2Fv3Fv4Fv5Fv6Fv7Fv8Fv9Fw0Fw1Fw2Fw3Fw4Fw5Fw6Fw7Fw8Fw9Fx0Fx1Fx2Fx3Fx4Fx5Fx6Fx7Fx8Fx9Fy0Fy1Fy2Fy3Fy4Fy5Fy6Fy7Fy8Fy9Fz0Fz1Fz2Fz3Fz4Fz5Fz6Fz7Fz8Fz9Ga0Ga1Ga2Ga3Ga4Ga5Ga6Ga7Ga8Ga9Gb0Gb1Gb2Gb3Gb4Gb5Gb6Gb7Gb8Gb9Gc0Gc1Gc2Gc3Gc4Gc5Gc6Gc7Gc8Gc9Gd0Gd1Gd2Gd3Gd4Gd5Gd6Gd7Gd8Gd9Ge0Ge1Ge2Ge3Ge4Ge5Ge6Ge7Ge8Ge9Gf0Gf1Gf2Gf3Gf4Gf5Gf6Gf7Gf8Gf9Gg0Gg1Gg2Gg3Gg4Gg5Gg6Gg7Gg8Gg9Gh0Gh1Gh2Gh3Gh4Gh5Gh6Gh7Gh8Gh9Gi0Gi1Gi2Gi3Gi4Gi5Gi6Gi7Gi8Gi9Gj0Gj1Gj2Gj3Gj4Gj5Gj6Gj7Gj8Gj9Gk0Gk1Gk2Gk3Gk4Gk5Gk6Gk7Gk8Gk9Gl0Gl1Gl2Gl3Gl4Gl5Gl6Gl7Gl8Gl9Gm0Gm1Gm2Gm3Gm4Gm5Gm6Gm7Gm8Gm9Gn0Gn1Gn2Gn3Gn4Gn5Gn6Gn7Gn8Gn9Go0Go1Go2Go3Go4Go5Go6Go7Go8Go9Gp0Gp1Gp2Gp3Gp4Gp5Gp6Gp7Gp8Gp9Gq0Gq1Gq2Gq3Gq4Gq5Gq6Gq7Gq8Gq9Gr0Gr1Gr2Gr3Gr4Gr5Gr6Gr7Gr8Gr9Gs0Gs1Gs2Gs3Gs4Gs5Gs6Gs7Gs8Gs9Gt0Gt1Gt2Gt3Gt4Gt5Gt6Gt7Gt8Gt9Gu0Gu1Gu2Gu3Gu4Gu5Gu6Gu7Gu8Gu9Gv0Gv1Gv2Gv3Gv4Gv5Gv6Gv7Gv8Gv9Gw0Gw1Gw2Gw3Gw4Gw5Gw6Gw7Gw8Gw9Gx0Gx1Gx2Gx3Gx4Gx5Gx6Gx7Gx8Gx9Gy0Gy1Gy2Gy3Gy4Gy5Gy6Gy7Gy8Gy9Gz0Gz1Gz2Gz3Gz4Gz5Gz6Gz7Gz8Gz9Ha0Ha1Ha2Ha3Ha4Ha5Ha6Ha7Ha8Ha9Hb0Hb1Hb2Hb3Hb4Hb5Hb6Hb7Hb8Hb9Hc0Hc1Hc2Hc3Hc4Hc5Hc6Hc7Hc8Hc9Hd0Hd1Hd2Hd3Hd4Hd5Hd6Hd7Hd8Hd9He0He1He2He3He4He5He6He7He8He9Hf0Hf1Hf2Hf3Hf4Hf5Hf6Hf7Hf8Hf9Hg0Hg1Hg2Hg3Hg4Hg5Hg6Hg7Hg8Hg9Hh0Hh1Hh2Hh3Hh4Hh5Hh6Hh7Hh8Hh9Hi0Hi1Hi2Hi3Hi4Hi5Hi6Hi7Hi8Hi9Hj0Hj1Hj2Hj3Hj4Hj5Hj6Hj7Hj8Hj9Hk0Hk1Hk2Hk3Hk4Hk5Hk6Hk7Hk8Hk9Hl0Hl1Hl2Hl3Hl4Hl5Hl6Hl7Hl8Hl9Hm0Hm1Hm2Hm3Hm4Hm5Hm6Hm7Hm8Hm9Hn0Hn1Hn2Hn3Hn4Hn5Hn6Hn7Hn8Hn9Ho0Ho1Ho2Ho3Ho4Ho5Ho6Ho7Ho8Ho9Hp0Hp1Hp2Hp3Hp4Hp5Hp6Hp7Hp8Hp9Hq0Hq1Hq2Hq3Hq4Hq5Hq6Hq7Hq8Hq9Hr0Hr1Hr2Hr3Hr4Hr5H
r6Hr7Hr8Hr9Hs0Hs1Hs2Hs3Hs4Hs5Hs6Hs7Hs8Hs9Ht0Ht1Ht2Ht3Ht4Ht5Ht6Ht7Ht8Ht9Hu0Hu1Hu2Hu3Hu4Hu5Hu6Hu7Hu8Hu9Hv0Hv1Hv2Hv3Hv4Hv5Hv6Hv7Hv8Hv9Hw0Hw1Hw2Hw3Hw4Hw5Hw6Hw7Hw8Hw9Hx0Hx1Hx2Hx3Hx4Hx5Hx6Hx7Hx8Hx9Hy0Hy1Hy2Hy3Hy4Hy5Hy6Hy7Hy8Hy9Hz0Hz1Hz2Hz3Hz4Hz5Hz6Hz7Hz8Hz9Ia0Ia1Ia2Ia3Ia4Ia5Ia6Ia7Ia8Ia9Ib0Ib1Ib2Ib3Ib4Ib5Ib6Ib7Ib8Ib9Ic0Ic1Ic2Ic3Ic4Ic5Ic6Ic7Ic8Ic9Id0Id1Id2Id3Id4Id5Id6Id7Id8Id9Ie0Ie1Ie2Ie3Ie4Ie5Ie6Ie7Ie8Ie9If0If1If2If3If4If5If6If7If8If9Ig0Ig1Ig2Ig3Ig4Ig5Ig6Ig7Ig8Ig9Ih0Ih1Ih2Ih3Ih4Ih5Ih6Ih7Ih8Ih9Ii0Ii1Ii2Ii3Ii4Ii5Ii6Ii7Ii8Ii9Ij0Ij1Ij2Ij3Ij4Ij5Ij6Ij7Ij8Ij9Ik0Ik1Ik2Ik3Ik4Ik5Ik6Ik7Ik8Ik9Il0Il1Il2Il3Il4Il5Il6Il7Il8Il9Im0Im1Im2Im3Im4Im5Im6Im7Im8Im9In0In1In2In3In4In5In6In7In8In9Io0Io1Io2Io3Io4Io5Io6Io7Io8Io9Ip0Ip1Ip2Ip3Ip4Ip5Ip6Ip7Ip8Ip9Iq0Iq1Iq2Iq3Iq4Iq5Iq6Iq7Iq8Iq9Ir0Ir1Ir2Ir3Ir4Ir5Ir6Ir7Ir8Ir9Is0Is1Is2Is3Is4Is5Is6Is7Is8Is9It0It1It2It3It4It5It6It7It8It9Iu0Iu1Iu2Iu3Iu4Iu5Iu6Iu7Iu8Iu9Iv0Iv1Iv2Iv3Iv4Iv5Iv6Iv7Iv8Iv9Iw0Iw1Iw2Iw3Iw4Iw5Iw6Iw7Iw8Iw9Ix0Ix1Ix2Ix3Ix4Ix5Ix6Ix7Ix8Ix9Iy0Iy1Iy2Iy3Iy4Iy5Iy6Iy7Iy8Iy9Iz0Iz1Iz2Iz3Iz4Iz5Iz6Iz7Iz8Iz9Ja0Ja1Ja2Ja3Ja4Ja5Ja6Ja7Ja8Ja9Jb0Jb1Jb2Jb3Jb4Jb5Jb6Jb7Jb8Jb9Jc0Jc1Jc2Jc3Jc4Jc5Jc6Jc7Jc8Jc9Jd0Jd1Jd2Jd3Jd4Jd5Jd6Jd7Jd8Jd9Je0Je1Je2Je3Je4Je5Je6Je7Je8Je9Jf0Jf1Jf2Jf3Jf4Jf5Jf6Jf7Jf8Jf9Jg0Jg1Jg2Jg3Jg4Jg5Jg6Jg7Jg8Jg9Jh0Jh1Jh2Jh3Jh4Jh5Jh6Jh7Jh8Jh9Ji0Ji1Ji2Ji3Ji4Ji5Ji6Ji7Ji8Ji9Jj0Jj1Jj2Jj3Jj4Jj5Jj6Jj7Jj8Jj9Jk0Jk1Jk2Jk3Jk4Jk5Jk6Jk7Jk8Jk9Jl0Jl1Jl2Jl3Jl4Jl5Jl6Jl7Jl8Jl9Jm0Jm1Jm2Jm3Jm4Jm5Jm6Jm7Jm8Jm9Jn0Jn1Jn2Jn3Jn4Jn5Jn6Jn7Jn8Jn9Jo0Jo1Jo2Jo3Jo4Jo5Jo6Jo7Jo8Jo9Jp0Jp1Jp2Jp3Jp4Jp5Jp6Jp7Jp8Jp9Jq0Jq1Jq2Jq3Jq4Jq5Jq6Jq7Jq8Jq9Jr0Jr1Jr2Jr3Jr4Jr5Jr6Jr7Jr8Jr9Js0Js1Js2Js3Js4Js5Js6Js7Js8Js9Jt0Jt1Jt2Jt3Jt4Jt5Jt6Jt7Jt8Jt9Ju0Ju1Ju2Ju3Ju4Ju5Ju6Ju7Ju8Ju9Jv0Jv1Jv2Jv3Jv4Jv5Jv6Jv7Jv8Jv9Jw0Jw1Jw2Jw3Jw4Jw5Jw6Jw7Jw8Jw9Jx0Jx1Jx2Jx3Jx4Jx5Jx6Jx7Jx8Jx9Jy0Jy1Jy2Jy3Jy4Jy5Jy6Jy7Jy8Jy9Jz0Jz1Jz2Jz3Jz4Jz5Jz6Jz7Jz8Jz9Ka0Ka1Ka2Ka3Ka4Ka5Ka6Ka7Ka8Ka9Kb0Kb1Kb2Kb3Kb4Kb5Kb6Kb7Kb8Kb9Kc0Kc1Kc2Kc3Kc4Kc5Kc6Kc7Kc8Kc9Kd0Kd1Kd2Kd3Kd4Kd5Kd6Kd7Kd8Kd9Ke0Ke1Ke2Ke3Ke4Ke5Ke6Ke7Ke8Ke9Kf0Kf1Kf2Kf3Kf4Kf5Kf6Kf7Kf8Kf9Kg0Kg1Kg2
Kg3Kg4Kg5Kg6Kg7Kg8Kg9Kh0Kh1Kh2Kh3Kh4Kh5Kh6Kh7Kh8Kh9Ki0Ki1Ki2Ki3Ki4Ki5Ki6Ki7Ki8Ki9Kj0Kj1Kj2Kj3Kj4Kj5Kj6Kj7Kj8Kj9Kk0Kk1Kk2Kk3Kk4Kk5Kk6Kk7Kk8Kk9Kl0Kl1Kl2Kl3Kl4Kl5Kl6Kl7Kl8Kl9Km0Km1Km2Km3Km4Km5Km6Km7Km8Km9Kn0Kn1Kn2Kn3Kn4Kn5Kn6Kn7Kn8Kn9Ko0Ko1Ko2Ko3Ko4Ko5Ko6Ko7Ko8Ko9Kp0Kp1Kp2Kp3Kp4Kp5Kp6Kp7Kp8Kp9Kq0Kq1Kq2Kq3Kq4Kq5Kq6Kq7Kq8Kq9Kr0Kr1Kr2Kr3Kr4Kr5Kr6Kr7Kr8Kr9Ks0Ks1Ks2Ks3Ks4Ks5Ks6Ks7Ks8Ks9Kt0Kt1Kt2Kt3Kt4Kt5Kt6Kt7Kt8Kt9Ku0Ku1Ku2Ku3Ku4Ku5Ku6Ku7Ku8Ku9Kv0Kv1Kv2Kv3Kv4Kv5Kv6Kv7Kv8Kv9Kw0Kw1Kw2Kw3Kw4Kw5Kw6Kw7Kw8Kw9Kx0Kx1Kx2Kx3Kx4Kx5Kx6Kx7Kx8Kx9Ky0Ky1Ky2Ky3Ky4Ky5Ky6Ky7Ky8Ky9Kz0Kz1Kz2Kz3Kz4Kz5Kz6Kz7Kz8Kz9La0La1La2La3La4La5La6La7La8La9Lb0Lb1Lb2Lb3Lb4Lb5Lb6Lb7Lb8Lb9Lc0Lc1Lc2Lc3Lc4Lc5Lc6Lc7Lc8Lc9Ld0Ld1Ld2Ld3Ld4Ld5Ld6Ld7Ld8Ld9Le0Le1Le2Le3Le4Le5Le6Le7Le8Le9Lf0Lf1Lf2Lf3Lf4Lf5Lf6Lf7Lf8Lf9Lg0Lg1Lg2Lg3Lg4Lg5Lg6Lg7Lg8Lg9Lh0Lh1Lh2Lh3Lh4Lh5Lh6Lh7Lh8Lh9Li0Li1Li2Li3Li4Li5Li6Li7Li8Li9Lj0Lj1Lj2Lj3Lj4Lj5Lj6Lj7Lj8Lj9Lk0Lk1Lk2Lk3Lk4Lk5Lk6Lk7Lk8Lk9Ll0Ll1Ll2Ll3Ll4Ll5Ll6Ll7Ll8Ll9Lm0Lm1Lm2Lm3Lm4Lm5Lm6Lm7Lm8Lm9Ln0Ln1Ln2Ln3Ln4Ln5Ln6Ln7Ln8Ln9Lo0Lo1Lo2Lo3Lo4Lo5Lo6Lo7Lo8Lo9Lp0Lp1Lp2Lp3Lp4Lp5Lp6Lp7Lp8Lp9Lq0Lq1Lq2Lq3Lq4Lq5Lq6Lq7Lq8Lq9Lr0Lr1Lr2Lr3Lr4Lr5Lr6Lr7Lr8Lr9Ls0Ls1Ls2Ls3Ls4Ls5Ls6Ls7Ls8Ls9Lt0Lt1Lt2Lt3Lt4Lt5Lt6Lt7Lt8Lt9Lu0Lu1Lu2Lu3Lu4Lu5Lu6Lu7Lu8Lu9Lv0Lv1Lv2Lv3Lv4Lv5Lv6Lv7Lv8Lv9Lw0Lw1Lw2Lw3Lw4Lw5Lw6Lw7Lw8Lw9Lx0Lx1Lx2Lx3Lx4Lx5Lx6Lx7Lx8Lx9Ly0Ly1Ly2Ly3Ly4Ly5Ly6Ly7Ly8Ly9Lz0Lz1Lz2Lz3Lz4Lz5Lz6Lz7Lz8Lz9Ma0Ma1Ma2Ma3Ma4Ma5Ma6Ma7Ma8Ma9Mb0Mb1Mb2Mb3Mb4Mb5Mb6Mb7Mb8Mb9Mc0Mc1Mc2Mc3Mc4Mc5Mc6Mc7Mc8Mc9Md0Md1Md2Md3Md4Md5Md6Md7Md8Md9Me0Me1Me2Me3Me4Me5Me6Me7Me8Me9Mf0Mf1Mf2Mf3Mf4Mf5Mf6Mf7Mf8Mf9Mg0Mg1Mg2Mg3Mg4Mg5Mg6Mg7Mg8Mg9Mh0Mh1Mh2Mh3Mh4Mh5Mh6Mh7Mh8Mh9Mi0Mi1Mi2Mi3Mi4Mi5Mi6Mi7Mi8Mi9Mj0Mj1Mj2Mj3Mj4Mj5Mj6Mj7Mj8Mj9Mk0Mk1Mk2Mk3Mk4Mk5Mk6Mk7Mk8Mk9Ml0Ml1Ml2Ml3Ml4Ml5Ml6Ml7Ml8Ml9Mm0Mm1Mm2Mm3Mm4Mm5Mm6Mm7Mm8Mm9Mn0Mn1Mn2Mn3Mn4Mn5Mn6Mn7Mn8Mn9Mo0Mo1Mo2Mo3Mo4Mo5Mo6Mo7Mo8Mo9Mp0Mp1Mp2Mp3Mp4Mp5Mp6Mp7Mp8Mp9Mq0Mq1Mq2Mq3Mq4Mq5Mq6Mq7Mq8Mq9Mr0Mr1Mr2Mr3Mr4Mr5Mr6Mr7Mr8Mr9Ms0Ms1Ms2Ms3Ms4Ms5Ms6Ms7Ms8Ms9Mt0Mt1Mt2Mt3Mt4Mt5Mt6Mt7Mt8Mt9Mu0Mu1Mu2Mu3Mu4Mu5Mu6Mu7Mu8Mu
9Mv0Mv1Mv2Mv3Mv4Mv5Mv6Mv7Mv8Mv9Mw0Mw1Mw2Mw3Mw4Mw5Mw6Mw7Mw8Mw9Mx0Mx1Mx2Mx3Mx4Mx5Mx6Mx7Mx8Mx9My0My1My2My3My4My5My6My7My8My9Mz0Mz1Mz2Mz3Mz4Mz5Mz6Mz7Mz8Mz9Na0Na1Na2Na3Na4Na5Na6Na7Na8Na9Nb0Nb1Nb2Nb3Nb4Nb5Nb6Nb7Nb8Nb9Nc0Nc1Nc2Nc3Nc4Nc5Nc6Nc7Nc8Nc9Nd0Nd1Nd2Nd3Nd4Nd5Nd6Nd7Nd8Nd9Ne0Ne1Ne2Ne3Ne4Ne5Ne6Ne7Ne8Ne9Nf0Nf1Nf2Nf3Nf4Nf5Nf6Nf7Nf8Nf9Ng0Ng1Ng2Ng3Ng4Ng5Ng6Ng7Ng8Ng9Nh0Nh1Nh2Nh3Nh4Nh5Nh6Nh7Nh8Nh9Ni0Ni1Ni2Ni3Ni4Ni5Ni6Ni7Ni8Ni9Nj0Nj1Nj2Nj3Nj4Nj5Nj6Nj7Nj8Nj9Nk0Nk1Nk2Nk3Nk4Nk5Nk6Nk7Nk8Nk9Nl0Nl1Nl2Nl3Nl4Nl5Nl6Nl7Nl8Nl9Nm0Nm1Nm2Nm3Nm4Nm5Nm6Nm7Nm8Nm9Nn0Nn1Nn2Nn3Nn4Nn5Nn6Nn7Nn8Nn9No0No1No2No3No4No5No6No7No8No9Np0Np1Np2Np3Np4Np5Np6Np7Np8Np9Nq0Nq1Nq2Nq3Nq4Nq5Nq6Nq7Nq8Nq9Nr0Nr1Nr2Nr3Nr4Nr5Nr6Nr7Nr8Nr9Ns0Ns1Ns2Ns3Ns4Ns5Ns6Ns7Ns8Ns9Nt0Nt1Nt2Nt3Nt4Nt5Nt6Nt7Nt8Nt9Nu0Nu1Nu2Nu3Nu4Nu5Nu6Nu7Nu8Nu9Nv0Nv1Nv2Nv3Nv4Nv5Nv6Nv7Nv8Nv9Nw0Nw1Nw2Nw3Nw4Nw5Nw6Nw7Nw8Nw9Nx0Nx1Nx2Nx3Nx4Nx5Nx6Nx7Nx8Nx9Ny0Ny1Ny2Ny3Ny4Ny5Ny6Ny7Ny8Ny9Nz0Nz1Nz2Nz3Nz4Nz5Nz6Nz7Nz8Nz9Oa0Oa1Oa2Oa3Oa4Oa5Oa6Oa7Oa8Oa9Ob0Ob1Ob2Ob3Ob4Ob5Ob6Ob7Ob8Ob9Oc0Oc1Oc2Oc3Oc4Oc5Oc6Oc7Oc8Oc9Od0Od1Od2Od3Od4Od5Od6Od7Od8Od9Oe0Oe1Oe2Oe3Oe4Oe5Oe6Oe7Oe8Oe9Of0Of1Of2Of3Of4Of5Of6Of7Of8Of9Og0Og1Og2Og3Og4Og5Og6Og7Og8Og9Oh0Oh1Oh2Oh3Oh4Oh5Oh6Oh7Oh8Oh9Oi0Oi1Oi2Oi3Oi4Oi5Oi6Oi7Oi8Oi9Oj0Oj1Oj2Oj3Oj4Oj5Oj6Oj7Oj8Oj9Ok0Ok1Ok2Ok3Ok4Ok5Ok6Ok7Ok8Ok9Ol0Ol1Ol2Ol3Ol4Ol5Ol6Ol7Ol8Ol9Om0Om1Om2Om3Om4Om5Om6Om7Om8Om9On0On1On2On3On4On5On6On7On8On9Oo0Oo1Oo2Oo3Oo4Oo5Oo6Oo7Oo8Oo9Op0Op1Op2Op3Op4Op5Op6Op7Op8Op9Oq0Oq1Oq2Oq3Oq4Oq5Oq6Oq7Oq8Oq9Or0Or1Or2Or3Or4Or5Or6Or7Or8Or9Os0Os1Os2Os3Os4Os5Os6Os7Os8Os9Ot0Ot1Ot2Ot3Ot4Ot5Ot6Ot7Ot8Ot9Ou0Ou1Ou2Ou3Ou4Ou5Ou6Ou7Ou8Ou9Ov0Ov1Ov2Ov3Ov4Ov5Ov6Ov7Ov8Ov9Ow0Ow1Ow2Ow3Ow4Ow5Ow6Ow7Ow8Ow9Ox0Ox1Ox2Ox3Ox4Ox5Ox6Ox7Ox8Ox9Oy0Oy1Oy2Oy3Oy4Oy5Oy6Oy7Oy8Oy9Oz0Oz1Oz2Oz3Oz4Oz5Oz6Oz7Oz8Oz9Pa0Pa1Pa2Pa3Pa4Pa5Pa6Pa7Pa8Pa9Pb0Pb1Pb2Pb3Pb4Pb5Pb6Pb7Pb8Pb9Pc0Pc1Pc2Pc3Pc4Pc5Pc6Pc7Pc8Pc9Pd0Pd1Pd2Pd3Pd4Pd5Pd6Pd7Pd8Pd9Pe0Pe1Pe2Pe3Pe4Pe5Pe6Pe7Pe8Pe9Pf0Pf1Pf2Pf3Pf4Pf5Pf6Pf7Pf8Pf9Pg0Pg1Pg2Pg3Pg4Pg5Pg6Pg7Pg8Pg9Ph0Ph1Ph2Ph3Ph4Ph5Ph6Ph7Ph8Ph9Pi0Pi1Pi2Pi3Pi4Pi5Pi6Pi7Pi8Pi9Pj0Pj1Pj2Pj3Pj4Pj5P
j6Pj7Pj8Pj9Pk0Pk1Pk2Pk3Pk4Pk5Pk6Pk7Pk8Pk9Pl0Pl1Pl2Pl3Pl4Pl5Pl6Pl7Pl8Pl9Pm0Pm1Pm2Pm3Pm4Pm5Pm6Pm7Pm8Pm9Pn0Pn1Pn2Pn3Pn4Pn5Pn6Pn7Pn8Pn9Po0Po1Po2Po3Po4Po5Po6Po7Po8Po9Pp0Pp1Pp2Pp3Pp4Pp5Pp6Pp7Pp8Pp9Pq0Pq1Pq2Pq3Pq4Pq5Pq6Pq7Pq8Pq9Pr0Pr1Pr2Pr3Pr4Pr5Pr6Pr7Pr8Pr9Ps0Ps1Ps2Ps3Ps4Ps5Ps6Ps7Ps8Ps9Pt0Pt1Pt2Pt3Pt4Pt5Pt6Pt7Pt8Pt9Pu0Pu1Pu2Pu3Pu4Pu5Pu6Pu7Pu8Pu9Pv0Pv1Pv2Pv3Pv4Pv5Pv6Pv7Pv8Pv9Pw0Pw1Pw2Pw3Pw4Pw5Pw6Pw7Pw8Pw9Px0Px1Px2Px3Px4Px5Px6Px7Px8Px9Py0Py1Py2Py3Py4Py5Py6Py7Py8Py9Pz0Pz1Pz2Pz3Pz4Pz5Pz6Pz7Pz8Pz9Qa0Qa1Qa2Qa3Qa4Qa5Qa6Qa7Qa8Qa9Qb0Qb1Qb2Qb3Qb4Qb5Qb6Qb7Qb8Qb9Qc0Qc1Qc2Qc3Qc4Qc5Qc6Qc7Qc8Qc9Qd0Qd1Qd2Qd3Qd4Qd5Qd6Qd7Qd8Qd9Qe0Qe1Qe2Qe3Qe4Qe5Qe6Qe7Qe8Qe9Qf0Qf1Qf2Qf3Qf4Qf5Qf6Qf7Qf8Qf9Qg0Qg1Qg2Qg3Qg4Qg5Qg6Qg7Qg8Qg9Qh0Qh1Qh2Qh3Qh4Qh5Qh6Qh7Qh8Qh9Qi0Qi1Qi2Qi3Qi4Qi5Qi6Qi7Qi8Qi9Qj0Qj1Qj2Qj3Qj4Qj5Qj6Qj7Qj8Qj9Qk0Qk1Qk2Qk3Qk4Qk5Qk6Qk7Qk8Qk9Ql0Ql1Ql2Ql3Ql4Ql5Ql6Ql7Ql8Ql9Qm0Qm1Qm2Qm3Qm4Qm5Qm6Qm7Qm8Qm9Qn0Qn1Qn2Qn3Qn4Qn5Qn6Qn7Qn8Qn9Qo0Qo1Qo2Qo3Qo4Qo5Qo6Qo7Qo8Qo9Qp0Qp1Qp2Qp3Qp4Qp5Qp6Qp7Qp8Qp9Qq0Qq1Qq2Qq3Qq4Qq5Qq6Qq7Qq8Qq9Qr0Qr1Qr2Qr3Qr4Qr5Qr6Qr7Qr8Qr9Qs0Qs1Qs2Qs3Qs4Qs5Qs6Qs7Qs8Qs9Qt0Qt1Qt2Qt3Qt4Qt5Qt6Qt7Qt8Qt9Qu0Qu1Qu2Qu3Qu4Qu5Qu6Qu7Qu8Qu9Qv0Qv1Qv2Qv3Qv4Qv5Qv6Qv7Qv8Qv9Qw0Qw1Qw2Qw3Qw4Qw5Qw6Qw7Qw8Qw9Qx0Qx1Qx2Qx3Qx4Qx5Qx6Qx7Qx8Qx9Qy0Qy1Qy2Qy3Qy4Qy5Qy6Qy7Qy8Qy9Qz0Qz1Qz2Qz3Qz4Qz5Qz6Qz7Qz8Qz9Ra0Ra1Ra2Ra3Ra4Ra5Ra6Ra7Ra8Ra9Rb0Rb1Rb2Rb3Rb4Rb5Rb6Rb7Rb8Rb9Rc0Rc1Rc2Rc3Rc4Rc5Rc6Rc7Rc8Rc9Rd0Rd1Rd2Rd3Rd4Rd5Rd6Rd7Rd8Rd9Re0Re1Re2Re3Re4Re5Re6Re7Re8Re9Rf0Rf1Rf2Rf3Rf4Rf5Rf6Rf7Rf8Rf9Rg0Rg1Rg2Rg3Rg4Rg5Rg6Rg7Rg8Rg9Rh0Rh1Rh2Rh3Rh4Rh5Rh6Rh7Rh8Rh9Ri0Ri1Ri2Ri3Ri4Ri5Ri6Ri7Ri8Ri9Rj0Rj1Rj2Rj3Rj4Rj5Rj6Rj7Rj8Rj9Rk0Rk1Rk2Rk3Rk4Rk5Rk6Rk7Rk8Rk9Rl0Rl1Rl2Rl3Rl4Rl5Rl6Rl7Rl8Rl9Rm0Rm1Rm2Rm3Rm4Rm5Rm6Rm7Rm8Rm9Rn0Rn1Rn2Rn3Rn4Rn5Rn6Rn7Rn8Rn9Ro0Ro1Ro2Ro3Ro4Ro5Ro6Ro7Ro8Ro9Rp0Rp1Rp2Rp3Rp4Rp5Rp6Rp7Rp8Rp9Rq0Rq1Rq2Rq3Rq4Rq5Rq6Rq7Rq8Rq9Rr0Rr1Rr2Rr3Rr4Rr5Rr6Rr7Rr8Rr9Rs0Rs1Rs2Rs3Rs4Rs5Rs6Rs7Rs8Rs9Rt0Rt1Rt2Rt3Rt4Rt5Rt6Rt7Rt8Rt9Ru0Ru1Ru2Ru3Ru4Ru5Ru6Ru7Ru8Ru9Rv0Rv1Rv2Rv3Rv4Rv5Rv6Rv7Rv8Rv9Rw0Rw1Rw2Rw3Rw4Rw5Rw6Rw7Rw8Rw9Rx0Rx1Rx2Rx3Rx4Rx5Rx6Rx7Rx8Rx9Ry0Ry1Ry2
Ry3Ry4Ry5Ry6Ry7Ry8Ry9Rz0Rz1Rz2Rz3Rz4Rz5Rz6Rz7Rz8Rz9Sa0Sa1Sa2Sa3Sa4Sa5Sa6Sa7Sa8Sa9Sb0Sb1Sb2Sb3Sb4Sb5Sb6Sb7Sb8Sb9Sc0Sc1Sc2Sc3Sc4Sc5Sc6Sc7Sc8Sc9Sd0Sd1Sd2Sd3Sd4Sd5Sd6Sd7Sd8Sd9Se0Se1Se2Se3Se4Se5Se6Se7Se8Se9Sf0Sf1Sf2Sf3Sf4Sf5Sf6Sf7Sf8Sf9Sg0Sg1Sg2Sg3Sg4Sg5Sg6Sg7Sg8Sg9Sh0Sh1Sh2Sh3Sh4Sh5Sh6Sh7Sh8Sh9Si0Si1Si2Si3Si4Si5Si6Si7Si8Si9Sj0Sj1Sj2Sj3Sj4Sj5Sj6Sj7Sj8Sj9Sk0Sk1Sk2Sk3Sk4Sk5Sk6Sk7Sk8Sk9Sl0Sl1Sl2Sl3Sl4Sl5Sl6Sl7Sl8Sl9Sm0Sm1Sm2Sm3Sm4Sm5Sm6Sm7Sm8Sm9Sn0Sn1Sn2Sn3Sn4Sn5Sn6Sn7Sn8Sn9So0So1So2So3So4So5So6So7So8So9Sp0Sp1Sp2Sp3Sp4Sp5Sp6Sp7Sp8Sp9Sq0Sq1Sq2Sq3Sq4Sq5Sq6Sq7Sq8Sq9Sr0Sr1Sr2Sr3Sr4Sr5Sr6Sr7Sr8Sr9Ss0Ss1Ss2Ss3Ss4Ss5Ss6Ss7Ss8Ss9St0St1St2St3St4St5St6St7St8St9Su0Su1Su2Su3Su4Su5Su6Su7Su8Su9Sv0Sv1Sv2Sv3Sv4Sv5Sv6Sv7Sv8Sv9Sw0Sw1Sw2Sw3Sw4Sw5Sw6Sw7Sw8Sw9Sx0Sx1Sx2Sx3Sx4Sx5Sx6Sx7Sx8Sx9Sy0Sy1Sy2Sy3Sy4Sy5Sy6Sy7Sy8Sy9Sz0Sz1Sz2Sz3Sz4Sz5Sz6Sz7Sz8Sz9Ta0Ta1Ta2Ta3Ta4Ta5Ta6Ta7Ta8Ta9Tb0Tb1Tb2Tb3Tb4Tb5Tb6Tb7Tb8Tb9Tc0Tc1Tc2Tc3Tc4Tc5Tc6Tc7Tc8Tc9Td0Td1Td2Td3Td4Td5Td6Td7Td8Td9Te0Te1Te2Te3Te4Te5Te6Te7Te8Te9Tf0Tf1Tf2Tf3Tf4Tf5Tf6Tf7Tf8Tf9Tg0Tg1Tg2Tg3Tg4Tg5Tg6Tg7Tg8Tg9Th0Th1Th2Th3Th4Th5Th6Th7Th8Th9Ti0Ti1Ti2Ti3Ti4Ti5Ti6Ti7Ti8Ti9Tj0Tj1Tj2Tj3Tj4Tj5Tj6Tj7Tj8Tj9Tk0Tk1Tk2Tk3Tk4Tk5Tk6Tk7Tk8Tk9Tl0Tl1Tl2Tl3Tl4Tl5Tl6Tl7Tl8Tl9Tm0Tm1Tm2Tm3Tm4Tm5Tm6Tm7Tm8Tm9Tn0Tn1Tn2Tn3Tn4Tn5Tn6Tn7Tn8Tn9To0To1To2To3To4To5To6To7To8To9Tp0Tp1Tp2Tp3Tp4Tp5Tp6Tp7Tp8Tp9Tq0Tq1Tq2Tq3Tq4Tq5Tq6Tq7Tq8Tq9Tr0Tr1Tr2Tr3Tr4Tr5Tr6Tr7Tr8Tr9Ts0Ts1Ts2Ts3Ts4Ts5Ts6Ts7Ts8Ts9Tt0Tt1Tt2Tt3Tt4Tt5Tt6Tt7Tt8Tt9Tu0Tu1Tu2Tu3Tu4Tu5Tu6Tu7Tu8Tu9Tv0Tv1Tv2Tv3Tv4Tv5Tv6Tv7Tv8Tv9Tw0Tw1Tw2Tw3Tw4Tw5Tw6Tw7Tw8Tw9Tx0Tx1Tx2Tx3Tx4Tx5Tx6Tx7Tx8Tx9Ty0Ty1Ty2Ty3Ty4Ty5Ty6Ty7Ty8Ty9Tz0Tz1Tz2Tz3Tz4Tz5Tz6Tz7Tz8Tz9Ua0Ua1Ua2Ua3Ua4Ua5Ua6Ua7Ua8Ua9Ub0Ub1Ub2Ub3Ub4Ub5Ub6Ub7Ub8Ub9Uc0Uc1Uc2Uc3Uc4Uc5Uc6Uc7Uc8Uc9Ud0Ud1Ud2Ud3Ud4Ud5Ud6Ud7Ud8Ud9Ue0Ue1Ue2Ue3Ue4Ue5Ue6Ue7Ue8Ue9Uf0Uf1Uf2Uf3Uf4Uf5Uf6Uf7Uf8Uf9Ug0Ug1Ug2Ug3Ug4Ug5Ug6Ug7Ug8Ug9Uh0Uh1Uh2Uh3Uh4Uh5Uh6Uh7Uh8Uh9Ui0Ui1Ui2Ui3Ui4Ui5Ui6Ui7Ui8Ui9Uj0Uj1Uj2Uj3Uj4Uj5Uj6Uj7Uj8Uj9Uk0Uk1Uk2Uk3Uk4Uk5Uk6Uk7Uk8Uk9Ul0Ul1Ul2Ul3Ul4Ul5Ul6Ul7Ul8Ul9Um0Um1Um2Um3Um4Um5Um6Um7Um8Um
9Un0Un1Un2Un3Un4Un5Un6Un7Un8Un9Uo0Uo1Uo2Uo3Uo4Uo5Uo6Uo7Uo8Uo9Up0Up1Up2Up3Up4Up5Up6Up7Up8Up9Uq0Uq1Uq2Uq3Uq4Uq5Uq6Uq7Uq8Uq9Ur0Ur1Ur2Ur3Ur4Ur5Ur6Ur7Ur8Ur9Us0Us1Us2Us3Us4Us5Us6Us7Us8Us9Ut0Ut1Ut2Ut3Ut4Ut5Ut6Ut7Ut8Ut9Uu0Uu1Uu2Uu3Uu4Uu5Uu6Uu7Uu8Uu9Uv0Uv1Uv2Uv3Uv4Uv5Uv6Uv7Uv8Uv9Uw0Uw1Uw2Uw3Uw4Uw5Uw6Uw7Uw8Uw9Ux0Ux1Ux2Ux3Ux4Ux5Ux6Ux7Ux8Ux9Uy0Uy1Uy2Uy3Uy4Uy5Uy6Uy7Uy8Uy9Uz0Uz1Uz2Uz3Uz4Uz5Uz6Uz7Uz8Uz9Va0Va1Va2Va3Va4Va5Va6Va7Va8Va9Vb0Vb1Vb2Vb3Vb4Vb5Vb6Vb7Vb8Vb9Vc0Vc1Vc2Vc3Vc4Vc5Vc6Vc7Vc8Vc9Vd0Vd1Vd2Vd3Vd4Vd5Vd6Vd7Vd8Vd9Ve0Ve1Ve2Ve3Ve4Ve5Ve6Ve7Ve8Ve9Vf0Vf1Vf2Vf3Vf4Vf5Vf6Vf7Vf8Vf9Vg0Vg1Vg2Vg3Vg4Vg5Vg6Vg7Vg8Vg9Vh0Vh1Vh2Vh3Vh4Vh5Vh6Vh7Vh8Vh9Vi0Vi1Vi2Vi3Vi4Vi5Vi6Vi7Vi8Vi9Vj0Vj1Vj2Vj3Vj4Vj5Vj6Vj7Vj8Vj9Vk0Vk1Vk2Vk3Vk4Vk5Vk6Vk7Vk8Vk9Vl0Vl1Vl2Vl3Vl4Vl5Vl6Vl7Vl8Vl9Vm0Vm1Vm2Vm3Vm4Vm5Vm6Vm7Vm8Vm9Vn0Vn1Vn2Vn3Vn4Vn5Vn6Vn7Vn8Vn9Vo0Vo1Vo2Vo3Vo4Vo5Vo6Vo7Vo8Vo9Vp0Vp1Vp2Vp3Vp4Vp5Vp6Vp7Vp8Vp9Vq0Vq1Vq2Vq3Vq4Vq5Vq6Vq7Vq8Vq9Vr0Vr1Vr2Vr3Vr4Vr5Vr6Vr7Vr8Vr9Vs0Vs1Vs2Vs3Vs4Vs5Vs6Vs7Vs8Vs9Vt0Vt1Vt2Vt3Vt4Vt5Vt6Vt7Vt8Vt9Vu0Vu1Vu2Vu3Vu4Vu5Vu6Vu7Vu8Vu9Vv0Vv1Vv2Vv3Vv4Vv5Vv6Vv7Vv8Vv9Vw0Vw1Vw2Vw3Vw4Vw5Vw6Vw7Vw8Vw9Vx0Vx1Vx2Vx3Vx4Vx5Vx6Vx7Vx8Vx9Vy0Vy1Vy2Vy3Vy4Vy5Vy6Vy7Vy8Vy9Vz0Vz1Vz2Vz3Vz4Vz5Vz6Vz7Vz8Vz9Wa0Wa1Wa2Wa3Wa4Wa5Wa6Wa7Wa8Wa9Wb0Wb1Wb2Wb3Wb4Wb5Wb6Wb7Wb8Wb9Wc0Wc1Wc2Wc3Wc4Wc5Wc6Wc7Wc8Wc9Wd0Wd1Wd2Wd3Wd4Wd5Wd6Wd7Wd8Wd9We0We1We2We3We4We5We6We7We8We9Wf0Wf1Wf2Wf3Wf4Wf5Wf6Wf7Wf8Wf9Wg0Wg1Wg2Wg3Wg4Wg5Wg6Wg7Wg8Wg9Wh0Wh1Wh2Wh3Wh4Wh5Wh6Wh7Wh8Wh9Wi0Wi1Wi2Wi3Wi4Wi5Wi6Wi7Wi8Wi9Wj0Wj1Wj2Wj3Wj4Wj5Wj6Wj7Wj8Wj9Wk0Wk1Wk2Wk3Wk4Wk5Wk6Wk7Wk8Wk9Wl0Wl1Wl2Wl3Wl4Wl5Wl6Wl7Wl8Wl9Wm0Wm1Wm2Wm3Wm4Wm5Wm6Wm7Wm8Wm9Wn0Wn1Wn2Wn3Wn4Wn5Wn6Wn7Wn8Wn9Wo0Wo1Wo2Wo3Wo4Wo5Wo6Wo7Wo8Wo9Wp0Wp1Wp2Wp3Wp4Wp5Wp6Wp7Wp8Wp9Wq0Wq1Wq2Wq3Wq4Wq5Wq6Wq7Wq8Wq9Wr0Wr1Wr2Wr3Wr4Wr5Wr6Wr7Wr8Wr9Ws0Ws1Ws2Ws3Ws4Ws5Ws6Ws7Ws8Ws9Wt0Wt1Wt2Wt3Wt4Wt5Wt6Wt7Wt8Wt9Wu0Wu1Wu2Wu3Wu4Wu5Wu6Wu7Wu8Wu9Wv0Wv1Wv2Wv3Wv4Wv5Wv6Wv7Wv8Wv9Ww0Ww1Ww2Ww3Ww4Ww5Ww6Ww7Ww8Ww9Wx0Wx1Wx2Wx3Wx4Wx5Wx6Wx7Wx8Wx9Wy0Wy1Wy2Wy3Wy4Wy5Wy6Wy7Wy8Wy9Wz0Wz1Wz2Wz3Wz4Wz5Wz6Wz7Wz8Wz9Xa0Xa1Xa2Xa3Xa4Xa5Xa6Xa7Xa8Xa9Xb0Xb1Xb2Xb3Xb4Xb5X
b6Xb7Xb8Xb9Xc0Xc1Xc2Xc3Xc4Xc5Xc6Xc7Xc8Xc9Xd0Xd1Xd2Xd3Xd4Xd5Xd6Xd7Xd8Xd9Xe0Xe1Xe2Xe3Xe4Xe5Xe6Xe7Xe8Xe9Xf0Xf1Xf2Xf3Xf4Xf5Xf6Xf7Xf8Xf9Xg0Xg1Xg2Xg3Xg4Xg5Xg6Xg7Xg8Xg9Xh0Xh1Xh2Xh3Xh4Xh5Xh6Xh7Xh8Xh9Xi0Xi1Xi2Xi3Xi4Xi5Xi6Xi7Xi8Xi9Xj0Xj1Xj2Xj3Xj4Xj5Xj6Xj7Xj8Xj9Xk0Xk1Xk2Xk3Xk4Xk5Xk6Xk7Xk8Xk9Xl0Xl1Xl2Xl3Xl4Xl5Xl6Xl7Xl8Xl9Xm0Xm1Xm2Xm3Xm4Xm5Xm6Xm7Xm8Xm9Xn0Xn1Xn2Xn3Xn4Xn5Xn6Xn7Xn8Xn9Xo0Xo1Xo2Xo3Xo4Xo5Xo6Xo7Xo8Xo9Xp0Xp1Xp2Xp3Xp4Xp5Xp6Xp7Xp8Xp9Xq0Xq1Xq2Xq3Xq4Xq5Xq6Xq7Xq8Xq9Xr0Xr1Xr2Xr3Xr4Xr5Xr6Xr7Xr8Xr9Xs0Xs1Xs2Xs3Xs4Xs5Xs6Xs7Xs8Xs9Xt0Xt1Xt2Xt3Xt4Xt5Xt6Xt7Xt8Xt9Xu0Xu1Xu2Xu3Xu4Xu5Xu6Xu7Xu8Xu9Xv0Xv1Xv2Xv3Xv4Xv5Xv6Xv7Xv8Xv9Xw0Xw1Xw2Xw3Xw4Xw5Xw6Xw7Xw8Xw9Xx0Xx1Xx2Xx3Xx4Xx5Xx6Xx7Xx8Xx9Xy0Xy1Xy2Xy3Xy4Xy5Xy6Xy7Xy8Xy9Xz0Xz1Xz2Xz3Xz4Xz5Xz6Xz7Xz8Xz9Ya0Ya1Ya2Ya3Ya4Ya5Ya6Ya7Ya8Ya9Yb0Yb1Yb2Yb3Yb4Yb5Yb6Yb7Yb8Yb9Yc0Yc1Yc2Yc3Yc4Yc5Yc6Yc7Yc8Yc9Yd0Yd1Yd2Yd3Yd4Yd5Yd6Yd7Yd8Yd9Ye0Ye1Ye2Ye3Ye4Ye5Ye6Ye7Ye8Ye9Yf0Yf1Yf2Yf3Yf4Yf5Yf6Yf7Yf8Yf9Yg0Yg1Yg2Yg3Yg4Yg5Yg6Yg7Yg8Yg9Yh0Yh1Yh2Yh3Yh4Yh5Yh6Yh7Yh8Yh9Yi0Yi1Yi2Yi3Yi4Yi5Yi6Yi7Yi8Yi9Yj0Yj1Yj2Yj3Yj4Yj5Yj6Yj7Yj8Yj9Yk0Yk1Yk2Yk3Yk4Yk5Yk6Yk7Yk8Yk9Yl0Yl1Yl2Yl3Yl4Yl5Yl6Yl7Yl8Yl9Ym0Ym1Ym2Ym3Ym4Ym5Ym6Ym7Ym8Ym9Yn0Yn1Yn2Yn3Yn4Yn5Yn6Yn7Yn8Yn9Yo0Yo1Yo2Yo3Yo4Yo5Yo6Yo7Yo8Yo9Yp0Yp1Yp2Yp3Yp4Yp5Yp6Yp7Yp8Yp9Yq0Yq1Yq2Yq3Yq4Yq5Yq6Yq7Yq8Yq9Yr0Yr1Yr2Yr3Yr4Yr5Yr6Yr7Yr8Yr9Ys0Ys1Ys2Ys3Ys4Ys5Ys6Ys7Ys8Ys9Yt0Yt1Yt2Yt3Yt4Yt5Yt6Yt7Yt8Yt9Yu0Yu1Yu2Yu3Yu4Yu5Yu6Yu7Yu8Yu9Yv0Yv1Yv2Yv3Yv4Yv5Yv6Yv7Yv8Yv9Yw0Yw1Yw2Yw3Yw4Yw5Yw6Yw7Yw8Yw9Yx0Yx1Yx2Yx3Yx4Yx5Yx6Yx7Yx8Yx9Yy0Yy1Yy2Yy3Yy4Yy5Yy6Yy7Yy8Yy9Yz0Yz1Yz2Yz3Yz4Yz5Yz6Yz7Yz8Yz9Za0Za1Za2Za3Za4Za5Za6Za7Za8Za9Zb0Zb1Zb2Zb3Zb4Zb5Zb6Zb7Zb8Zb9Zc0Zc1Zc2Zc3Zc4Zc5Zc6Zc7Zc8Zc9Zd0Zd1Zd2Zd3Zd4Zd5Zd6Zd7Zd8Zd9Ze0Ze1Ze2Ze3Ze4Ze5Ze6Ze7Ze8Ze9Zf0Zf1Zf2Zf3Zf4Zf5Zf6Zf7Zf8Zf9Zg0Zg1Zg2Zg3Zg4Zg5Zg6Zg7Zg8Zg9Zh0Zh1Zh2Zh3Zh4Zh5Zh6Zh7Zh8Zh9Zi0Zi1Zi2Zi3Zi4Zi5Zi6Zi7Zi8Zi9Zj0Zj1Zj2Zj3Zj4Zj5Zj6Zj7Zj8Zj9Zk0Zk1Zk2Zk3Zk4Zk5Zk6Zk7Zk8Zk9Zl0Zl1Zl2Zl3Zl4Zl5Zl6Zl7Zl8Zl9Zm0Zm1Zm2Zm3Zm4Zm5Zm6Zm7Zm8Zm9Zn0Zn1Zn2Zn3Zn4Zn5Zn6Zn7Zn8Zn9Zo0Zo1Zo2Zo3Zo4Zo5Zo6Zo7Zo8Zo9Zp0Zp1Zp2Zp3Zp4Zp5Zp6Zp7Zp8Zp9Zq0Zq1Zq2
Zq3Zq4Zq5Zq6Zq7Zq8Zq9Zr0Zr1Zr2Zr3Zr4Zr5Zr6Zr7Zr8Zr9Zs0Zs1Zs2Zs3Zs4Zs5Zs6Zs7Zs8Zs9Zt0Zt1Zt2Zt3Zt4Zt5Zt6Zt7Zt8Zt9Zu0Zu1Zu2Zu3Zu4Zu5Zu6Zu7Zu8Zu9Zv0Zv1Zv2Zv3Zv4Zv5Zv6Zv7Zv8Zv9Zw0Zw1Zw2Zw3Zw4Zw5Zw6Zw7Zw8Zw9Zx0Zx1Zx2Zx3Zx4Zx5Zx6Zx7Zx8Zx9Zy0Zy1Zy2Zy3Zy4Zy5Zy6Zy7Zy8Zy9Zz0Zz1Zz2Zz3Zz4Zz5Zz6Zz7Zz8Zz9"
def get_pattern(size):
return pattern.find(size)
ret = get_pattern(str(sys.argv[1]))
if int(ret) < 0:
print(f"[-] pattern not found: {sys.argv[1]}")
sys.exit(0)
print("# [+] pattern offset:", str(ret))
# print(f'pattern = b"{ret}"')
| 577.611111 | 20,292 | 0.989516 | 80 | 20,794 | 257.175 | 0.625 | 0.001021 | 0.000778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.32784 | 0.004857 | 20,794 | 35 | 20,293 | 594.114286 | 0.666409 | 0.007983 | 0 | 0 | 0 | 0 | 0.987969 | 0.983797 | 0 | 1 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.083333 | 0.083333 | 0.25 | 0.25 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d25c78efa2e4de40f980d6b79870f05b3fda33eb | 12,563 | py | Python | beanie/api/shipping_centre_api.py | altoyield/python-beanieclient | 448b8dd328054eaf32dd7d0bdff700e603b5c27d | [
"Apache-2.0"
] | null | null | null | beanie/api/shipping_centre_api.py | altoyield/python-beanieclient | 448b8dd328054eaf32dd7d0bdff700e603b5c27d | [
"Apache-2.0"
] | null | null | null | beanie/api/shipping_centre_api.py | altoyield/python-beanieclient | 448b8dd328054eaf32dd7d0bdff700e603b5c27d | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Beanie ERP API
An API specification for interacting with the Beanie ERP system # noqa: E501
OpenAPI spec version: 0.8
Contact: dev@bean.ie
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from beanie.api_client import ApiClient
class ShippingCentreApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_shipping_centre(self, shipping_centres, **kwargs): # noqa: E501
"""add_shipping_centre # noqa: E501
Creates a new shipping centre in the system # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_shipping_centre(shipping_centres, async=True)
>>> result = thread.get()
:param async bool
:param ShippingCentreInput shipping_centres: Shipping centre to add to the system (required)
:return: ShippingCentre
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.add_shipping_centre_with_http_info(shipping_centres, **kwargs) # noqa: E501
else:
(data) = self.add_shipping_centre_with_http_info(shipping_centres, **kwargs) # noqa: E501
return data
def add_shipping_centre_with_http_info(self, shipping_centres, **kwargs): # noqa: E501
"""add_shipping_centre # noqa: E501
Creates a new shipping centre in the system # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.add_shipping_centre_with_http_info(shipping_centres, async=True)
>>> result = thread.get()
:param async bool
:param ShippingCentreInput shipping_centres: Shipping centre to add to the system (required)
:return: ShippingCentre
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['shipping_centres'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_shipping_centre" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'shipping_centres' is set
if ('shipping_centres' not in params or
params['shipping_centres'] is None):
raise ValueError("Missing the required parameter `shipping_centres` when calling `add_shipping_centre`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'shipping_centres' in params:
body_params = params['shipping_centres']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/shipping_centres', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ShippingCentre', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def find_shipping_centre_by_id(self, id, **kwargs):  # noqa: E501
    """Find Shipping centre by ID.

    Returns a single shipping centre if the user has access.
    Synchronous by default; when async=True is passed through kwargs the
    underlying call returns the request thread instead of the payload.

    >>> thread = api.find_shipping_centre_by_id(id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int id: ID of shipping centre to fetch (required)
    :return: ShippingCentre
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of the plain (non _with_http_info) method only want the
    # deserialized body, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both just forward to the _with_http_info
    # variant and return its result unchanged.
    return self.find_shipping_centre_by_id_with_http_info(id, **kwargs)  # noqa: E501
def find_shipping_centre_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Find Shipping centre by ID.

    Returns a single shipping centre if the user has access.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass async=True (as ``**{'async': True}``
    on Python >= 3.7, where ``async`` is a reserved word).

    >>> thread = api.find_shipping_centre_by_id_with_http_info(id, **{'async': True})
    >>> result = thread.get()

    :param async bool
    :param int id: ID of shipping centre to fetch (required)
    :return: ShippingCentre
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unexpected keyword argument
    :raises ValueError: when `id` is missing or None
    """
    all_params = ['id']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() replaces six.iteritems so this method also runs without
    # the six compatibility shim (identical behaviour on py2 and py3).
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_shipping_centre_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `find_shipping_centre_by_id`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # `async` became a reserved word in Python 3.7, so it cannot appear as a
    # literal keyword argument (`async=...` is a SyntaxError there); splat
    # these options in through a dict instead.
    call_options = {
        'async': params.get('async'),
        '_return_http_data_only': params.get('_return_http_data_only'),
        '_preload_content': params.get('_preload_content', True),
        '_request_timeout': params.get('_request_timeout'),
    }
    return self.api_client.call_api(
        '/shipping_centres/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ShippingCentre',  # noqa: E501
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        **call_options)
def find_shipping_centres(self, **kwargs):  # noqa: E501
    """All shipping centres.

    Returns all shipping centres from the system that the user has
    access to.  Synchronous by default; when async=True is passed through
    kwargs the underlying call returns the request thread instead.

    >>> thread = api.find_shipping_centres(**{'async': True})
    >>> result = thread.get()

    :param async bool
    :param list[str] tags: tags to filter by
    :param int limit: Maximum number of results to return
    :return: list[ShippingCentre]
        If the method is called asynchronously, returns the request thread.
    """
    # Only the deserialized body is wanted from this convenience wrapper.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both simply forward to the _with_http_info
    # variant and hand back whatever it returns.
    return self.find_shipping_centres_with_http_info(**kwargs)  # noqa: E501
def find_shipping_centres_with_http_info(self, **kwargs):  # noqa: E501
    """All shipping centres.

    Returns all shipping centres from the system that the user has
    access to.  This method makes a synchronous HTTP request by default.
    To make an asynchronous HTTP request, pass async=True (as
    ``**{'async': True}`` on Python >= 3.7, where ``async`` is reserved).

    >>> thread = api.find_shipping_centres_with_http_info(**{'async': True})
    >>> result = thread.get()

    :param async bool
    :param list[str] tags: tags to filter by
    :param int limit: Maximum number of results to return
    :return: list[ShippingCentre]
        If the method is called asynchronously, returns the request thread.
    :raises TypeError: on an unexpected keyword argument
    """
    all_params = ['tags', 'limit']  # noqa: E501
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # dict.items() replaces six.iteritems so this method also runs without
    # the six compatibility shim (identical behaviour on py2 and py3).
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method find_shipping_centres" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []
    if 'tags' in params:
        query_params.append(('tags', params['tags']))  # noqa: E501
        collection_formats['tags'] = 'csv'  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # `async` became a reserved word in Python 3.7, so it cannot appear as a
    # literal keyword argument (`async=...` is a SyntaxError there); splat
    # these options in through a dict instead.
    call_options = {
        'async': params.get('async'),
        '_return_http_data_only': params.get('_return_http_data_only'),
        '_preload_content': params.get('_preload_content', True),
        '_request_timeout': params.get('_request_timeout'),
    }
    return self.api_client.call_api(
        '/shipping_centres', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ShippingCentre]',  # noqa: E501
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        **call_options)
| 37.726727 | 130 | 0.613309 | 1,454 | 12,563 | 5.062586 | 0.117607 | 0.053254 | 0.022823 | 0.029344 | 0.89064 | 0.870941 | 0.844994 | 0.826518 | 0.799891 | 0.798261 | 0 | 0.017588 | 0.298496 | 12,563 | 332 | 131 | 37.840361 | 0.817656 | 0.061689 | 0 | 0.71345 | 1 | 0 | 0.16788 | 0.041838 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.023392 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
d269da4168b21eee3fd033608a1ecf9554eb3b2c | 100 | py | Python | backend/microservices/audio-generator/core/usecases/audio_order/__init__.py | MuhamedAbdalla/Automatic-Audio-Book-Based-On-Emotion-Detection | 72130ad037b900461af5be6d80b27ab29c81de5e | [
"MIT"
] | 3 | 2021-04-26T00:17:14.000Z | 2021-07-04T15:30:09.000Z | backend/microservices/audio-generator/core/usecases/audio_order/__init__.py | MuhamedAbdalla/Automatic-Audio-Book-Based-On-Emotion-Detection | 72130ad037b900461af5be6d80b27ab29c81de5e | [
"MIT"
] | null | null | null | backend/microservices/audio-generator/core/usecases/audio_order/__init__.py | MuhamedAbdalla/Automatic-Audio-Book-Based-On-Emotion-Detection | 72130ad037b900461af5be6d80b27ab29c81de5e | [
"MIT"
] | null | null | null | from .add_audio_order import *
from .get_audio_orders import *
from .update_audio_order import *
| 25 | 34 | 0.79 | 15 | 100 | 4.866667 | 0.533333 | 0.273973 | 0.438356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15 | 100 | 3 | 35 | 33.333333 | 0.858824 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
964b4d4c59338e6174ef68d4e80b1b70dd461d8c | 20,975 | py | Python | tests/test_arithmetic_opcodes.py | bezzy199991/emupy6502 | d4efe413c28e43e313f52a12e646eee8b52c3205 | [
"MIT"
] | null | null | null | tests/test_arithmetic_opcodes.py | bezzy199991/emupy6502 | d4efe413c28e43e313f52a12e646eee8b52c3205 | [
"MIT"
] | 2 | 2019-10-31T11:56:28.000Z | 2019-10-31T15:49:09.000Z | tests/test_arithmetic_opcodes.py | bezzy199991/emupy6502 | d4efe413c28e43e313f52a12e646eee8b52c3205 | [
"MIT"
] | 1 | 2019-10-31T10:21:34.000Z | 2019-10-31T10:21:34.000Z | import unittest
from unittest.mock import patch, Mock
from emupy6502.memory_controller import MemoryController
from emupy6502.registers import Registers
from emupy6502.opcodes import OpCode
def execute_adc_carry_clear(actual_opcode, expected_clocks):
    """Run one ADC variant with carry clear: 5 + 0x22 must give 0x27.

    Also checks that zero/negative/carry all end up clear even though
    zero and negative start out set.
    """
    op = OpCode()
    regs = Registers()
    regs.accumulator = 5
    regs.zero_flag = True
    regs.negative_flag = True

    with patch.object(MemoryController, 'read') as memory_stub:
        # NOTE(review): patch.object yields the mock for the `read`
        # attribute, and that mock is then passed as the *whole* memory
        # controller — the CPU's memory_controller.read(...) calls land on
        # its auto-created .read child mock, which is why the value is
        # configured on memory_stub.read below.
        memory_stub.read.return_value = 0x22
        regs.pc += 1  # fake the cpu having already fetched the opcode byte
        cycles = op.execute(actual_opcode, regs, memory_stub)
        assert cycles == expected_clocks
        assert regs.accumulator == 0x27
        assert not regs.zero_flag
        assert not regs.negative_flag
        assert not regs.carry_flag
def execute_adc_carry_set(actual_opcode, expected_clocks):
    """Run one ADC variant with carry set: 5 + 0x22 + 1 must give 0x28.

    The incoming carry is consumed, so all three flags end up clear.
    """
    op = OpCode()
    regs = Registers()
    regs.accumulator = 5
    regs.zero_flag = True
    regs.negative_flag = True
    regs.carry_flag = True

    with patch.object(MemoryController, 'read') as memory_stub:
        # The patched read-mock doubles as the memory controller object;
        # the CPU's read() calls hit its .read child mock.
        memory_stub.read.return_value = 0x22
        regs.pc += 1  # fake the cpu having already fetched the opcode byte
        cycles = op.execute(actual_opcode, regs, memory_stub)
        assert cycles == expected_clocks
        assert regs.accumulator == 0x28
        assert not regs.zero_flag
        assert not regs.negative_flag
        assert not regs.carry_flag
def execute_adc_should_set_carry(actual_opcode, expected_clocks):
    """Run one ADC variant where 1 + 0xff wraps to 0, setting carry + zero."""
    op = OpCode()
    regs = Registers()
    regs.accumulator = 1
    regs.zero_flag = False
    regs.negative_flag = True

    with patch.object(MemoryController, 'read') as memory_stub:
        # The patched read-mock doubles as the memory controller object;
        # the CPU's read() calls hit its .read child mock.
        memory_stub.read.return_value = 0xff
        regs.pc += 1  # fake the cpu having already fetched the opcode byte
        cycles = op.execute(actual_opcode, regs, memory_stub)
        assert cycles == expected_clocks
        assert regs.accumulator == 0
        assert regs.zero_flag
        assert not regs.negative_flag
        assert regs.carry_flag
# Thin wrappers: drive the shared execute_adc_* helpers once per addressing
# mode / cycle count (0x69 = ADC immediate, 2 cycles; 0x65 = ADC zero page,
# 3 cycles).
def test_execute_adc_immediate_carry_clear():
    execute_adc_carry_clear(0x69, 2)


def test_execute_adc_immediate_carry_set():
    execute_adc_carry_set(0x69, 2)


def test_execute_adc_immediate_should_set_carry():
    execute_adc_should_set_carry(0x69, 2)


def test_execute_adc_zeropage_carry_clear():
    execute_adc_carry_clear(0x65, 3)


def test_execute_adc_zeropage_carry_set():
    execute_adc_carry_set(0x65, 3)


def test_execute_adc_zeropage_should_set_carry():
    execute_adc_should_set_carry(0x65, 3)
# No need to test all combinations of carry, uses same code as
# other immediate addressing modes
def test_execute_adc_immediate_zp_x():
    """ADC zero page,X (0x75): 4 cycles, 5 + 1 = 6."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.x_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x75 0x21 and value at [0x0024] = 1
        # (operand 0x21 + X of 3 -> zero-page address 0x24)
        mock_memory_controller.read.side_effect = [0x21, 1]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x75, registers, mock_memory_controller)
        assert count == 4
        assert registers.accumulator == 6
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False
# No need to test all combinations of carry, uses same code as
# other immediate addressing modes
def test_execute_adc_absolute():
    """ADC absolute (0x6D): 4 cycles, 5 + 1 = 6."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x6D 0x21 0x22 and value at [0x2221] = 1
        mock_memory_controller.read.side_effect = [0x21, 0x22, 1]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x6D, registers, mock_memory_controller)
        assert count == 4
        assert registers.accumulator == 6
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False


def test_execute_adc_absolute_x():
    """ADC absolute,X (0x7D): 4 cycles when no page boundary is crossed."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.x_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x7D 0x2100 and value at [0x2103] = 1
        mock_memory_controller.read.side_effect = [0, 0x21, 1]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x7D, registers, mock_memory_controller)
        assert count == 4
        assert registers.accumulator == 6
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False


def test_execute_adc_absolute_x_page_boundary():
    """ADC absolute,X (0x7D): crossing a page boundary costs a 5th cycle."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.x_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x7D 0x21ff and value at [0x2202] = 1
        mock_memory_controller.read.side_effect = [0xff, 0x21, 1]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x7D, registers, mock_memory_controller)
        assert count == 5
        assert registers.accumulator == 6
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False


def test_execute_adc_absolute_y():
    """ADC absolute,Y (0x79): 4 cycles when no page boundary is crossed."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.y_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x79 0x2100 and value at [0x2103] = 1
        mock_memory_controller.read.side_effect = [0, 0x21, 1]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x79, registers, mock_memory_controller)
        assert count == 4
        assert registers.accumulator == 6
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False


def test_execute_adc_absolute_y_page_boundary():
    """ADC absolute,Y (0x79): crossing a page boundary costs a 5th cycle."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.y_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x79 0x21ff and value at [0x2202] = 1
        mock_memory_controller.read.side_effect = [0xff, 0x21, 1]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x79, registers, mock_memory_controller)
        assert count == 5
        assert registers.accumulator == 6
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False
def test_execute_adc_indexed_indirect_x():
    """ADC (indirect,X) (0x61): 6 cycles, 4 memory reads, 5 + 3 = 8."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.x_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x61 0x03 and value at [0x06] = 0x1234, [0x1234] = 3
        mock_memory_controller.read.side_effect = [3, 0x34, 0x12, 3]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x61, registers, mock_memory_controller)
        assert count == 6
        assert mock_memory_controller.read.call_count == 4
        assert registers.accumulator == 8
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False


def test_execute_adc_indirect_indexed_y():
    """ADC (indirect),Y (0x71): 5 cycles when no page boundary is crossed."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.y_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x71 0x2a memory at 0x2a = [0x28, 0x40], [0x402B] = 3
        mock_memory_controller.read.side_effect = [0x2a, 0x28, 0x40, 3]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x71, registers, mock_memory_controller)
        assert count == 5
        assert mock_memory_controller.read.call_count == 4
        assert registers.accumulator == 8
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False


def test_execute_adc_indirect_indexed_y_page_boundary():
    """ADC (indirect),Y (0x71): crossing a page boundary costs a 6th cycle."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 5
    registers.y_index = 3
    registers.zero_flag = True
    registers.negative_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # we're mocking 0x71 0x2a memory at 0x2a = [0xfe, 0x40], [0x4101] = 3
        mock_memory_controller.read.side_effect = [0x2a, 0xfe, 0x40, 3]
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0x71, registers, mock_memory_controller)
        assert count == 6
        assert mock_memory_controller.read.call_count == 4
        assert registers.accumulator == 8
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag == False
def execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(actual_opcode, expected_clocks, mock_memory_controller, **kwargs):
    """Run one SBC variant computing 0x50 - 0xf0 with borrow 'in'.

    The caller provides a memory mock whose read(s) ultimately yield 0xf0,
    plus any extra register presets (e.g. x_index/y_index) via kwargs.
    Expects 0x5f in the accumulator with zero/negative/carry/overflow clear.
    """
    op = OpCode()
    regs = Registers()
    regs.accumulator = 0x50
    # Apply any caller-supplied register presets (index registers etc.).
    for name, value in kwargs.items():
        setattr(regs, name, value)

    # carry clear on entry -> borrow 'in': 0x50 - 0xf0 - 1
    regs.pc += 1  # fake the cpu having already fetched the opcode byte
    cycles = op.execute(actual_opcode, regs, mock_memory_controller)
    assert cycles == expected_clocks
    assert regs.accumulator == 0x5f
    assert not regs.zero_flag
    assert not regs.negative_flag
    assert not regs.carry_flag
    assert not regs.overflow_flag
# One wrapper per SBC addressing mode: each builds a Mock memory controller,
# feeds it the operand bytes for that mode, and delegates the flag checks to
# execute_sbc_borrow_in_borrow_out_no_overflow_positive_result.  The
# call_count assert pins how many memory reads the mode performs.
def test_execute_sbc_immediate_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    mock_memory_controller.read.return_value = 0xf0
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xE9, 2, mock_memory_controller)
    assert mock_memory_controller.read.call_count == 1


def test_execute_sbc_zeropage_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    # we're mocking 0xE5 0x20 and [0x20] = 0xf0
    mock_memory_controller.read.side_effect = [0x20, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xE5, 3, mock_memory_controller)
    assert mock_memory_controller.read.call_count == 2


def test_execute_sbc_zeropageX_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    # we're mocking 0xF5 0x20 and [0x23] = 0xf0
    mock_memory_controller.read.side_effect = [0x20, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xF5, 4, mock_memory_controller, x_index = 3)
    assert mock_memory_controller.read.call_count == 2


def test_execute_sbc_absolute_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    # we're mocking 0xED 0x0 0x20 and [0x2000] = 0xf0
    mock_memory_controller.read.side_effect = [0, 0x20, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xED, 4, mock_memory_controller)
    assert mock_memory_controller.read.call_count == 3


def test_execute_sbc_absoluteX_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    # we're mocking 0xFD 0x00 0x20 and [0x2003] = 0xf0
    mock_memory_controller.read.side_effect = [0x00, 0x20, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xFD, 4, mock_memory_controller, x_index = 3)
    assert mock_memory_controller.read.call_count == 3


def test_execute_sbc_absoluteX_borrow_in_borrow_out_no_overflow_positive_result_extra_cycle():
    mock_memory_controller = Mock()
    # we're mocking 0xFD 0xfe 0x20 and [0x2101] = 0xf0 (page cross: +1 cycle)
    mock_memory_controller.read.side_effect = [0xfe, 0x20, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xFD, 5, mock_memory_controller, x_index = 3)
    assert mock_memory_controller.read.call_count == 3


def test_execute_sbc_absoluteY_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    # we're mocking 0xF9 0x00 0x20 and [0x2003] = 0xf0
    mock_memory_controller.read.side_effect = [0x00, 0x20, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xF9, 4, mock_memory_controller, y_index = 3)
    assert mock_memory_controller.read.call_count == 3


def test_execute_sbc_absoluteY_borrow_in_borrow_out_no_overflow_positive_result_extra_cycle():
    mock_memory_controller = Mock()
    # we're mocking 0xF9 0xfe 0x20 and [0x2101] = 0xf0 (page cross: +1 cycle)
    mock_memory_controller.read.side_effect = [0xfe, 0x20, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xF9, 5, mock_memory_controller, y_index = 3)
    assert mock_memory_controller.read.call_count == 3


def test_execute_sbc_indirectX_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    # we're mocking 0xE1 0x20 and [0x23] = 0x1234 [0x1234] = 0xf0
    mock_memory_controller.read.side_effect = [0x20, 0x34, 0x12, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xE1, 6, mock_memory_controller, x_index = 3)
    assert mock_memory_controller.read.call_count == 4


def test_execute_sbc_indirectY_borrow_in_borrow_out_no_overflow_positive_result():
    mock_memory_controller = Mock()
    # we're mocking 0xF1 0x44 and [0x44] = 0x1234 [0x1237] = 0xf0
    mock_memory_controller.read.side_effect = [0x44, 0x34, 0x12, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xF1, 5, mock_memory_controller, y_index = 3)
    assert mock_memory_controller.read.call_count == 4


def test_execute_sbc_indirectY_borrow_in_borrow_out_no_overflow_positive_result_extra_cycle():
    mock_memory_controller = Mock()
    # we're mocking 0xF1 0x44 and [0x44] = 0x12fe [0x1301] = 0xf0 (page cross)
    mock_memory_controller.read.side_effect = [0x44, 0xfe, 0x12, 0xf0]
    execute_sbc_borrow_in_borrow_out_no_overflow_positive_result(0xF1, 6, mock_memory_controller, y_index = 3)
    assert mock_memory_controller.read.call_count == 4
def test_execute_sbc_immediate_no_borrow_in_borrow_out_overflow_negative_result():
    """SBC #imm: 0x50 - 0xb0 (80 - -80) overflows to a negative result."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 0x50
    registers.carry_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # Mocking 0xE9 0xb0 so subtracting 0x50 - 0xb0 (80 - -80)
        mock_memory_controller.read.return_value = 0xb0
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0xE9, registers, mock_memory_controller)
        assert count == 2
        assert registers.accumulator == 0xa0
        assert registers.zero_flag == False
        assert registers.negative_flag
        assert registers.carry_flag == False
        assert registers.overflow_flag


def test_execute_sbc_immediate_borrow_in_borrow_out_no_overflow_negative_result():
    """SBC #imm with borrow in: 0x50 - 0x70 - 1 wraps to 0xdf, no overflow."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 0x50

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # Mocking 0xE9 0x70 so subtracting 0x150 - 0x70 (borrow 'in')
        mock_memory_controller.read.return_value = 0x70
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0xE9, registers, mock_memory_controller)
        assert count == 2
        assert registers.accumulator == 0xdf
        assert registers.zero_flag == False
        assert registers.negative_flag
        assert registers.carry_flag == False
        assert registers.overflow_flag == False


def test_execute_sbc_immediate_no_borrow_in_no_borrow_out_no_overflow_positive_result():
    """SBC #imm: 0x50 - 0x30 = 0x20, carry stays set (no borrow out)."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 0x50
    registers.carry_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # Mocking 0xE9 0x30 so subtracting 0x50 - 0x30 (80-48 = 32)
        mock_memory_controller.read.return_value = 0x30
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0xE9, registers, mock_memory_controller)
        assert count == 2
        assert registers.accumulator == 0x20
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag
        assert registers.overflow_flag == False


def test_execute_sbc_immediate_borrow_in_borrow_out_no_overflow_negative_result_2():
    """SBC #imm with borrow in: 0xd0 - 0xf0 - 1 (-48 - -16) gives 0xdf."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 0xd0  # start with negative acc this time

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # Mocking 0xE9 0xf0 so subtracting 0xd0 - 0xf0 (-48 - -16 = -32)
        mock_memory_controller.read.return_value = 0xf0
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0xE9, registers, mock_memory_controller)
        assert count == 2
        assert registers.accumulator == 0xdf
        assert registers.zero_flag == False
        assert registers.negative_flag
        assert registers.carry_flag == False
        assert registers.overflow_flag == False


def test_execute_sbc_immediate_no_borrow_in_no_borrow_out_no_overflow_positive_result_2():
    """SBC #imm: 0xd0 - 0xb0 (-48 - -80 = 32) gives 0x20 with carry set."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 0xd0  # start with negative acc this time
    registers.carry_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # Mocking 0xE9 0xb0 so subtracting 0xd0 - 0xb0 (-48 - -80 = 32)
        mock_memory_controller.read.return_value = 0xb0
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0xE9, registers, mock_memory_controller)
        assert count == 2
        assert registers.accumulator == 0x20
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag
        assert registers.overflow_flag == False


def test_execute_sbc_immediate_borrow_in_no_borrow_out_overflow_positive_result():
    """SBC #imm with borrow in: 0xd0 - 0x70 - 1 (-48 - 112) overflows."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 0xd0  # start with negative acc this time

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # Mocking 0xE9 0x70 so subtracting 0xd0 - 0x70 (-48 - 112 = 96 (overflow))
        mock_memory_controller.read.return_value = 0x70
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0xE9, registers, mock_memory_controller)
        assert count == 2
        assert registers.accumulator == 0x5f
        assert registers.zero_flag == False
        assert registers.negative_flag == False
        assert registers.carry_flag
        assert registers.overflow_flag


def test_execute_sbc_immediate_no_borrow_in_no_borrow_out_no_overflow_negative_result():
    """SBC #imm: 0xd0 - 0x30 (-48 - 48 = -96) gives 0xa0, no overflow."""
    opcode = OpCode()
    registers = Registers()
    registers.accumulator = 0xd0  # start with negative acc this time
    registers.carry_flag = True

    with patch.object(MemoryController, 'read') as mock_memory_controller:
        # Mocking 0xE9 0x30 so subtracting 0xd0 - 0x30 (-48 - 48 = -96)
        mock_memory_controller.read.return_value = 0x30
        registers.pc += 1  # need to fake the cpu reading the opcode
        count = opcode.execute(0xE9, registers, mock_memory_controller)
        assert count == 2
        assert registers.accumulator == 0xa0
        assert registers.zero_flag == False
        assert registers.negative_flag
        assert registers.carry_flag
        assert registers.overflow_flag == False
96570c2230051acc3914d5fb156b3f9d4947db3a | 11,349 | py | Python | kickstarter_django/kickstarter/migrations/0001_initial.py | pratyaymodi/kickstarter | a9bacdcdc0b44482bce57bf1d69a05d8c3c926c8 | [
"MIT"
] | null | null | null | kickstarter_django/kickstarter/migrations/0001_initial.py | pratyaymodi/kickstarter | a9bacdcdc0b44482bce57bf1d69a05d8c3c926c8 | [
"MIT"
] | null | null | null | kickstarter_django/kickstarter/migrations/0001_initial.py | pratyaymodi/kickstarter | a9bacdcdc0b44482bce57bf1d69a05d8c3c926c8 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-05 20:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial auto-generated schema snapshot for the kickstarter app.

    Every model below is declared with ``'managed': False``, so Django
    records the model state for the ORM but will never create, alter, or
    drop the underlying tables -- they are expected to already exist in
    the database under the ``db_table`` names given in each ``options``.
    """
    initial = True
    dependencies = [
    ]
    # NOTE: auto-generated (Django 1.10.5); avoid hand-editing field
    # definitions here -- regenerate instead if the schema changes.
    operations = [
        migrations.CreateModel(
            name='CategoryStatusCount',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
                ('total', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'category_status_count',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CategoryStatusPercent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('category', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'category_status_percent',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CountryStatusCount',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('country', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
                ('total', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'country_status_count',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CountryStatusPercent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('country', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'country_status_percent',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Kickstarter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.TextField(blank=True, null=True)),
                ('disable_communication', models.TextField(blank=True, null=True)),
                ('location_type', models.TextField(blank=True, null=True)),
                ('category_parent_id', models.IntegerField(blank=True, null=True)),
                ('sub_category', models.TextField(blank=True, null=True)),
                ('usd_pledged', models.TextField(blank=True, null=True)),
                ('launched_at', models.TextField(blank=True, null=True)),
                ('category_slug', models.TextField(blank=True, null=True)),
                ('currency', models.TextField(blank=True, null=True)),
                ('deadline', models.TextField(blank=True, null=True)),
                ('spotlight', models.TextField(blank=True, null=True)),
                ('currency_trailing_code', models.TextField(blank=True, null=True)),
                ('displayable_name', models.TextField(blank=True, null=True)),
                ('state_changed_at', models.TextField(blank=True, null=True)),
                ('goal', models.TextField(blank=True, null=True)),
                ('category', models.TextField(blank=True, null=True)),
                ('city', models.TextField(blank=True, null=True)),
                ('name', models.TextField(blank=True, null=True)),
                ('creator_name', models.TextField(blank=True, null=True)),
                ('staff_pick', models.TextField(blank=True, null=True)),
                ('country', models.TextField(blank=True, null=True)),
                ('pledged', models.TextField(blank=True, null=True)),
                ('creator', models.TextField(blank=True, null=True)),
                ('location_code', models.TextField(blank=True, null=True)),
                ('slug', models.TextField(blank=True, null=True)),
                ('state', models.TextField(blank=True, null=True)),
                ('static_usd_rate', models.TextField(blank=True, null=True)),
                ('location', models.TextField(blank=True, null=True)),
                ('backers_count', models.TextField(blank=True, null=True)),
                ('currency_symbol', models.TextField(blank=True, null=True)),
                ('category_id', models.IntegerField(blank=True, null=True)),
                ('created_at', models.TextField(blank=True, null=True)),
                ('blurb', models.TextField(blank=True, null=True)),
                ('category_position', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'ks_project',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='MonthStatusCount',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('month', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
                ('total', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'monthly_status_count',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='MonthStatusPercent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('month', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'monthly_status_percent',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='Projects',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.TextField(blank=True, null=True)),
                ('creator_name', models.TextField(blank=True, null=True)),
                ('blurb', models.TextField(blank=True, null=True)),
                ('backers_count', models.TextField(blank=True, null=True)),
                ('goal', models.TextField(blank=True, null=True)),
                ('pledged', models.TextField(blank=True, null=True)),
                ('percent_of_goal', models.TextField(blank=True, null=True)),
                ('status', models.TextField(blank=True, null=True)),
                ('category', models.TextField(blank=True, null=True)),
                ('sub_category', models.TextField(blank=True, null=True)),
                ('launched_at', models.DateTimeField(blank=True, null=True)),
                ('deadline', models.DateTimeField(blank=True, null=True)),
                ('created_at', models.DateTimeField(blank=True, null=True)),
                ('location', models.TextField(blank=True, null=True)),
                ('country', models.TextField(blank=True, null=True)),
                ('state', models.TextField(blank=True, null=True)),
                ('city', models.TextField(blank=True, null=True)),
            ],
            options={
                'db_table': 'projects',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='SubCategoryStatusCount',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sub_category', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
                ('total', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'sub_category_status_count',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='SubCategoryStatusPercent',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sub_category', models.TextField(blank=True, null=True)),
                ('canceled', models.IntegerField(blank=True, null=True)),
                ('failed', models.IntegerField(blank=True, null=True)),
                ('live', models.IntegerField(blank=True, null=True)),
                ('successful', models.IntegerField(blank=True, null=True)),
                ('suspended', models.IntegerField(blank=True, null=True)),
            ],
            options={
                'db_table': 'sub_category_status_percent',
                'managed': False,
            },
        ),
    ]
| 51.586364 | 114 | 0.548683 | 1,056 | 11,349 | 5.805871 | 0.098485 | 0.151199 | 0.218398 | 0.285598 | 0.917142 | 0.901974 | 0.885826 | 0.740173 | 0.726472 | 0.708041 | 0 | 0.002143 | 0.301172 | 11,349 | 219 | 115 | 51.821918 | 0.770899 | 0.005992 | 0 | 0.701422 | 1 | 0 | 0.128303 | 0.022167 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.009479 | 0 | 0.028436 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
9697087ca6fbad0d6844ce4758b1c45fbc63a670 | 197 | py | Python | hiyobipy/fk.py | SaidBySolo/HiyobiPy | e38f72c79b6766ebb52b5f95c76f658f5e0c8409 | [
"MIT"
] | 4 | 2020-10-11T15:19:51.000Z | 2022-01-07T08:13:36.000Z | hiyobipy/fk.py | Saebasol/HiyobiPy | e38f72c79b6766ebb52b5f95c76f658f5e0c8409 | [
"MIT"
] | null | null | null | hiyobipy/fk.py | Saebasol/HiyobiPy | e38f72c79b6766ebb52b5f95c76f658f5e0c8409 | [
"MIT"
class Response:
    """Wrap a raw response mapping and expose its keys as attributes.

    Missing keys resolve to ``None`` rather than raising AttributeError.
    """

    def __init__(self, response):
        self.data = response

    def __getattr__(self, name):
        # Only reached for names not found on the instance, so this maps
        # unknown attribute access onto keys of the wrapped mapping.
        return self.data.get(name)

    def __dict__(self):
        # NOTE(review): defining __dict__ as a method shadows the normal
        # instance-dict descriptor (vars(instance) will not return a dict);
        # kept as-is to preserve the existing public interface -- confirm
        # whether any caller actually invokes resp.__dict__().
        return self.data
96b2ea410424ed410149a1b5253621c817538c25 | 16,480 | py | Python | tyrell/venv/lib/python3.8/site-packages/_rinterface_cffi_abi.py | YuehanLee/CS190I | c5e3dca9f3b936a15b254abfd0c245c470e8c27e | [
"Apache-2.0"
] | null | null | null | tyrell/venv/lib/python3.8/site-packages/_rinterface_cffi_abi.py | YuehanLee/CS190I | c5e3dca9f3b936a15b254abfd0c245c470e8c27e | [
"Apache-2.0"
] | null | null | null | tyrell/venv/lib/python3.8/site-packages/_rinterface_cffi_abi.py | YuehanLee/CS190I | c5e3dca9f3b936a15b254abfd0c245c470e8c27e | [
"Apache-2.0"
] | null | null | null | # auto-generated file
import _cffi_backend
ffi = _cffi_backend.FFI('_rinterface_cffi_abi',
_version = 0x2601,
_types = b'\x00\x00\x21\x0D\x00\x00\x00\x0F\x00\x00\x44\x0D\x00\x01\x23\x03\x00\x00\x00\x0F\x00\x00\x44\x0D\x00\x01\x17\x03\x00\x01\x43\x03\x00\x00\x00\x0F\x00\x01\x22\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x5D\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x64\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x28\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x28\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x6C\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x9D\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x01\x1C\x03\x00\x01\x20\x03\x00\x01\x21\x03\x00\x00\x22\x11\x00\x00\x23\x11\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x01\x27\x03\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x28\x11\x00\x00\xBE\x03\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x07\x01\x00\x00\x36\x03\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x07\x01\x00\x01\x27\x03\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x07\x01\x00\x00\x28\x03\x00\x00\x3B\x11\x00\x00\x28\x11\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x07\x01\x00\x00\x3B\x11\x00\x00\x3B\x11\x00\x00\x28\x11\x00\x00\x01\x0B\x00\x00\x28\x11\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x03\x11\x00\x00\x04\x0B\x00\x00\x44\x11\x00\x00\x44\x11\x00\x00\x28\x11\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x2D\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x0E\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x09\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x28\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x28\x11\x00\x00\x03\x0B\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x28\x11\x00\x00\x07\x01\x00\x00\x64\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\xC9\x03\x00\x00\x07\x11\x00\x00\xB9\x03\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\
x00\x44\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x07\x01\x00\x01\x1F\x03\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x44\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x2D\x03\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x03\x11\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x04\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x08\x01\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\xD6\x03\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x03\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x00\x2C\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\xBE\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x00\x07\x0D\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x02\x0B\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x28\x11\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x28\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x28\x11\x00\x00\x07\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x07\x01\x00\x00\x32\x11\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\
x00\x1B\x01\x00\x00\x0E\x01\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x07\x01\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\x00\x1B\x01\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x03\x11\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x03\x11\x00\x00\xF0\x03\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x01\x38\x03\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x07\x11\x00\x00\x00\x0F\x00\x01\x43\x0D\x00\x00\x00\x0F\x00\x00\x08\x09\x00\x01\x1E\x03\x00\x00\x09\x09\x00\x00\x00\x0B\x00\x00\x01\x09\x00\x00\x02\x09\x00\x00\x5D\x03\x00\x00\x07\x09\x00\x00\x04\x09\x00\x01\x26\x03\x00\x00\x05\x09\x00\x00\x02\x01\x00\x00\x6C\x03\x00\x00\x27\x03\x00\x00\x2A\x03\x00\x00\x34\x03\x00\x00\x39\x03\x00\x00\x3F\x03\x00\x00\x1C\x01\x00\x00\xA4\x03\x00\x00\x0A\x09\x00\x00\x0B\x09\x00\x00\x0C\x09\x00\x00\x0D\x09\x00\x00\x0E\x09\x00\x00\x0F\x09\x00\x00\x10\x09\x00\x00\x11\x09\x00\x00\x06\x09\x00\x00\x1A\x01\x00\x00\x00\x09\x00\x00\xC1\x03\x00\x00\xD8\x03\x00\x00\xDD\x03\x00\x00\xE0\x03\x00\x00\xE4\x03\x00\x00\xE9\x03\x00\x01\x0A\x03\x00\x01\x1A\x03\x00\x00\x00\x01',
_globals = (b'\xFF\xFF\xFF\x1FANYSXP',18,b'\x00\x00\x77\x23ATTRIB',0,b'\xFF\xFF\xFF\x1FBCODESXP',21,b'\xFF\xFF\xFF\x1FBUILTINSXP',8,b'\xFF\xFF\xFF\x0BBytes',0,b'\x00\x00\x77\x23CAR',0,b'\x00\x00\x77\x23CDR',0,b'\xFF\xFF\xFF\x0BCE_ANY',99,b'\xFF\xFF\xFF\x0BCE_BYTES',3,b'\xFF\xFF\xFF\x0BCE_LATIN1',2,b'\xFF\xFF\xFF\x0BCE_NATIVE',0,b'\xFF\xFF\xFF\x0BCE_SYMBOL',5,b'\xFF\xFF\xFF\x0BCE_UTF8',1,b'\xFF\xFF\xFF\x1FCHARSXP',9,b'\x00\x00\x77\x23CLOENV',0,b'\xFF\xFF\xFF\x1FCLOSXP',3,b'\x00\x00\x09\x23COMPLEX',0,b'\x00\x00\x0C\x23COMPLEX_ELT',0,b'\xFF\xFF\xFF\x1FCPLXSXP',15,b'\xFF\xFF\xFF\x0BChars',1,b'\x00\x00\xD3\x23DATAPTR',0,b'\xFF\xFF\xFF\x1FDOTSXP',17,b'\x00\x00\x77\x23ENCLOS',0,b'\x00\x00\x47\x23ENVFLAGS',0,b'\xFF\xFF\xFF\x1FENVSXP',4,b'\xFF\xFF\xFF\x1FEXPRSXP',20,b'\xFF\xFF\xFF\x1FEXTPTRSXP',22,b'\xFF\xFF\xFF\x0BFALSE',0,b'\x00\x00\x77\x23FRAME',0,b'\xFF\xFF\xFF\x1FFREESXP',31,b'\xFF\xFF\xFF\x1FFUNSXP',99,b'\x00\x00\x77\x23HASHTAB',0,b'\x00\x00\x1D\x23INTEGER',0,b'\x00\x00\x51\x23INTEGER_ELT',0,b'\xFF\xFF\xFF\x1FINTSXP',13,b'\xFF\xFF\xFF\x1FLANGSXP',6,b'\xFF\xFF\xFF\x1FLGLSXP',10,b'\xFF\xFF\xFF\x1FLISTSXP',2,b'\x00\x00\x1D\x23LOGICAL',0,b'\x00\x00\x51\x23LOGICAL_ELT',0,b'\xFF\xFF\xFF\x1FNEWSXP',30,b'\xFF\xFF\xFF\x1FNILSXP',0,b'\xFF\xFF\xFF\x0BPARSE_EOF',4,b'\xFF\xFF\xFF\x0BPARSE_ERROR',3,b'\xFF\xFF\xFF\x0BPARSE_INCOMPLETE',2,b'\xFF\xFF\xFF\x0BPARSE_NULL',0,b'\xFF\xFF\xFF\x0BPARSE_OK',1,b'\x00\x00\x77\x23PRINTNAME',0,b'\xFF\xFF\xFF\x1FPROMSXP',5,b'\x00\x00\xCC\x23RAW',0,b'\xFF\xFF\xFF\x1FRAWSXP',24,b'\x00\x00\xCF\x23RAW_ELT',0,b'\x00\x00\x16\x23REAL',0,b'\xFF\xFF\xFF\x1FREALSXP',14,b'\x00\x00\x19\x23REAL_ELT',0,b'\x00\x00\x03\x21R_BaseEnv',0,b'\x00\x00\x03\x21R_BaseNamespace',0,b'\x00\x00\x03\x21R_BlankScalarString',0,b'\x00\x00\x03\x21R_BlankString',0,b'\x00\x00\x13\x23R_CHAR',0,b'\x00\x01\x39\x21R_CStackLimit',0,b'\x00\x01\x39\x21R_CStackStart',0,b'\x00\x00\x03\x21R_ClassSymbol',0,b'\x00\x01\x1A\x23R_CleanTempDir',0,b'\x00\x00\xF0\x23R_ClearExternalPtr',0,b'\x00\x01\x1D\
x21R_Consolefile',0,b'\x00\x01\x14\x23R_DefParams',0,b'\x00\x00\x03\x21R_DimSymbol',0,b'\x00\x00\x03\x21R_EmptyEnv',0,b'\x00\x00\x02\x23R_EnvironmentIsLocked',0,b'\x00\x00\xD3\x23R_ExternalPtrAddr',0,b'\x00\x00\x07\x21R_GlobalContext',0,b'\x00\x00\x03\x21R_GlobalEnv',0,b'\x00\x00\x44\x21R_Interactive',0,b'\x00\x00\xC4\x23R_MakeExternalPtr',0,b'\x00\x00\x03\x21R_MissingArg',0,b'\x00\x00\x2D\x21R_NaInt',0,b'\x00\x00\x6C\x21R_NaN',0,b'\x00\x00\x6C\x21R_NaReal',0,b'\x00\x00\x03\x21R_NaString',0,b'\x00\x00\x03\x21R_NameSymbol',0,b'\x00\x00\x6C\x21R_NegInf',0,b'\x00\x00\x03\x21R_NilValue',0,b'\x00\x01\x1D\x21R_Outputfile',0,b'\x00\x00\x82\x23R_ParseVector',0,b'\x00\x00\x6C\x21R_PosInf',0,b'\x00\x00\xF0\x23R_PreserveObject',0,b'\x00\x01\x10\x23R_RegisterCFinalizer',0,b'\x00\x00\xF0\x23R_ReleaseObject',0,b'\x00\x01\x1A\x23R_RunExitFinalizers',0,b'\x00\x01\x14\x23R_SetParams',0,b'\x00\x00\x2D\x21R_SignalHandlers',0,b'\x00\x00\x05\x23R_ToplevelExec',0,b'\x00\x00\x03\x21R_UnboundValue',0,b'\x00\x00\x91\x23R_do_slot',0,b'\x00\x00\x9F\x23R_do_slot_assign',0,b'\x00\x01\x1A\x23R_dot_Last',0,b'\x00\x01\x1A\x23R_gc',0,b'\x00\x00\x00\x23R_getEmbeddingDllInfo',0,b'\x00\x00\x55\x23R_has_slot',0,b'\x00\x00\x7A\x23R_lsInternal',0,b'\x00\x00\x4A\x23R_nchar',0,b'\x00\x00\x20\x23R_registerRoutines',0,b'\x00\x00\xEC\x23R_set_command_line_arguments',0,b'\x00\x00\x71\x23R_tryCatchError',0,b'\x00\x00\x9A\x23R_tryEval',0,b'\x00\x01\x1A\x23Rf_KillAllDevices',0,b'\x00\x00\x5C\x23Rf_ScalarComplex',0,b'\x00\x00\x6E\x23Rf_ScalarInteger',0,b'\x00\x00\x6E\x23Rf_ScalarLogical',0,b'\x00\x00\xBD\x23Rf_ScalarRaw',0,b'\x00\x00\x6B\x23Rf_ScalarReal',0,b'\x00\x00\x77\x23Rf_ScalarString',0,b'\x00\x00\x6E\x23Rf_allocList',0,b'\x00\x00\xC0\x23Rf_allocVector',0,b'\x00\x00\x77\x23Rf_asChar',0,b'\x00\x00\x9F\x23Rf_defineVar',0,b'\x00\x00\x77\x23Rf_duplicate',0,b'\x00\x00\x7E\x23Rf_elt',0,b'\x00\x00\xE9\x23Rf_endEmbeddedR',0,b'\x00\x00\x91\x23Rf_eval',0,b'\x00\x00\x91\x23Rf_findFun',0,b'\x00\x00\x91\x23Rf_findVar',0
,b'\x00\x00\x91\x23Rf_findVarInFrame',0,b'\x00\x00\x95\x23Rf_findVarInFrame3',0,b'\x00\x00\x91\x23Rf_getAttrib',0,b'\x00\x00\x10\x23Rf_getCharCE',0,b'\x00\x00\x30\x23Rf_initEmbeddedR',0,b'\x00\x00\x30\x23Rf_initialize_R',0,b'\x00\x00\x5F\x23Rf_install',0,b'\x00\x00\x77\x23Rf_installChar',0,b'\x00\x00\x02\x23Rf_isList',0,b'\x00\x00\x02\x23Rf_isNull',0,b'\x00\x00\x02\x23Rf_isSymbol',0,b'\x00\x00\x77\x23Rf_lang1',0,b'\x00\x00\x91\x23Rf_lang2',0,b'\x00\x00\x9F\x23Rf_lang3',0,b'\x00\x00\xA4\x23Rf_lang4',0,b'\x00\x00\xAA\x23Rf_lang5',0,b'\x00\x00\xB1\x23Rf_lang6',0,b'\x00\x00\x47\x23Rf_length',0,b'\x00\x01\x1A\x23Rf_mainloop',0,b'\x00\x00\x5F\x23Rf_mkChar',0,b'\x00\x00\x62\x23Rf_mkCharCE',0,b'\x00\x00\x66\x23Rf_mkCharLenCE',0,b'\x00\x00\x5F\x23Rf_mkString',0,b'\x00\x00\x91\x23Rf_namesgets',0,b'\x00\x00\x7E\x23Rf_nthcdr',0,b'\x00\x00\x77\x23Rf_protect',0,b'\x00\x00\x9F\x23Rf_setAttrib',0,b'\x00\x00\xE9\x23Rf_unprotect',0,b'\x00\x00\x59\x23Rf_xlength',0,b'\xFF\xFF\xFF\x1FS4SXP',25,b'\xFF\xFF\xFF\x0BSA_DEFAULT',2,b'\xFF\xFF\xFF\x0BSA_NORESTORE',0,b'\xFF\xFF\xFF\x0BSA_NOSAVE',3,b'\xFF\xFF\xFF\x0BSA_RESTORE',1,b'\xFF\xFF\xFF\x0BSA_SAVE',4,b'\xFF\xFF\xFF\x0BSA_SAVEASK',5,b'\xFF\xFF\xFF\x0BSA_SUICIDE',6,b'\x00\x00\x91\x23SETCAR',0,b'\x00\x00\x91\x23SETCDR',0,b'\x00\x01\x06\x23SET_ENCLOS',0,b'\x00\x00\xF3\x23SET_ENVFLAGS',0,b'\x00\x01\x06\x23SET_FRAME',0,b'\x00\x01\x06\x23SET_HASHTAB',0,b'\x00\x00\xFC\x23SET_INTEGER_ELT',0,b'\x00\x00\xFC\x23SET_LOGICAL_ELT',0,b'\x00\x00\xF7\x23SET_REAL_ELT',0,b'\x00\x01\x01\x23SET_STRING_ELT',0,b'\x00\x00\x91\x23SET_TAG',0,b'\x00\x00\x8C\x23SET_VECTOR_ELT',0,b'\xFF\xFF\xFF\x1FSPECIALSXP',7,b'\x00\x00\xD3\x23STDVEC_DATAPTR',0,b'\x00\x00\x88\x23STRING_ELT',0,b'\xFF\xFF\xFF\x1FSTRSXP',16,b'\xFF\xFF\xFF\x1FSYMSXP',1,b'\x00\x00\x77\x23TAG',0,b'\xFF\xFF\xFF\x0BTRUE',1,b'\xFF\xFF\xFF\x1FVECSXP',19,b'\x00\x00\x88\x23VECTOR_ELT',0,b'\xFF\xFF\xFF\x1FWEAKREFSXP',23,b'\xFF\xFF\xFF\x0BWidth',2,b'\x00\x01\x40\x21ptr_R_Busy',0,b'\x00\x01\x2B\x21ptr_R_ChooseFile
',0,b'\x00\x01\x3C\x21ptr_R_CleanUp',0,b'\x00\x01\x42\x21ptr_R_ClearerrConsole',0,b'\x00\x01\x29\x21ptr_R_EditFile',0,b'\x00\x01\x2C\x21ptr_R_EditFiles',0,b'\x00\x01\x42\x21ptr_R_FlushConsole',0,b'\x00\x01\x42\x21ptr_R_ProcessEvents',0,b'\x00\x01\x2A\x21ptr_R_ReadConsole',0,b'\x00\x01\x42\x21ptr_R_ResetConsole',0,b'\x00\x01\x2D\x21ptr_R_ShowFiles',0,b'\x00\x01\x3D\x21ptr_R_ShowMessage',0,b'\x00\x01\x3D\x21ptr_R_Suicide',0,b'\x00\x01\x3E\x21ptr_R_WriteConsole',0,b'\x00\x01\x3F\x21ptr_R_WriteConsoleEx',0,b'\x00\x01\x41\x21ptr_R_addhistory',0,b'\x00\x01\x41\x21ptr_R_loadhistory',0,b'\x00\x01\x41\x21ptr_R_savehistory',0,b'\x00\x01\x2F\x21ptr_do_dataentry',0,b'\x00\x01\x2F\x21ptr_do_dataviewer',0,b'\x00\x01\x2F\x21ptr_do_selectlist',0,b'\x00\x01\x1A\x23run_Rmainloop',0,b'\x00\x01\x1A\x23setup_Rmainloop',0),
_struct_unions = ((b'\x00\x00\x01\x3A\x00\x00\x00\x03$1',b'\x00\x01\x33\x11primsxp',b'\x00\x01\x36\x11symsxp',b'\x00\x01\x32\x11listsxp',b'\x00\x01\x31\x11envsxp',b'\x00\x01\x30\x11closxp',b'\x00\x01\x34\x11promsxp'),(b'\x00\x00\x01\x20\x00\x00\x00\x02$R_CMethodDef',b'\x00\x00\x28\x11name',b'\x00\x00\xC5\x11fun',b'\x00\x00\x2D\x11numArgs',b'\x00\x01\x3B\x11types'),(b'\x00\x00\x01\x21\x00\x00\x00\x02$R_CallMethodDef',b'\x00\x00\x28\x11name',b'\x00\x00\xC5\x11fun',b'\x00\x00\x2D\x11numArgs'),(b'\x00\x00\x00\x5D\x00\x00\x00\x02$Rcomplex',b'\x00\x00\x6C\x11r',b'\x00\x00\x6C\x11i'),(b'\x00\x00\x01\x24\x00\x00\x00\x03$SEXPREC_ALIGN',b'\x00\x01\x26\x11s',b'\x00\x00\x6C\x11align'),(b'\x00\x00\x01\x26\x00\x00\x00\x02$VECTOR_SEXPREC',b'\x00\x01\x35\x11sxpinfo',b'\x00\x00\x03\x11attrib',b'\x00\x00\x03\x11gengc_next_node',b'\x00\x00\x03\x11gengc_prev_node',b'\x00\x01\x37\x11vecsxp'),(b'\x00\x00\x01\x38\x00\x00\x00\x02$structRstart',b'\x00\x00\x44\x11R_Quiet',b'\x00\x00\x44\x11R_Slave',b'\x00\x00\x44\x11R_Interactive',b'\x00\x00\x44\x11R_Verbose',b'\x00\x00\x44\x11LoadSiteFile',b'\x00\x00\x44\x11LoadInitFile',b'\x00\x00\x44\x11DebugInitFile',b'\x00\x00\xD9\x11RestoreAction',b'\x00\x00\xD9\x11SaveAction',b'\x00\x01\x2E\x11vsize',b'\x00\x01\x2E\x11nsize',b'\x00\x01\x2E\x11max_vsize',b'\x00\x01\x2E\x11max_nsize',b'\x00\x01\x2E\x11ppsize',b'\x00\x00\x2D\x11NoRenviron'),(b'\x00\x00\x01\x23\x00\x00\x00\x02SEXPREC',b'\x00\x01\x35\x11sxpinfo',b'\x00\x00\x03\x11attrib',b'\x00\x00\x03\x11gengc_next_node',b'\x00\x00\x03\x11gengc_prev_node',b'\x00\x01\x3A\x11u'),(b'\x00\x00\x01\x1C\x00\x00\x00\x10_DllInfo',),(b'\x00\x00\x01\x1E\x00\x00\x00\x10_IO_FILE',),(b'\x00\x00\x01\x30\x00\x00\x00\x02closxp_struct',b'\x00\x00\x03\x11formals',b'\x00\x00\x03\x11body',b'\x00\x00\x03\x11env'),(b'\x00\x00\x01\x31\x00\x00\x00\x02envsxp_struct',b'\x00\x00\x03\x11frame',b'\x00\x00\x03\x11enclos',b'\x00\x00\x03\x11hashtab'),(b'\x00\x00\x01\x32\x00\x00\x00\x02listsxp_struct',b'\x00\x00\x03\x11carval',b'\x00\x00\
x03\x11cdrval',b'\x00\x00\x03\x11tagval'),(b'\x00\x00\x01\x33\x00\x00\x00\x02primsxp_struct',b'\x00\x00\x2D\x11offset'),(b'\x00\x00\x01\x34\x00\x00\x00\x02promsxp_struct',b'\x00\x00\x03\x11value',b'\x00\x00\x03\x11expr',b'\x00\x00\x03\x11env'),(b'\x00\x00\x01\x35\x00\x00\x00\x02sxpinfo_struct',b'\x00\x00\xC1\x13\x00\x00\x00\x05type',b'\x00\x00\xC1\x13\x00\x00\x00\x01scalar',b'\x00\x00\xC1\x13\x00\x00\x00\x01alt',b'\x00\x00\xC1\x13\x00\x00\x00\x01obj',b'\x00\x00\xC1\x13\x00\x00\x00\x10gp',b'\x00\x00\xC1\x13\x00\x00\x00\x01mark',b'\x00\x00\xC1\x13\x00\x00\x00\x01debug',b'\x00\x00\xC1\x13\x00\x00\x00\x01trace',b'\x00\x00\xC1\x13\x00\x00\x00\x01spare',b'\x00\x00\xC1\x13\x00\x00\x00\x01gcgen',b'\x00\x00\xC1\x13\x00\x00\x00\x03gccls',b'\x00\x00\xC1\x13\x00\x00\x00\x10named',b'\x00\x00\xC1\x13\x00\x00\x00\x20extra'),(b'\x00\x00\x01\x36\x00\x00\x00\x02symsxp_struct',b'\x00\x00\x03\x11pname',b'\x00\x00\x03\x11value',b'\x00\x00\x03\x11internal'),(b'\x00\x00\x01\x37\x00\x00\x00\x02vecsxp_struct',b'\x00\x00\x0E\x11length',b'\x00\x00\x0E\x11truelength')),
_enums = (b'\x00\x00\x01\x1F\x00\x00\x00\x16$ParseStatus\x00PARSE_NULL,PARSE_OK,PARSE_INCOMPLETE,PARSE_ERROR,PARSE_EOF',b'\x00\x00\x00\x44\x00\x00\x00\x16$Rboolean\x00FALSE,TRUE',b'\x00\x00\x00\xD9\x00\x00\x00\x16$SA_TYPE\x00SA_NORESTORE,SA_RESTORE,SA_DEFAULT,SA_NOSAVE,SA_SAVE,SA_SAVEASK,SA_SUICIDE',b'\x00\x00\x00\x64\x00\x00\x00\x16$cetype_t\x00CE_NATIVE,CE_UTF8,CE_LATIN1,CE_BYTES,CE_SYMBOL,CE_ANY',b'\x00\x00\x00\x4C\x00\x00\x00\x16$nchar_type\x00Bytes,Chars,Width'),
_typenames = (b'\x00\x00\x00\xC5DL_FUNC',b'\x00\x00\x01\x1CDllInfo',b'\x00\x00\x01\x1EFILE',b'\x00\x00\x01\x1FParseStatus',b'\x00\x00\x01\x12R_CFinalizer_t',b'\x00\x00\x01\x20R_CMethodDef',b'\x00\x00\x01\x21R_CallMethodDef',b'\x00\x00\x01\x21R_ExternalMethodDef',b'\x00\x00\x01\x20R_FortranMethodDef',b'\x00\x00\x00\xC1R_NativePrimitiveArgType',b'\x00\x00\x00\x2DR_len_t',b'\x00\x00\x00\x0ER_xlen_t',b'\x00\x00\x00\x44Rboolean',b'\x00\x00\x00\xBERbyte',b'\x00\x00\x00\x5DRcomplex',b'\x00\x00\x01\x15Rstart',b'\x00\x00\x00\xD9SA_TYPE',b'\x00\x00\x00\x03SEXP',b'\x00\x00\x01\x23SEXPREC',b'\x00\x00\x01\x24SEXPREC_ALIGN',b'\x00\x00\x00\xC1SEXPTYPE',b'\x00\x00\x01\x25VECSEXP',b'\x00\x00\x01\x26VECTOR_SEXPREC',b'\x00\x00\x00\x64cetype_t',b'\x00\x00\x00\x4Cnchar_type',b'\x00\x00\x01\x38structRstart'),
)
| 1,373.333333 | 6,817 | 0.751578 | 3,601 | 16,480 | 3.364066 | 0.14135 | 0.333333 | 0.128281 | 0.0667 | 0.518161 | 0.446095 | 0.340928 | 0.31443 | 0.290821 | 0.28141 | 0 | 0.308619 | 0.003155 | 16,480 | 11 | 6,818 | 1,498.181818 | 0.42878 | 0.001153 | 0 | 0 | 1 | 0.666667 | 0.881402 | 0.855884 | 0 | 1 | 0.000365 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
7389dd205faec3e7405f296c7a6d15eaee2aa0d5 | 353 | py | Python | tests/fixtures/defxmlschema/chapter16/__init__.py | nimish/xsdata | 7afe2781b66982428cc1731f53c065086acd35c1 | [
"MIT"
] | null | null | null | tests/fixtures/defxmlschema/chapter16/__init__.py | nimish/xsdata | 7afe2781b66982428cc1731f53c065086acd35c1 | [
"MIT"
] | null | null | null | tests/fixtures/defxmlschema/chapter16/__init__.py | nimish/xsdata | 7afe2781b66982428cc1731f53c065086acd35c1 | [
"MIT"
] | null | null | null | from tests.fixtures.defxmlschema.chapter16.example1607 import HatType
from tests.fixtures.defxmlschema.chapter16.example1607 import ShirtType
from tests.fixtures.defxmlschema.chapter16.example1607 import UmbrellaType
from tests.fixtures.defxmlschema.chapter16.example1610 import Hat
from tests.fixtures.defxmlschema.chapter16.example1610 import Product
| 58.833333 | 74 | 0.886686 | 40 | 353 | 7.825 | 0.325 | 0.14377 | 0.271566 | 0.463259 | 0.878594 | 0.878594 | 0.878594 | 0 | 0 | 0 | 0 | 0.09009 | 0.056657 | 353 | 5 | 75 | 70.6 | 0.84985 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 9 |
7398db4797e1b1a444a3beaf7b14ddd2e8446e68 | 18,259 | py | Python | core/globals.py | TheArchives/Nexus | 5482def8b50562fdbae980cda9b1708bfad8bffb | [
"BSD-2-Clause"
] | 1 | 2021-04-06T18:54:31.000Z | 2021-04-06T18:54:31.000Z | core/globals.py | TheArchives/Nexus | 5482def8b50562fdbae980cda9b1708bfad8bffb | [
"BSD-2-Clause"
] | null | null | null | core/globals.py | TheArchives/Nexus | 5482def8b50562fdbae980cda9b1708bfad8bffb | [
"BSD-2-Clause"
] | 1 | 2021-12-20T18:11:25.000Z | 2021-12-20T18:11:25.000Z | # The Nexus software is licensed under the BSD 2-Clause license.
#
# You should have recieved a copy of this license with the software.
# If you did not, you can find one at the following link.
#
# http://opensource.org/licenses/bsd-license.php
import time
# Table of server-wide ("global") ranks.  Each entry maps the rank keyword
# to (permission-check method name, factory set attribute, per-client
# notifier method name, success message).  The seven original copy-pasted
# branches differed only in these four values.
# (The original "director" message read "an director."; fixed to "a director.")
_GLOBAL_RANK_SPECS = {
    "member": ("isModPlus", "members", "sendMemberUpdate", "%s is now a Member."),
    "globalbuilder": ("isModPlus", "globalbuilders", "sendGlobalBuilderUpdate", "%s is now a Global Builder."),
    "mod": ("isDirectorPlus", "mods", "sendModUpdate", "%s is now a Mod."),
    "admin": ("isDirectorPlus", "admins", "sendAdminUpdate", "%s is now an admin."),
    "coder": ("isDirectorPlus", "coders", "sendCoderUpdate", "%s is now a coder."),
    "director": ("isHiddenPlus", "directors", "sendDirectorUpdate", "%s is now a director."),
    "hidden": ("isServerOwner", "hidden", "sendHiddenUpdate", "%s is now hidden."),
}

def _resolve_rank_world(self, factory, parts, server):
    """Resolve the world a world-scoped rank applies to.

    Uses parts[3] when given, otherwise falls back to the in-game caller's
    current world.  Returns (world, None) on success, (None, message) on error.
    """
    if len(parts) > 3:
        try:
            return factory.worlds[parts[3]], None
        except KeyError:
            return None, ('Unknown world "%s"' % parts[3])
    if not server:
        return self.client.world, None
    # Console/server callers have no "current world" to fall back on.
    return None, "You must provide a world"

def Rank(self, parts, fromloc, overriderank, server=None):
    """Grant a rank to the user named in parts[2] and return a status message.

    parts is the tokenized command: [cmd, rank, username, optional world].
    fromloc == "console" skips server-side permission checks; overriderank
    bypasses in-game permission checks where a branch honors it.  When
    server is given it is used as the factory (there is no in-game client,
    so self.client is never touched on that path).
    """
    username = parts[2].lower()
    year = time.strftime("%Y")
    month = time.strftime("%m")
    if username == "099":
        # BUG FIX: the original test `int(year) > 2012 and int(month) > 3`
        # also re-blocked January-March of every year after 2013; compare
        # the (year, month) pair against April 2013 instead.
        if (int(year), int(month)) < (2013, 4):
            return "099 may not be ranked until April 1st 2013."
    if server:
        factory = server
    else:
        factory = self.client.factory
    rank = parts[1]
    if rank == "builder":
        world, error = _resolve_rank_world(self, factory, parts, server)
        if error:
            return error
        if not server:
            if not overriderank:
                if not (world.isOp(self.client.username) or world.isOwner(self.client.username) or self.client.isModPlus()):
                    return ("You are not a high enough rank!")
        else:
            if fromloc != "console":
                if not (world.isOp(parts[-1]) or world.isOwner(parts[-1]) or factory.isModPlus(parts[-1])):
                    return ("You are not a high enough rank!")
        world.builders.add(username)
        if username in factory.usernames:
            user = factory.usernames[username]
            # Only refresh the client if they are currently in that world.
            if user.world == world:
                user.sendBuilderUpdate()
        return ("%s is now a Builder" % username)
    elif rank == "op":
        world, error = _resolve_rank_world(self, factory, parts, server)
        if error:
            return error
        if not server:
            if not overriderank:
                if not (world.isOwner(self.client.username) or self.client.isModPlus()):
                    return ("You are not a high enough rank!")
        else:
            if fromloc != "console":
                if not (world.isOwner(parts[-1]) or factory.isModPlus(parts[-1])):
                    return ("You are not a high enough rank!")
        world.ops.add(username)
        return ("Opped %s" % username)
    elif rank == "worldowner":
        world, error = _resolve_rank_world(self, factory, parts, server)
        if error:
            return error
        if not server:
            # BUG FIX: the original `if not self.client.isWorldOwnerPlus() or
            # overriderank:` rejected exactly the callers that *had* an
            # override; honor overriderank like the other branches do.
            if not (self.client.isWorldOwnerPlus() or overriderank):
                return ("You are not a high enough rank!")
        else:
            if fromloc != "console":
                if not (world.isOwner(parts[-1]) or factory.isModPlus(parts[-1])):
                    return ("You are not a high enough rank!")
        # BUG FIX: assign ownership of the *resolved* world.  The original
        # wrote self.client.world.owner, which ignored an explicit world
        # argument and crashed on server-side calls (no self.client there).
        world.owner = username
        return ("%s is now a World Owner." % username)
    elif rank in _GLOBAL_RANK_SPECS:
        checker, group, notifier, message = _GLOBAL_RANK_SPECS[rank]
        if not server:
            if not getattr(self.client, checker)():
                return ("You are not a high enough rank!")
        else:
            if fromloc != "console":
                if not getattr(factory, checker)(parts[-1]):
                    return ("You are not a high enough rank!")
        getattr(factory, group).add(username)
        if username in factory.usernames:
            getattr(factory.usernames[username], notifier)()
        return (message % username)
    else:
        return ("Unknown rank \"%s\"" % parts[1])
def DeRank(self, parts, fromloc, overriderank, server=None):
username = parts[2].lower()
if server:
factory = server
else:
factory = self.client.factory
if parts[1] == "builder":
if len(parts) > 3:
try:
world = factory.worlds[parts[3]]
except KeyError:
return ("Unknown world \"%s\"" %parts[3])
else:
if not server:
world = self.client.world
else:
return "You must provide a world"
if not server:
if not overriderank:
if not (world.isOp(self.client.username) or world.isOwner(self.client.username) or self.client.isModPlus()):
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not (world.isOp(parts[-1]) or world.isOwner(parts[-1]) or factory.isModPlus(parts[-1])):
return ("You are not a high enough rank!")
try:
world.builders.remove(username)
except KeyError:
return ("%s is not a Builder." % username)
if username in factory.usernames:
user = factory.usernames[username]
if user.world == world:
user.sendBuilderUpdate()
return ("Removed %s as Builder" % username)
elif parts[1] == "op":
if len(parts) > 3:
try:
world = factory.worlds[parts[3]]
except KeyError:
return ("Unknown world \"%s\"" %parts[3])
else:
if not server:
world = self.client.world
else:
return "You must provide a world"
if not server:
if not overriderank:
if not (world.isOwner(self.client.username) or self.client.isModPlus()) and world != self.client.world:
return ("You are not a World Owner!")
else:
if fromloc != "console":
if not (world.isOwner(parts[-1]) or factory.isModPlus(parts[-1])):
return ("You are not a high enough rank!")
try:
world.ops.remove(username)
except KeyError:
return ("%s is not an op." % username)
if username in factory.usernames:
user = factory.usernames[username]
if user.world == world:
user.sendOpUpdate()
return ("Deopped %s" % username)
elif parts[1] == "worldowner":
if len(parts) > 3:
try:
world = factory.worlds[parts[3]]
except KeyError:
return ("Unknown world \"%s\"" %parts[3])
else:
if not server:
world = self.client.world
else:
return "You must provide a world"
if not server:
if not (world.isOwner(self.client.username) or self.client.isModPlus()) and world != self.client.world:
return ("You are not a World Owner!")
else:
if fromloc != "console":
if not (world.isOwner(parts[-1]) or factory.isModPlus(parts[-1])):
return ("You are not a high enough rank!")
try:
self.client.world.owner = ("")
except KeyError:
return ("%s is not a world owner." % username)
if username in factory.usernames:
user = factory.usernames[username]
if user.world == world:
user.sendOpUpdate()
return ("%s is no longer the World Owner." % username)
elif parts[1] == "member":
if not server:
if not self.client.isModPlus():
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not factory.isModPlus(parts[-1]):
return ("You are not a high enough rank!")
if username in factory.members:
factory.members.remove(username)
else:
return ("No such member \"%s\"" % username.lower())
if username in factory.usernames:
factory.usernames[username].sendMemberUpdate()
return ("%s is no longer a Member." % username.lower())
elif parts[1] == "globalbuilder":
if not server:
if not self.client.isModPlus():
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not factory.isModPlus(parts[-1]):
return ("You are not a high enough rank!")
if username in factory.globalbuilders:
factory.globalbuilders.remove(username)
else:
return ("No such global builder \"%s\"" % username.lower())
if username in factory.usernames:
factory.usernames[username].sendGlobalBuilderUpdate()
return ("%s is no longer a Member." % username.lower())
elif parts[1] == "mod":
if not server:
if not self.client.isDirectorPlus():
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not factory.isDirectorPlus(parts[-1]):
return ("You are not a high enough rank!")
if username in factory.mods:
factory.mods.remove(username)
else:
return ("No such mod \"%s\"" % username.lower())
if username in factory.usernames:
factory.usernames[username].sendModUpdate()
return ("%s is no longer a Mod." % username.lower())
elif parts[1] == "admin":
if not server:
if not self.client.isDirectorPlus():
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not factory.isDirectorPlus(parts[-1]):
return ("You are not a high enough rank!")
if username in factory.admins:
factory.admins.remove(username)
if username in factory.usernames:
factory.usernames[username].sendAdminUpdate()
return ("%s is no longer an admin." % username.lower())
else:
return ("No such admin \"%s\""% username.lower())
elif parts[1] == "coder":
if not server:
if not self.client.isDirectorPlus():
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not factory.isDirectorPlus(parts[-1]):
return ("You are not a high enough rank!")
if username in factory.coders:
factory.coders.remove(username)
if username in factory.usernames:
factory.usernames[username].sendCoderUpdate()
return ("%s is no longer a coder." % username.lower())
else:
return ("No such admin \"%s\""% username.lower())
elif parts[1] == "director":
if not server:
if not self.client.isHiddenPlus():
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not factory.isHiddenPlus(parts[-1]):
return ("You are not a high enough rank!")
if username in factory.directors:
factory.directors.remove(username)
if username in factory.usernames:
factory.usernames[username].sendDirectorUpdate()
return ("%s is no longer an director." % username.lower())
else:
return ("No such director \"%s\""% username.lower())
elif parts[1] == "hidden":
if not server:
if not self.client.isServerOwner():
return ("You are not a high enough rank!")
else:
if fromloc != "console":
if not factory.isServerOwner(parts[-1]):
return ("You are not a high enough rank!")
if username in factory.hidden:
factory.hidden.remove(username)
if username in factory.usernames:
factory.usernames[username].sendHiddenUpdate()
return ("%s is no longer hidden." % username.lower())
else:
return ("No such hidden \"%s\""% username.lower())
else:
return ("Unknown rank \"%s\""%parts[1])
def Spec(self, username, fromloc, overriderank, server=None):
if server:
factory = server
else:
factory = self.client.factory
if username in factory.directors:
return ("You cannot make staff a spec!")
if username in factory.coders:
return ("You cannot make staff a spec!")
if username in factory.admins:
return ("You cannot make staff a spec!")
if username in factory.mods:
return ("You cannot make staff a spec!")
factory.spectators.add(username)
if username in factory.usernames:
factory.usernames[username].sendSpectatorUpdate()
return ("%s is now a spec." % username)
def Staff(self, server=None):
Temp = []
if server:
factory = server
else:
factory = self.client.factory
if len(factory.directors):
Temp.append (["Directors:"] + list(factory.directors))
if len(factory.coders):
Temp.append (["Coders:"] + list(factory.coders))
if len(factory.admins):
Temp.append (["Admins:"] + list(factory.admins))
if len(factory.mods):
Temp.append (["Mods:"] + list(factory.mods))
return Temp
def Credits(self=None):
Temp = []
Temp.append ("Thanks to the following people for making Arc possible...")
Temp.append ("Mojang Specifications (Minecraft): Notch, dock, ez, ...")
Temp.append ("Creator: aera (Myne and The Archives)")
Temp.append ("Devs (Arc/The Archives): Adam01, gdude2002 (arbot), NotMeh, revenant,")
Temp.append ("Devs (iCraft): AndrewPH, destroyerx1, Dwarfy, erronjason, eugo (Knossus), goober, gothfox, ntfwc, Saanix, sk8rjwd, tehcid, Varriount, willempiee")
Temp.append ("Devs (blockBox): fizyplankton, tyteen4a03, UberFoX")
Temp.append ("Others: 099, 2k10, Akai, Antoligy, Aquaskys, aythrea, Bidoof_King, Bioniclegenius (Red_Link), blahblahbal, BlueProtoman, CDRom, fragmer, GLaDOS (Cortana), iMak, Kelraider, MAup, MystX, PyroPyro, Rils, Roadcrosser, Roujo, setveen, TheUndeadFish, TkTech, Uninspired")
return Temp
def makefile(filename):
import os
dir = os.path.dirname(filename)
try:
os.stat(dir)
except:
try:
os.mkdir(dir)
except OSError:
pass
if not os.path.exists(filename):
with open(filename, "w") as f:
f.write("")
del os
def makedatfile(filename):
import os
dir = os.path.dirname(filename)
try:
os.stat(dir)
except:
try:
os.mkdir(dir)
except OSError:
pass
if not os.path.exists(filename):
with open(filename, "w") as f:
f.write("(dp1\n.")
del os
def checkos(self):
try:
if (os.uname()[0] == "Darwin"):
os = "Mac"
else:
os = "Linux"
except:
os = "Windows"
return os
| 40.041667 | 283 | 0.546744 | 2,072 | 18,259 | 4.817085 | 0.11583 | 0.036569 | 0.048091 | 0.060114 | 0.809037 | 0.791704 | 0.771666 | 0.764653 | 0.750426 | 0.750426 | 0 | 0.008812 | 0.341202 | 18,259 | 455 | 284 | 40.12967 | 0.820933 | 0.012706 | 0 | 0.794989 | 0 | 0.004556 | 0.18202 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018223 | false | 0.004556 | 0.006834 | 0 | 0.236902 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
73cfa1ad7e7de7fabaefeef795e605b81ae36a86 | 40,740 | py | Python | azure-iot-device/tests/provisioning/pipeline/test_pipeline_stages_provisioning.py | cartertinney/azure-iot-sdk-python | a5572b93047b4a54c5b990d9e25905398418c4fd | [
"MIT"
] | null | null | null | azure-iot-device/tests/provisioning/pipeline/test_pipeline_stages_provisioning.py | cartertinney/azure-iot-sdk-python | a5572b93047b4a54c5b990d9e25905398418c4fd | [
"MIT"
] | null | null | null | azure-iot-device/tests/provisioning/pipeline/test_pipeline_stages_provisioning.py | cartertinney/azure-iot-sdk-python | a5572b93047b4a54c5b990d9e25905398418c4fd | [
"MIT"
] | null | null | null | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import logging
import pytest
import sys
import json
import datetime
from azure.iot.device.provisioning.pipeline import (
pipeline_stages_provisioning,
pipeline_ops_provisioning,
)
from azure.iot.device.common.pipeline import pipeline_ops_base, pipeline_events_base
from tests.common.pipeline import pipeline_stage_test
from azure.iot.device.exceptions import ServiceError
from azure.iot.device.provisioning.models.registration_result import (
RegistrationResult,
RegistrationState,
)
from tests.common.pipeline.helpers import StageRunOpTestBase
from azure.iot.device import exceptions
from azure.iot.device.provisioning.pipeline import constant
import threading
logging.basicConfig(level=logging.DEBUG)
# Handle to this module so generated base-stage tests can be attached to it
this_module = sys.modules[__name__]
# Every test in this module runs on the fake pipeline thread fixture
pytestmark = pytest.mark.usefixtures("fake_pipeline_thread")

# Fake values used as inputs / expected outputs throughout these tests
fake_device_id = "elder_wand"
fake_registration_id = "registered_remembrall"
fake_provisioning_host = "hogwarts.com"
fake_id_scope = "weasley_wizard_wheezes"
fake_ca_cert = "fake_ca_cert"
fake_sas_token = "horcrux_token"
fake_request_id = "Request1234"
fake_operation_id = "Operation4567"
fake_status = "Flying"
fake_assigned_hub = "Dumbledore'sArmy"
fake_sub_status = "FlyingOnHippogriff"
fake_created_dttm = datetime.datetime(2020, 5, 17)
fake_last_update_dttm = datetime.datetime(2020, 10, 17)
fake_etag = "HighQualityFlyingBroom"
fake_payload = "petrificus totalus"
fake_symmetric_key = "Zm9vYmFy"
fake_x509_cert_file = "fantastic_beasts"
fake_x509_cert_key_file = "where_to_find_them"
fake_pass_phrase = "alohomora"
class FakeRegistrationResult(object):
    """Test double mimicking the registration result object returned by the
    provisioning service (operation id, status, and a registration state)."""

    def __init__(self, operation_id, status, state):
        self.operationId = operation_id
        self.status = status
        self.registrationState = state

    def __str__(self):
        # String form is the state's string form followed by the status,
        # newline-separated (mirrors how RegistrationResult stringifies).
        lines = [str(self.registrationState), self.status]
        return "\n".join(lines)
class FakeRegistrationState(object):
    """Test double mimicking the registration state returned by the
    provisioning service. Device id, hub and substatus come from the
    module-level fake constants; only the payload varies per test."""

    def __init__(self, payload):
        self.deviceId = fake_device_id
        self.assignedHub = fake_assigned_hub
        self.payload = payload
        self.substatus = fake_sub_status

    def __str__(self):
        fields = [
            self.deviceId,
            self.assignedHub,
            self.substatus,
            self.get_payload_string(),
        ]
        return "\n".join(fields)

    def get_payload_string(self):
        # Serialize the payload deterministically (sorted keys) so string
        # comparisons in tests are stable; arbitrary objects fall back to
        # their __dict__.
        return json.dumps(self.payload, default=lambda o: o.__dict__, sort_keys=True)
def create_registration_result(fake_payload, status):
    """Build a FakeRegistrationResult with the given payload and status,
    using the module-level fake operation id."""
    registration_state = FakeRegistrationState(payload=fake_payload)
    return FakeRegistrationResult(fake_operation_id, status, registration_state)
def get_registration_result_as_bytes(registration_result):
    """Serialize a registration result to UTF-8 JSON bytes, the way the
    service response body would arrive on the wire. Objects are serialized
    via their __dict__."""
    serialized = json.dumps(registration_result, default=lambda o: o.__dict__)
    return serialized.encode("utf-8")
###################
# COMMON FIXTURES #
###################
@pytest.fixture(params=[True, False], ids=["With error", "No error"])
def op_error(request, arbitrary_exception):
    """Parametrized fixture yielding either an arbitrary exception or None,
    so tests run once for the error path and once for the success path."""
    return arbitrary_exception if request.param else None
###############################
# REGISTRATION STAGE #
###############################
class RegistrationStageConfig(object):
    """Shared fixture configuration for tests exercising a RegistrationStage."""

    @pytest.fixture
    def cls_type(self):
        return pipeline_stages_provisioning.RegistrationStage

    @pytest.fixture
    def init_kwargs(self):
        return {}

    @pytest.fixture
    def stage(self, mocker, cls_type, init_kwargs):
        # Instantiate the stage and mock out its pipeline connections so
        # sends up/down can be asserted on.
        instance = cls_type(**init_kwargs)
        instance.send_op_down = mocker.MagicMock()
        instance.send_event_up = mocker.MagicMock()
        mocker.spy(instance, "report_background_exception")
        return instance
# Auto-generate the standard suite of base pipeline stage tests for
# RegistrationStage and attach them to this module.
pipeline_stage_test.add_base_pipeline_stage_tests(
    test_module=this_module,
    stage_class_under_test=pipeline_stages_provisioning.RegistrationStage,
    stage_test_config_class=RegistrationStageConfig,
)
@pytest.mark.describe("RegistrationStage - .run_op() -- called with RegisterOperation")
class TestRegistrationStageWithRegisterOperation(StageRunOpTestBase, RegistrationStageConfig):
    """Tests RegistrationStage.run_op() invoked with a RegisterOperation."""

    @pytest.fixture(params=[" ", fake_payload], ids=["empty payload", "some payload"])
    def request_payload(self, request):
        return request.param

    @pytest.fixture
    def op(self, stage, mocker, request_payload):
        op = pipeline_ops_provisioning.RegisterOperation(
            request_payload, fake_registration_id, callback=mocker.MagicMock()
        )
        yield op
        # Clean up any timers set on the op so no background threads outlive
        # the test.
        # FIX: the original teardown checked/cancelled provisioning_timeout_timer
        # twice and never cancelled polling_timer.
        if op.provisioning_timeout_timer:
            op.provisioning_timeout_timer.cancel()
        if op.retry_after_timer:
            op.retry_after_timer.cancel()
        if op.polling_timer:
            op.polling_timer.cancel()

    @pytest.fixture
    def request_body(self, request_payload):
        # Expected JSON body of the registration request
        return '{{"payload": {json_payload}, "registrationId": "{reg_id}"}}'.format(
            reg_id=fake_registration_id, json_payload=json.dumps(request_payload)
        )

    @pytest.mark.it(
        "Sends a new RequestAndResponseOperation down the pipeline, configured to request a registration from provisioning service"
    )
    def test_request_and_response_op(self, stage, op, request_body):
        stage.run_op(op)

        assert stage.send_op_down.call_count == 1
        new_op = stage.send_op_down.call_args[0][0]
        assert isinstance(new_op, pipeline_ops_base.RequestAndResponseOperation)
        assert new_op.request_type == "register"
        assert new_op.method == "PUT"
        assert new_op.resource_location == "/"
        assert new_op.request_body == request_body
@pytest.mark.describe("RegistrationStage - .run_op() -- Called with other arbitrary operation")
class TestRegistrationStageWithArbitraryOperation(StageRunOpTestBase, RegistrationStageConfig):
    """Verifies that unrelated operations pass straight through the stage."""

    @pytest.fixture
    def op(self, arbitrary_op):
        return arbitrary_op

    @pytest.mark.it("Sends the operation down the pipeline")
    def test_sends_op_down(self, mocker, stage, op):
        stage.run_op(op)

        # The stage must forward the very same op, exactly once
        assert stage.send_op_down.call_count == 1
        assert stage.send_op_down.call_args == mocker.call(op)
@pytest.mark.describe(
    "RegistrationStage - OCCURRENCE: RequestAndResponseOperation created from RegisterOperation is completed"
)
class TestRegistrationStageWithRegisterOperationCompleted(RegistrationStageConfig):
    """Tests how RegistrationStage handles completion of the worker
    RequestAndResponseOperation it spawned for a RegisterOperation."""

    @pytest.fixture(params=[" ", fake_payload], ids=["empty payload", "some payload"])
    def request_payload(self, request):
        return request.param

    @pytest.fixture
    def send_registration_op(self, mocker, request_payload):
        op = pipeline_ops_provisioning.RegisterOperation(
            request_payload, fake_registration_id, callback=mocker.MagicMock()
        )
        yield op
        # Clean up any timers set on the op so no background threads outlive
        # the test.
        # FIX: the original teardown checked/cancelled provisioning_timeout_timer
        # twice and never cancelled polling_timer.
        if op.provisioning_timeout_timer:
            op.provisioning_timeout_timer.cancel()
        if op.retry_after_timer:
            op.retry_after_timer.cancel()
        if op.polling_timer:
            op.polling_timer.cancel()

    @pytest.fixture
    def stage(self, mocker, cls_type, init_kwargs, send_registration_op):
        stage = cls_type(**init_kwargs)
        stage.send_op_down = mocker.MagicMock()
        stage.send_event_up = mocker.MagicMock()
        mocker.spy(stage, "report_background_exception")

        # Run the registration operation
        stage.run_op(send_registration_op)
        return stage

    @pytest.fixture
    def request_and_response_op(self, stage):
        # Capture the worker op the stage sent down when it ran the register op
        assert stage.send_op_down.call_count == 1
        op = stage.send_op_down.call_args[0][0]
        assert isinstance(op, pipeline_ops_base.RequestAndResponseOperation)
        # reset the stage mock for convenience
        stage.send_op_down.reset_mock()
        return op

    @pytest.fixture
    def request_body(self, request_payload):
        return '{{"payload": {json_payload}, "registrationId": "{reg_id}"}}'.format(
            reg_id=fake_registration_id, json_payload=json.dumps(request_payload)
        )

    @pytest.mark.it(
        "Completes the RegisterOperation unsuccessfully, with the error from the RequestAndResponseOperation, if the RequestAndResponseOperation is completed unsuccessfully"
    )
    @pytest.mark.parametrize(
        "status_code",
        [
            pytest.param(None, id="Status Code: None"),
            pytest.param(200, id="Status Code: 200"),
            pytest.param(300, id="Status Code: 300"),
            pytest.param(400, id="Status Code: 400"),
            pytest.param(500, id="Status Code: 500"),
        ],
    )
    @pytest.mark.parametrize(
        "has_response_body", [True, False], ids=["With Response Body", "No Response Body"]
    )
    def test_request_and_response_op_completed_with_err(
        self,
        stage,
        send_registration_op,
        request_and_response_op,
        status_code,
        has_response_body,
        arbitrary_exception,
    ):
        assert not send_registration_op.completed
        assert not request_and_response_op.completed

        # NOTE: It shouldn't happen that an operation completed with error has a status code or a
        # response body, but it IS possible.
        request_and_response_op.status_code = status_code
        if has_response_body:
            request_and_response_op.response_body = b'{"key": "value"}'

        request_and_response_op.complete(error=arbitrary_exception)

        assert request_and_response_op.completed
        assert request_and_response_op.error is arbitrary_exception
        assert send_registration_op.completed
        assert send_registration_op.error is arbitrary_exception
        assert send_registration_op.registration_result is None

    @pytest.mark.it(
        "Completes the RegisterOperation unsuccessfully with a ServiceError if the RequestAndResponseOperation is completed with a status code >= 300 and less than 429"
    )
    @pytest.mark.parametrize(
        "has_response_body", [True, False], ids=["With Response Body", "No Response Body"]
    )
    @pytest.mark.parametrize(
        "status_code",
        [
            pytest.param(300, id="Status Code: 300"),
            pytest.param(400, id="Status Code: 400"),
            pytest.param(428, id="Status Code: 428"),
        ],
    )
    def test_request_and_response_op_completed_success_with_bad_code(
        self, stage, send_registration_op, request_and_response_op, status_code, has_response_body
    ):
        assert not send_registration_op.completed
        assert not request_and_response_op.completed

        request_and_response_op.status_code = status_code
        if has_response_body:
            request_and_response_op.response_body = b'{"key": "value"}'

        request_and_response_op.complete()

        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_registration_op.completed
        assert isinstance(send_registration_op.error, ServiceError)
        # Twin is NOT returned
        assert send_registration_op.registration_result is None

    @pytest.mark.it(
        "Decodes, deserializes, and returns registration_result on the RegisterOperation op when RequestAndResponseOperation completes with no error if the status code < 300 and if status is 'assigned'"
    )
    def test_request_and_response_op_completed_success_with_status_assigned(
        self, stage, request_payload, send_registration_op, request_and_response_op
    ):
        registration_result = create_registration_result(request_payload, "assigned")

        assert not send_registration_op.completed
        assert not request_and_response_op.completed

        request_and_response_op.status_code = 200
        request_and_response_op.retry_after = None
        request_and_response_op.response_body = get_registration_result_as_bytes(
            registration_result
        )
        request_and_response_op.complete()

        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_registration_op.completed
        assert send_registration_op.error is None
        # We need to assert string representations as these are inherently different objects
        assert str(send_registration_op.registration_result) == str(registration_result)

    @pytest.mark.it(
        "Decodes, deserializes, and returns registration_result along with an error on the RegisterOperation op when RequestAndResponseOperation completes with status code < 300 and status 'failed'"
    )
    def test_request_and_response_op_completed_success_with_status_failed(
        self, stage, request_payload, send_registration_op, request_and_response_op
    ):
        registration_result = create_registration_result(request_payload, "failed")

        assert not send_registration_op.completed
        assert not request_and_response_op.completed

        request_and_response_op.status_code = 200
        request_and_response_op.retry_after = None
        request_and_response_op.response_body = get_registration_result_as_bytes(
            registration_result
        )
        request_and_response_op.complete()

        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_registration_op.completed
        assert isinstance(send_registration_op.error, ServiceError)
        # We need to assert string representations as these are inherently different objects
        assert str(send_registration_op.registration_result) == str(registration_result)
        assert "failed registration status" in str(send_registration_op.error)

    @pytest.mark.it(
        "Returns error on the RegisterOperation op when RequestAndResponseOperation completes with status code < 300 and some unknown status"
    )
    def test_request_and_response_op_completed_success_with_unknown_status(
        self, stage, request_payload, send_registration_op, request_and_response_op
    ):
        registration_result = create_registration_result(request_payload, "quidditching")

        assert not send_registration_op.completed
        assert not request_and_response_op.completed

        request_and_response_op.status_code = 200
        request_and_response_op.retry_after = None
        request_and_response_op.response_body = get_registration_result_as_bytes(
            registration_result
        )
        request_and_response_op.complete()

        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_registration_op.completed
        assert isinstance(send_registration_op.error, ServiceError)
        assert "invalid registration status" in str(send_registration_op.error)

    @pytest.mark.it(
        "Decodes, deserializes the response from RequestAndResponseOperation and creates another op if the status code < 300 and if status is 'assigning'"
    )
    def test_spawns_another_op_request_and_response_op_completed_success_with_status_assigning(
        self, mocker, stage, request_payload, send_registration_op, request_and_response_op
    ):
        # Patch the Timer so the polling delay can be triggered synchronously
        mock_timer = mocker.patch(
            "azure.iot.device.provisioning.pipeline.pipeline_stages_provisioning.Timer"
        )
        mocker.spy(send_registration_op, "spawn_worker_op")
        registration_result = create_registration_result(request_payload, "assigning")

        assert not send_registration_op.completed
        assert not request_and_response_op.completed

        request_and_response_op.status_code = 200
        request_and_response_op.retry_after = None
        request_and_response_op.response_body = get_registration_result_as_bytes(
            registration_result
        )
        request_and_response_op.complete()

        # "assigning" sets a polling timer (not a retry-after timer)
        assert send_registration_op.retry_after_timer is None
        assert send_registration_op.polling_timer is not None

        # Fire the polling timer callback to trigger the spawned polling op
        timer_callback = mock_timer.call_args[0][1]
        timer_callback()

        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        # The register op is NOT yet complete - it is waiting on polling
        assert not send_registration_op.completed
        assert send_registration_op.error is None
        assert (
            send_registration_op.spawn_worker_op.call_args[1]["operation_id"] == fake_operation_id
        )
class RetryStageConfig(object):
    """Shared fixture configuration for tests exercising retry behavior.
    Unlike RegistrationStageConfig, the stage's run_op is spied on so
    re-entrant retries can be counted."""

    @pytest.fixture
    def init_kwargs(self):
        return {}

    @pytest.fixture
    def stage(self, mocker, cls_type, init_kwargs):
        instance = cls_type(**init_kwargs)
        mocker.spy(instance, "run_op")
        instance.send_op_down = mocker.MagicMock()
        instance.send_event_up = mocker.MagicMock()
        mocker.spy(instance, "report_background_exception")
        return instance
@pytest.mark.describe("RegistrationStage - .run_op() -- retried again with RegisterOperation")
class TestRegistrationStageWithRetryOfRegisterOperation(RetryStageConfig):
    """Tests that a RegisterOperation is re-run when the service responds
    with a retryable status code (> 429)."""

    @pytest.fixture(params=[" ", fake_payload], ids=["empty payload", "some payload"])
    def request_payload(self, request):
        return request.param

    @pytest.fixture
    def cls_type(self):
        return pipeline_stages_provisioning.RegistrationStage

    @pytest.fixture
    def op(self, stage, mocker, request_payload):
        op = pipeline_ops_provisioning.RegisterOperation(
            request_payload, fake_registration_id, callback=mocker.MagicMock()
        )
        yield op
        # Clean up any timers set on the op so no background threads outlive
        # the test.
        # FIX: the original teardown checked/cancelled provisioning_timeout_timer
        # twice and never cancelled polling_timer.
        if op.provisioning_timeout_timer:
            op.provisioning_timeout_timer.cancel()
        if op.retry_after_timer:
            op.retry_after_timer.cancel()
        if op.polling_timer:
            op.polling_timer.cancel()

    @pytest.fixture
    def request_body(self, request_payload):
        # Expected JSON body of the registration request
        return '{{"payload": {json_payload}, "registrationId": "{reg_id}"}}'.format(
            reg_id=fake_registration_id, json_payload=json.dumps(request_payload)
        )

    @pytest.mark.it(
        "Decodes, deserializes the response from RequestAndResponseOperation and retries the op if the status code > 429"
    )
    def test_stage_retries_op_if_next_stage_responds_with_status_code_greater_than_429(
        self, mocker, stage, op, request_body, request_payload
    ):
        # Patch the Timer so the retry delay can be triggered synchronously
        mock_timer = mocker.patch(
            "azure.iot.device.provisioning.pipeline.pipeline_stages_provisioning.Timer"
        )
        stage.run_op(op)

        assert stage.send_op_down.call_count == 1
        next_op = stage.send_op_down.call_args[0][0]
        assert isinstance(next_op, pipeline_ops_base.RequestAndResponseOperation)

        # Complete the worker op with a retryable status code
        next_op.status_code = 430
        next_op.retry_after = "1"
        registration_result = create_registration_result(request_payload, "flying")
        next_op.response_body = get_registration_result_as_bytes(registration_result)
        next_op.complete()

        # A retry-after timer (not a polling timer) is set on the original op
        assert op.retry_after_timer is not None
        assert op.polling_timer is None

        # Fire the retry timer callback to trigger the re-run
        timer_callback = mock_timer.call_args[0][1]
        timer_callback()

        assert stage.run_op.call_count == 2
        assert stage.send_op_down.call_count == 2
        next_op_2 = stage.send_op_down.call_args[0][0]
        assert isinstance(next_op_2, pipeline_ops_base.RequestAndResponseOperation)
        assert next_op_2.request_type == "register"
        assert next_op_2.method == "PUT"
        assert next_op_2.resource_location == "/"
        assert next_op_2.request_body == request_body
@pytest.mark.describe(
    "RegistrationStage - .run_op() -- Called with register request operation eligible for timeout"
)
class TestRegistrationStageWithTimeoutOfRegisterOperation(
    StageRunOpTestBase, RegistrationStageConfig
):
    """Tests the provisioning timeout timer that RegistrationStage attaches
    to a RegisterOperation."""

    @pytest.fixture
    def op(self, stage, mocker):
        op = pipeline_ops_provisioning.RegisterOperation(
            " ", fake_registration_id, callback=mocker.MagicMock()
        )
        yield op
        # Clean up any timers set on the op so no background threads outlive
        # the test.
        # FIX: the original teardown checked/cancelled provisioning_timeout_timer
        # twice and never cancelled polling_timer.
        if op.provisioning_timeout_timer:
            op.provisioning_timeout_timer.cancel()
        if op.retry_after_timer:
            op.retry_after_timer.cancel()
        if op.polling_timer:
            op.polling_timer.cancel()

    @pytest.fixture
    def mock_timer(self, mocker):
        # Patch the Timer used by the stage so no real threads are created
        return mocker.patch(
            "azure.iot.device.provisioning.pipeline.pipeline_stages_provisioning.Timer"
        )

    @pytest.mark.it(
        "Adds a provisioning timeout timer with the interval specified in the configuration to the operation, and starts it"
    )
    def test_adds_timer(self, mocker, stage, op, mock_timer):
        stage.run_op(op)

        assert mock_timer.call_count == 1
        assert mock_timer.call_args == mocker.call(constant.DEFAULT_TIMEOUT_INTERVAL, mocker.ANY)
        assert op.provisioning_timeout_timer is mock_timer.return_value
        assert op.provisioning_timeout_timer.start.call_count == 1
        assert op.provisioning_timeout_timer.start.call_args == mocker.call()

    @pytest.mark.it(
        "Sends converted RequestResponse Op down the pipeline after attaching timer to the original op"
    )
    def test_sends_down(self, mocker, stage, op, mock_timer):
        stage.run_op(op)

        assert stage.send_op_down.call_count == 1
        new_op = stage.send_op_down.call_args[0][0]
        assert isinstance(new_op, pipeline_ops_base.RequestAndResponseOperation)
        assert op.provisioning_timeout_timer is mock_timer.return_value

    @pytest.mark.it("Completes the operation unsuccessfully, with a ServiceError due to timeout")
    def test_not_complete_timeout(self, mocker, stage, op, mock_timer):
        # Apply the timer
        stage.run_op(op)
        assert not op.completed
        assert mock_timer.call_count == 1
        on_timer_complete = mock_timer.call_args[0][1]

        # Call timer complete callback (indicating timer completion)
        on_timer_complete()

        # Op is now completed with error
        assert op.completed
        assert isinstance(op.error, exceptions.ServiceError)
        assert "register" in op.error.args[0]

    @pytest.mark.it(
        "Completes the operation successfully, cancels and clears the operation's timeout timer"
    )
    def test_complete_before_timeout(self, mocker, stage, op, mock_timer):
        # Apply the timer
        stage.run_op(op)
        assert not op.completed
        assert mock_timer.call_count == 1
        mock_timer_inst = op.provisioning_timeout_timer
        assert mock_timer_inst is mock_timer.return_value
        assert mock_timer_inst.cancel.call_count == 0

        # Complete the next operation
        new_op = stage.send_op_down.call_args[0][0]
        new_op.status_code = 200
        new_op.response_body = "{}".encode("utf-8")
        new_op.complete()

        # Timer is now cancelled and cleared
        assert mock_timer_inst.cancel.call_count == 1
        assert mock_timer_inst.cancel.call_args == mocker.call()
        assert op.provisioning_timeout_timer is None
class PollingStageConfig(object):
    """Shared fixture configuration for tests exercising a PollingStatusStage."""

    @pytest.fixture
    def cls_type(self):
        return pipeline_stages_provisioning.PollingStatusStage

    @pytest.fixture
    def init_kwargs(self):
        return {}

    @pytest.fixture
    def stage(self, mocker, cls_type, init_kwargs):
        # Instantiate the stage and mock out its pipeline connections so
        # sends up/down can be asserted on.
        instance = cls_type(**init_kwargs)
        instance.send_op_down = mocker.MagicMock()
        instance.send_event_up = mocker.MagicMock()
        mocker.spy(instance, "report_background_exception")
        return instance
# Auto-generate the standard suite of base pipeline stage tests for
# PollingStatusStage and attach them to this module.
pipeline_stage_test.add_base_pipeline_stage_tests(
    test_module=this_module,
    stage_class_under_test=pipeline_stages_provisioning.PollingStatusStage,
    stage_test_config_class=PollingStageConfig,
)
@pytest.mark.describe("PollingStatusStage - .run_op() -- called with PollStatusOperation")
class TestPollingStatusStageWithPollStatusOperation(StageRunOpTestBase, PollingStageConfig):
    """Tests PollingStatusStage.run_op() invoked with a PollStatusOperation."""

    @pytest.fixture
    def op(self, stage, mocker):
        op = pipeline_ops_provisioning.PollStatusOperation(
            fake_operation_id, " ", callback=mocker.MagicMock()
        )
        yield op
        # Cancel any timers the stage attached so no threads outlive the test
        for timer in (op.polling_timer, op.retry_after_timer, op.provisioning_timeout_timer):
            if timer:
                timer.cancel()

    @pytest.mark.it(
        "Sends a new RequestAndResponseOperation down the pipeline, configured to request a registration from provisioning service"
    )
    def test_request_and_response_op(self, stage, op):
        stage.run_op(op)

        assert stage.send_op_down.call_count == 1
        worker_op = stage.send_op_down.call_args[0][0]
        assert isinstance(worker_op, pipeline_ops_base.RequestAndResponseOperation)
        assert worker_op.request_type == "query"
        assert worker_op.method == "GET"
        assert worker_op.resource_location == "/"
        assert worker_op.request_body == " "
@pytest.mark.describe("PollingStatusStage - .run_op() -- Called with other arbitrary operation")
class TestPollingStatusStageWithArbitraryOperation(StageRunOpTestBase, PollingStageConfig):
    """Verifies that unrelated operations pass straight through the stage."""

    @pytest.fixture
    def op(self, arbitrary_op):
        return arbitrary_op

    @pytest.mark.it("Sends the operation down the pipeline")
    def test_sends_op_down(self, mocker, stage, op):
        stage.run_op(op)

        # The stage must forward the very same op, exactly once
        assert stage.send_op_down.call_count == 1
        assert stage.send_op_down.call_args == mocker.call(op)
@pytest.mark.describe(
    "PollingStatusStage - OCCURRENCE: RequestAndResponseOperation created from PollStatusOperation is completed"
)
class TestPollingStatusStageWithPollStatusOperationCompleted(PollingStageConfig):
    """Tests for completion of the RequestAndResponseOperation that the stage
    spawns from a PollStatusOperation.

    (Restored: the original block had its indentation stripped and was
    syntactically invalid.)
    """

    @pytest.fixture
    def send_query_op(self, mocker):
        op = pipeline_ops_provisioning.PollStatusOperation(
            fake_operation_id, " ", callback=mocker.MagicMock()
        )
        yield op
        # Clean up any timers set on it
        if op.polling_timer:
            op.polling_timer.cancel()
        if op.retry_after_timer:
            op.retry_after_timer.cancel()
        if op.provisioning_timeout_timer:
            op.provisioning_timeout_timer.cancel()

    @pytest.fixture
    def stage(self, mocker, cls_type, init_kwargs, send_query_op):
        # Overrides the shared stage fixture: the poll op is already running
        # by the time each test starts.
        stage = cls_type(**init_kwargs)
        stage.send_op_down = mocker.MagicMock()
        stage.send_event_up = mocker.MagicMock()
        mocker.spy(stage, "report_background_exception")
        # Run the registration operation
        stage.run_op(send_query_op)
        return stage

    @pytest.fixture
    def request_and_response_op(self, stage):
        assert stage.send_op_down.call_count == 1
        op = stage.send_op_down.call_args[0][0]
        assert isinstance(op, pipeline_ops_base.RequestAndResponseOperation)
        # reset the stage mock for convenience
        stage.send_op_down.reset_mock()
        return op

    @pytest.mark.it(
        "Completes the PollStatusOperation unsuccessfully, with the error from the RequestAndResponseOperation, if the RequestAndResponseOperation is completed unsuccessfully"
    )
    @pytest.mark.parametrize(
        "status_code",
        [
            pytest.param(None, id="Status Code: None"),
            pytest.param(200, id="Status Code: 200"),
            pytest.param(300, id="Status Code: 300"),
            pytest.param(400, id="Status Code: 400"),
            pytest.param(500, id="Status Code: 500"),
        ],
    )
    @pytest.mark.parametrize(
        "has_response_body", [True, False], ids=["With Response Body", "No Response Body"]
    )
    def test_request_and_response_op_completed_with_err(
        self,
        stage,
        send_query_op,
        request_and_response_op,
        status_code,
        has_response_body,
        arbitrary_exception,
    ):
        assert not send_query_op.completed
        assert not request_and_response_op.completed
        # NOTE: It shouldn't happen that an operation completed with error has a status code or a
        # response body, but it IS possible.
        request_and_response_op.status_code = status_code
        if has_response_body:
            request_and_response_op.response_body = b'{"key": "value"}'
        request_and_response_op.complete(error=arbitrary_exception)
        assert request_and_response_op.completed
        assert request_and_response_op.error is arbitrary_exception
        assert send_query_op.completed
        assert send_query_op.error is arbitrary_exception
        assert send_query_op.registration_result is None

    @pytest.mark.it(
        "Completes the PollStatusOperation unsuccessfully with a ServiceError if the RequestAndResponseOperation is completed with a status code >= 300 and less than 429"
    )
    @pytest.mark.parametrize(
        "has_response_body", [True, False], ids=["With Response Body", "No Response Body"]
    )
    @pytest.mark.parametrize(
        "status_code",
        [
            pytest.param(300, id="Status Code: 300"),
            pytest.param(400, id="Status Code: 400"),
            pytest.param(428, id="Status Code: 428"),
        ],
    )
    def test_request_and_response_op_completed_success_with_bad_code(
        self, stage, send_query_op, request_and_response_op, status_code, has_response_body
    ):
        assert not send_query_op.completed
        assert not request_and_response_op.completed
        request_and_response_op.status_code = status_code
        if has_response_body:
            request_and_response_op.response_body = b'{"key": "value"}'
        request_and_response_op.complete()
        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_query_op.completed
        assert isinstance(send_query_op.error, ServiceError)
        # Twin is NOT returned
        assert send_query_op.registration_result is None

    @pytest.mark.it(
        "Decodes, deserializes, and returns registration_result on the PollStatusOperation op when RequestAndResponseOperation completes with no error if the status code < 300 and if status is 'assigned'"
    )
    def test_request_and_response_op_completed_success_with_status_assigned(
        self, stage, send_query_op, request_and_response_op
    ):
        registration_result = create_registration_result(" ", "assigned")
        assert not send_query_op.completed
        assert not request_and_response_op.completed
        request_and_response_op.status_code = 200
        request_and_response_op.retry_after = None
        request_and_response_op.response_body = get_registration_result_as_bytes(
            registration_result
        )
        request_and_response_op.complete()
        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_query_op.completed
        assert send_query_op.error is None
        # We need to assert string representations as these are inherently different objects
        assert str(send_query_op.registration_result) == str(registration_result)

    @pytest.mark.it(
        "Decodes, deserializes, and returns registration_result along with an error on the PollStatusOperation op when RequestAndResponseOperation completes with status code < 300 and status 'failed'"
    )
    def test_request_and_response_op_completed_success_with_status_failed(
        self, stage, send_query_op, request_and_response_op
    ):
        registration_result = create_registration_result(" ", "failed")
        assert not send_query_op.completed
        assert not request_and_response_op.completed
        request_and_response_op.status_code = 200
        request_and_response_op.retry_after = None
        request_and_response_op.response_body = get_registration_result_as_bytes(
            registration_result
        )
        request_and_response_op.complete()
        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_query_op.completed
        assert isinstance(send_query_op.error, ServiceError)
        # We need to assert string representations as these are inherently different objects
        assert str(send_query_op.registration_result) == str(registration_result)
        assert "failed registration status" in str(send_query_op.error)

    @pytest.mark.it(
        "Returns error on the PollStatusOperation op when RequestAndResponseOperation completes with status code < 300 and some unknown status"
    )
    def test_request_and_response_op_completed_success_with_unknown_status(
        self, stage, send_query_op, request_and_response_op
    ):
        registration_result = create_registration_result(" ", "quidditching")
        assert not send_query_op.completed
        assert not request_and_response_op.completed
        request_and_response_op.status_code = 200
        request_and_response_op.retry_after = None
        request_and_response_op.response_body = get_registration_result_as_bytes(
            registration_result
        )
        request_and_response_op.complete()
        assert request_and_response_op.completed
        assert request_and_response_op.error is None
        assert send_query_op.completed
        assert isinstance(send_query_op.error, ServiceError)
        assert "invalid registration status" in str(send_query_op.error)
@pytest.mark.describe("PollingStatusStage - .run_op() -- retried again with PollStatusOperation")
class TestPollingStatusStageWithPollStatusRetryOperation(RetryStageConfig):
    """Tests for the retry paths (HTTP 430+, and 'assigning' status) of
    PollingStatusStage.

    (Restored: the original block had its indentation stripped and was
    syntactically invalid.)
    """

    @pytest.fixture
    def cls_type(self):
        return pipeline_stages_provisioning.PollingStatusStage

    @pytest.fixture
    def op(self, stage, mocker):
        op = pipeline_ops_provisioning.PollStatusOperation(
            fake_operation_id, " ", callback=mocker.MagicMock()
        )
        yield op
        # Clean up any timers set on it
        if op.polling_timer:
            op.polling_timer.cancel()
        if op.retry_after_timer:
            op.retry_after_timer.cancel()
        if op.provisioning_timeout_timer:
            op.provisioning_timeout_timer.cancel()

    @pytest.mark.it(
        "Decodes, deserializes the response from RequestAndResponseOperation and retries the op if the status code > 429"
    )
    def test_stage_retries_op_if_next_stage_responds_with_status_code_greater_than_429(
        self, mocker, stage, op
    ):
        mock_timer = mocker.patch(
            "azure.iot.device.provisioning.pipeline.pipeline_stages_provisioning.Timer"
        )
        stage.run_op(op)
        assert stage.send_op_down.call_count == 1
        next_op = stage.send_op_down.call_args[0][0]
        assert isinstance(next_op, pipeline_ops_base.RequestAndResponseOperation)
        next_op.status_code = 430
        next_op.retry_after = "1"
        registration_result = create_registration_result(" ", "flying")
        next_op.response_body = get_registration_result_as_bytes(registration_result)
        next_op.complete()
        # A retry-after timer (not a polling timer) must have been set.
        assert op.retry_after_timer is not None
        assert op.polling_timer is None
        # Fire the timer callback to trigger the retry.
        timer_callback = mock_timer.call_args[0][1]
        timer_callback()
        assert stage.run_op.call_count == 2
        assert stage.send_op_down.call_count == 2
        next_op_2 = stage.send_op_down.call_args[0][0]
        assert isinstance(next_op_2, pipeline_ops_base.RequestAndResponseOperation)
        assert next_op_2.request_type == "query"
        assert next_op_2.method == "GET"
        assert next_op_2.resource_location == "/"
        assert next_op_2.request_body == " "

    @pytest.mark.it(
        "Decodes, deserializes the response from RequestAndResponseOperation and retries the op if the status code < 300 and if status is 'assigning'"
    )
    def test_stage_retries_op_if_next_stage_responds_with_status_assigning(self, mocker, stage, op):
        mock_timer = mocker.patch(
            "azure.iot.device.provisioning.pipeline.pipeline_stages_provisioning.Timer"
        )
        stage.run_op(op)
        assert stage.send_op_down.call_count == 1
        next_op = stage.send_op_down.call_args[0][0]
        assert isinstance(next_op, pipeline_ops_base.RequestAndResponseOperation)
        next_op.status_code = 228
        next_op.retry_after = "1"
        registration_result = create_registration_result(" ", "assigning")
        next_op.response_body = get_registration_result_as_bytes(registration_result)
        next_op.complete()
        # A polling timer (not a retry-after timer) must have been set.
        assert op.retry_after_timer is None
        assert op.polling_timer is not None
        # Fire the timer callback to trigger the re-poll.
        timer_callback = mock_timer.call_args[0][1]
        timer_callback()
        assert stage.run_op.call_count == 2
        assert stage.send_op_down.call_count == 2
        next_op_2 = stage.send_op_down.call_args[0][0]
        assert isinstance(next_op_2, pipeline_ops_base.RequestAndResponseOperation)
        assert next_op_2.request_type == "query"
        assert next_op_2.method == "GET"
        assert next_op_2.resource_location == "/"
        assert next_op_2.request_body == " "
# NOTE(review): the describe string says "RegistrationStage" but this class
# exercises the polling/query operation -- looks like a copy/paste; confirm
# before renaming, since test IDs would change.
@pytest.mark.describe(
    "RegistrationStage - .run_op() -- Called with register request operation eligible for timeout"
)
class TestPollingStageWithTimeoutOfQueryOperation(StageRunOpTestBase, PollingStageConfig):
    """Tests for the provisioning-timeout timer attached to a poll operation.

    (Restored: the original block had its indentation stripped and was
    syntactically invalid.)
    """

    @pytest.fixture
    def op(self, stage, mocker):
        op = pipeline_ops_provisioning.PollStatusOperation(
            fake_operation_id, " ", callback=mocker.MagicMock()
        )
        yield op
        # Clean up any timers set on it
        if op.polling_timer:
            op.polling_timer.cancel()
        if op.retry_after_timer:
            op.retry_after_timer.cancel()
        if op.provisioning_timeout_timer:
            op.provisioning_timeout_timer.cancel()

    @pytest.fixture
    def mock_timer(self, mocker):
        return mocker.patch(
            "azure.iot.device.provisioning.pipeline.pipeline_stages_provisioning.Timer"
        )

    @pytest.mark.it(
        "Adds a provisioning timeout timer with the interval specified in the configuration to the operation, and starts it"
    )
    def test_adds_timer(self, mocker, stage, op, mock_timer):
        stage.run_op(op)
        assert mock_timer.call_count == 1
        assert mock_timer.call_args == mocker.call(constant.DEFAULT_TIMEOUT_INTERVAL, mocker.ANY)
        assert op.provisioning_timeout_timer is mock_timer.return_value
        assert op.provisioning_timeout_timer.start.call_count == 1
        assert op.provisioning_timeout_timer.start.call_args == mocker.call()

    @pytest.mark.it(
        "Sends converted RequestResponse Op down the pipeline after attaching timer to the original op"
    )
    def test_sends_down(self, mocker, stage, op, mock_timer):
        stage.run_op(op)
        assert stage.send_op_down.call_count == 1
        new_op = stage.send_op_down.call_args[0][0]
        assert isinstance(new_op, pipeline_ops_base.RequestAndResponseOperation)
        assert op.provisioning_timeout_timer is mock_timer.return_value

    @pytest.mark.it("Completes the operation unsuccessfully, with a ServiceError due to timeout")
    def test_not_complete_timeout(self, mocker, stage, op, mock_timer):
        # Apply the timer
        stage.run_op(op)
        assert not op.completed
        assert mock_timer.call_count == 1
        on_timer_complete = mock_timer.call_args[0][1]
        # Call timer complete callback (indicating timer completion)
        on_timer_complete()
        # Op is now completed with error
        assert op.completed
        assert isinstance(op.error, exceptions.ServiceError)
        assert "query" in op.error.args[0]

    @pytest.mark.it(
        "Completes the operation successfully, cancels and clears the operation's timeout timer"
    )
    def test_complete_before_timeout(self, mocker, stage, op, mock_timer):
        # Apply the timer
        stage.run_op(op)
        assert not op.completed
        assert mock_timer.call_count == 1
        mock_timer_inst = op.provisioning_timeout_timer
        assert mock_timer_inst is mock_timer.return_value
        assert mock_timer_inst.cancel.call_count == 0
        # Complete the next operation
        new_op = stage.send_op_down.call_args[0][0]
        new_op.status_code = 200
        new_op.response_body = "{}".encode("utf-8")
        new_op.complete()
        # Timer is now cancelled and cleared
        assert mock_timer_inst.cancel.call_count == 1
        assert mock_timer_inst.cancel.call_args == mocker.call()
        assert op.provisioning_timeout_timer is None
| 39.400387 | 204 | 0.706946 | 4,956 | 40,740 | 5.500807 | 0.067595 | 0.036314 | 0.065366 | 0.072629 | 0.874807 | 0.869672 | 0.862373 | 0.851075 | 0.833688 | 0.830533 | 0 | 0.009181 | 0.213991 | 40,740 | 1,033 | 205 | 39.438529 | 0.842171 | 0.041777 | 0 | 0.721271 | 0 | 0.00978 | 0.155217 | 0.030339 | 0 | 0 | 0 | 0 | 0.212714 | 1 | 0.080685 | false | 0.001222 | 0.017115 | 0.025672 | 0.154034 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
73d5cc5278ad1a0d88e6309e0357d565523e1faa | 1,518 | py | Python | download_models.py | chenqifeng22/PhotographicImageSynthesis | e89c281e996aaac8669df0df66fbefec704f0588 | [
"MIT"
] | null | null | null | download_models.py | chenqifeng22/PhotographicImageSynthesis | e89c281e996aaac8669df0df66fbefec704f0588 | [
"MIT"
] | null | null | null | download_models.py | chenqifeng22/PhotographicImageSynthesis | e89c281e996aaac8669df0df66fbefec704f0588 | [
"MIT"
] | null | null | null | import urllib.request
# Every file lives at BASE_URL + <relative path> and is saved to that same
# relative path locally. Each entry: (human-readable description, file paths).
BASE_URL = 'https://cqf.io/data/Tensorflow_models/'
MODELS = [
    ('VGG-19 Model (510Mb)',
     ['VGG_Model/imagenet-vgg-verydeep-19.mat']),
    ('CRN 1024p Model (500Mb)',
     ['result_1024p/model.ckpt.data-00000-of-00001',
      'result_1024p/model.ckpt.meta']),
    ('CRN 512p Model (1.2Gb)',
     ['result_512p/model.ckpt.data-00000-of-00001',
      'result_512p/model.ckpt.meta']),
    ('CRN 256p Model (1.2Gb)',
     ['result_256p/model.ckpt.data-00000-of-00001',
      'result_256p/model.ckpt.meta']),
    ('GTA 256p Model (1.2Gb)',
     ['result_GTA/model.ckpt.data-00000-of-00001',
      'result_GTA/model.ckpt.meta']),
]


def download_models():
    """Download all pretrained model files listed in MODELS, in order.

    Files are written to the relative paths given in MODELS; the destination
    directories are assumed to already exist (as in the original script).
    """
    for description, paths in MODELS:
        # Fixed 'Dowloading' typo present in the original messages.
        print('Downloading %s' % description)
        for path in paths:
            urllib.request.urlretrieve(BASE_URL + path, path)


if __name__ == '__main__':
    download_models()
| 72.285714 | 157 | 0.801713 | 235 | 1,518 | 5.06383 | 0.144681 | 0.121008 | 0.181513 | 0.219328 | 0.911765 | 0.842857 | 0.790756 | 0.736975 | 0.64958 | 0.604202 | 0 | 0.102165 | 0.02635 | 1,518 | 20 | 158 | 75.9 | 0.702977 | 0 | 0 | 0 | 0 | 0.333333 | 0.747694 | 0.206851 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.066667 | 0 | 0.066667 | 0.333333 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
fb7d6e32f5dacae952bbcf2e95bc9e34e6b6557d | 39 | py | Python | flows_get_brightest/version.py | emirkmo/flows-tools | d2f91f39b0635ff1a78240e6b4a5b74da02cfa9d | [
"MIT"
] | 1 | 2021-10-05T16:09:58.000Z | 2021-10-05T16:09:58.000Z | flows_get_brightest/version.py | emirkmo/flows-tools | d2f91f39b0635ff1a78240e6b4a5b74da02cfa9d | [
"MIT"
] | 2 | 2021-10-07T11:51:13.000Z | 2022-02-20T21:36:29.000Z | flows_get_brightest/version.py | emirkmo/flows-tools | d2f91f39b0635ff1a78240e6b4a5b74da02cfa9d | [
"MIT"
] | null | null | null | version = "1.0.1"
__version__ = "1.0.1" | 19.5 | 21 | 0.615385 | 8 | 39 | 2.5 | 0.375 | 0.8 | 0.9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176471 | 0.128205 | 39 | 2 | 21 | 19.5 | 0.411765 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8391a0d3081097d7ad24d95b8b9662c253a88a98 | 1,950 | py | Python | Calculator 2.0.py | chidoski/Python-Projects | 1f18cdc2f56ae10d0eb641f48f949053d14b23fa | [
"MIT"
] | null | null | null | Calculator 2.0.py | chidoski/Python-Projects | 1f18cdc2f56ae10d0eb641f48f949053d14b23fa | [
"MIT"
] | null | null | null | Calculator 2.0.py | chidoski/Python-Projects | 1f18cdc2f56ae10d0eb641f48f949053d14b23fa | [
"MIT"
] | null | null | null | while True:
user_input = raw_input('Type quit to quit the program or choose =,-,*,**,%')
if user_input=='quit':break
elif user_input == '+' :
#Addition
print('Lets add 2 numbers')
number1 = raw_input("Enter the first number: ")
number2 = raw_input("Enter the second number: ")
print('Your answer is: ')
print int(number1) + int(number2)
elif user_input == '-' :
#Subtraction
print('Lets subtract 2 numbers')
number1 = raw_input("Enter the first number: ")
number2 = raw_input("Enter the second number: ")
print('Your answer is: ')
print int(number1) - int(number2)
elif user_input == '*' :
#Multiplication
print('Lets multiply 2 numbers')
number1 = raw_input("Enter the first number: ")
number2 = raw_input("Enter the second number: ")
('Your answer is: ')
print int(number1) * int(number2)
elif user_input == '/' :
#Modulous
print('Lets use division')
number1 = raw_input("Enter the first number: ")
number2 = raw_input("Enter the second number: ")
print('Modulous is: ')
print int(number1) / int(number2)
elif user_input == '**' :
#Exponent
print('Lets use exponents')
number1 = raw_input("Enter the first number: ")
number2 = raw_input("Enter the second number: ")
print('Your answer is: ')
print int(number1) ** int(number2)
elif user_input == '%' :
#Modulous
print('Lets use modulous')
number1 = raw_input("Enter the first number: ")
number2 = raw_input("Enter the second number: ")
print('Modulous is: ')
print int(number1) % int(number2)
zero = raw_input("Enter a number other than zero: ")
if zero == '0':
print("You did not follow instructions you get a 0%.")
else:
print("Awesome you get a 100%")
| 33.62069 | 80 | 0.581026 | 234 | 1,950 | 4.747863 | 0.230769 | 0.10081 | 0.152115 | 0.172817 | 0.707471 | 0.707471 | 0.707471 | 0.707471 | 0.707471 | 0.69577 | 0 | 0.023239 | 0.293846 | 1,950 | 57 | 81 | 34.210526 | 0.783588 | 0.029231 | 0 | 0.386364 | 0 | 0 | 0.350291 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.431818 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
83b82b7725fb21d8e221dcc75fa8232910d56658 | 12,508 | py | Python | General/PJB_ImageRegistration/regTest_ctrlpts/GenerateSummeries_sin.py | petebunting/rsgis_scripts | b35b0403cdfad6c63824d4f8c038f190cdb5978d | [
"MIT"
] | 4 | 2020-09-16T10:45:15.000Z | 2021-05-06T04:34:32.000Z | General/PJB_ImageRegistration/regTest_ctrlpts/GenerateSummeries_sin.py | petebunting/rsgis_scripts | b35b0403cdfad6c63824d4f8c038f190cdb5978d | [
"MIT"
] | null | null | null | General/PJB_ImageRegistration/regTest_ctrlpts/GenerateSummeries_sin.py | petebunting/rsgis_scripts | b35b0403cdfad6c63824d4f8c038f190cdb5978d | [
"MIT"
] | 2 | 2020-07-06T18:03:40.000Z | 2022-02-15T12:45:34.000Z | #! /usr/bin/env python
from CompareCtrlPts import CompareCtrlPts
import gc
# The four parallel lists below were originally 36 hand-written entries each;
# they follow an exact naming pattern, so they are generated instead. The
# resulting list contents and ordering are identical to the originals.
_BASE_DIR = '/Users/pete/Desktop/Registration_Tests'

# (scene/sensor image, reference image) pairs tested under every warp.
_IMAGE_PAIRS = [
    ('p142_CASI', 'LiDAR'),
    ('p138_CASI', 'LiDAR'),
    ('p142_AIRSAR', 'LiDAR'),
    ('p138_AIRSAR', 'LiDAR'),
    ('p142_HyMap', 'LiDAR'),
    ('p138_HyMap', 'LiDAR'),
    ('p142_CASI', 'HyMap'),
    ('p138_CASI', 'HyMap'),
    ('injune2_AIRSAR', 'HyMap'),
    ('injune8_AIRSAR', 'HyMap'),
    ('scene1_ALOS', 'Landsat'),
    ('scene2_ALOS', 'Landsat'),
]

# Warp naming differs between the 'correct' control-point files and the
# registered outputs: (correct-file suffix, registered-file suffix).
_WARPS = [
    ('sin5warpX', 'XSin5Warp'),
    ('sin5warpY', 'YSin5Warp'),
    ('sin10warpX', 'XSin10Warp'),
]

referenceFiles = []   # ground-truth ('correct') control point files
registeredFiles = []  # control points produced by the registration run
outputPlots = []      # per-test-case plot output basenames
title = []            # per-test-case labels used in the printed summary

for _refWarp, _regWarp in _WARPS:
    for _image, _refImage in _IMAGE_PAIRS:
        referenceFiles.append(
            '%s/CorrectProducedCtrlPts/%s_correct_%s_%s_ctrlpts.pts'
            % (_BASE_DIR, _image, _refImage, _refWarp))
        registeredFiles.append(
            '%s/Reg_Ctrl_Pts/%s_%s_%s_image2image.pts'
            % (_BASE_DIR, _image, _refImage, _regWarp))
        outputPlots.append(
            '%s/plots/%s_%s_%s' % (_BASE_DIR, _image, _refImage, _regWarp))
        title.append('%s_%s_%s' % (_image, _refImage, _regWarp))
summeries = []
compareCtrlPts = CompareCtrlPts()
for i in range(len(referenceFiles)):
print title[i]
compareCtrlPts = CompareCtrlPts()
#print referenceFiles[i]
#print registeredFiles[i]
summery = compareCtrlPts.run(referenceFiles[i], registeredFiles[i], outputPlots[i], 'png', 50)
summeries.append(summery)
#print 'Min: ', summery[0], ' Mean: ', summery[1], ' Max: ', summery[2], ' Std Dev: ', summery[3]
#print ''
del compareCtrlPts
gc.collect()
print 'title, \t min, \t mean, \t max, \t stddev'
for i in range(len(title)):
print title[i], ',', summeries[i][0], ',', summeries[i][1], ',', summeries[i][2], ',', summeries[i][3]
| 69.104972 | 117 | 0.860969 | 1,574 | 12,508 | 6.384371 | 0.043202 | 0.096726 | 0.171957 | 0.300925 | 0.861578 | 0.856404 | 0.854413 | 0.853418 | 0.853418 | 0.631108 | 0 | 0.047011 | 0.023825 | 12,508 | 180 | 118 | 69.488889 | 0.776003 | 0.013751 | 0 | 0.012048 | 0 | 0 | 0.892385 | 0.888492 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.012048 | null | null | 0.018072 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
83dab4133a36480ad2bbe7a165d72da8a3fac90c | 23,458 | py | Python | experimental/rank1_bnns/rank1_bnn_layers_test.py | mhavasi/edward2 | b630fea94386f7a6413f7d33ce75bb1dbe413d2d | [
"Apache-2.0"
] | null | null | null | experimental/rank1_bnns/rank1_bnn_layers_test.py | mhavasi/edward2 | b630fea94386f7a6413f7d33ce75bb1dbe413d2d | [
"Apache-2.0"
] | null | null | null | experimental/rank1_bnns/rank1_bnn_layers_test.py | mhavasi/edward2 | b630fea94386f7a6413f7d33ce75bb1dbe413d2d | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for rank-1 BNN layers."""
import itertools
from absl.testing import parameterized
from experimental.rank1_bnns import rank1_bnn_layers # local file import
import numpy as np
import tensorflow.compat.v2 as tf
class Rank1PriorLayersTest(parameterized.TestCase, tf.test.TestCase):
  """Tests for rank-1 BNN layers (Dense/Conv2D/Conv1D/LSTMCell variants)."""

  @parameterized.parameters(
      {'alpha_initializer': 'he_normal',
       'gamma_initializer': 'he_normal',
       'bias_initializer': 'zeros'},
      {'alpha_initializer': 'trainable_deterministic',
       'gamma_initializer': 'trainable_deterministic',
       'bias_initializer': 'trainable_deterministic'},
  )
  def testDenseRank1BatchEnsemble(self,
                                  alpha_initializer,
                                  gamma_initializer,
                                  bias_initializer):
    """Checks DenseRank1 against a manual per-member computation.

    The batched input tiles the same examples once per ensemble member; the
    layer's output must equal applying the shared dense kernel with each
    member's rank-1 perturbation vectors (alpha, gamma) and bias separately.
    """
    tf.keras.backend.set_learning_phase(1)  # training time
    ensemble_size = 3
    examples_per_model = 4
    input_dim = 5
    output_dim = 5
    inputs = tf.random.normal([examples_per_model, input_dim])
    batched_inputs = tf.tile(inputs, [ensemble_size, 1])
    layer = rank1_bnn_layers.DenseRank1(
        output_dim,
        alpha_initializer=alpha_initializer,
        gamma_initializer=gamma_initializer,
        bias_initializer=bias_initializer,
        alpha_regularizer=None,
        gamma_regularizer=None,
        activation=None,
        ensemble_size=ensemble_size)
    output = layer(batched_inputs)
    # Multiplicative rank-1 form: dense(x * alpha_i) * gamma_i + bias_i.
    manual_output = [
        layer.dense(inputs*layer.alpha[i]) * layer.gamma[i] + layer.bias[i]
        for i in range(ensemble_size)]
    manual_output = tf.concat(manual_output, axis=0)
    expected_shape = (ensemble_size*examples_per_model, output_dim)
    self.assertEqual(output.shape, expected_shape)
    self.assertAllClose(output, manual_output)
@parameterized.parameters(
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 4},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 4},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'trainable_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'trainable_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': True,
'ensemble_size': 4},
)
def testDenseRank1AlphaGamma(self,
alpha_initializer,
gamma_initializer,
all_close,
use_additive_perturbation,
ensemble_size):
tf.keras.backend.set_learning_phase(1) # training time
inputs = np.random.rand(5*ensemble_size, 12).astype(np.float32)
model = rank1_bnn_layers.DenseRank1(
4,
ensemble_size=ensemble_size,
alpha_initializer=alpha_initializer,
gamma_initializer=gamma_initializer,
activation=None)
outputs1 = model(inputs)
outputs2 = model(inputs)
self.assertEqual(outputs1.shape, (5*ensemble_size, 4))
if all_close:
self.assertAllClose(outputs1, outputs2)
else:
self.assertNotAllClose(outputs1, outputs2)
model.get_config()
def testDenseRank1Model(self):
inputs = np.random.rand(3, 4, 4, 1).astype(np.float32)
model = tf.keras.Sequential([
tf.keras.layers.Conv2D(3,
kernel_size=2,
padding='SAME',
activation=tf.nn.relu),
tf.keras.layers.Flatten(),
rank1_bnn_layers.DenseRank1(2, activation=None),
])
outputs = model(inputs, training=True)
self.assertEqual(outputs.shape, (3, 2))
self.assertLen(model.losses, 2)
  @parameterized.parameters(
      {'alpha_initializer': 'he_normal',
       'gamma_initializer': 'he_normal'},
      {'alpha_initializer': 'trainable_deterministic',
       'gamma_initializer': 'trainable_deterministic'},
  )
  def testConv2DRank1BatchEnsemble(self, alpha_initializer, gamma_initializer):
    """Checks Conv2DRank1 against a manual per-member computation.

    Tiled inputs through the batch-ensemble layer must equal applying the
    shared convolution with each member's rank-1 perturbation and bias.
    """
    tf.keras.backend.set_learning_phase(1)  # training time
    ensemble_size = 3
    examples_per_model = 4
    input_dim = 5
    output_dim = 5
    inputs = tf.random.normal([examples_per_model, 4, 4, input_dim])
    batched_inputs = tf.tile(inputs, [ensemble_size, 1, 1, 1])
    layer = rank1_bnn_layers.Conv2DRank1(
        output_dim,
        kernel_size=2,
        padding='same',
        alpha_initializer=alpha_initializer,
        gamma_initializer=gamma_initializer,
        alpha_regularizer=None,
        gamma_regularizer=None,
        activation=None,
        ensemble_size=ensemble_size)
    output = layer(batched_inputs)
    # Multiplicative rank-1 form: conv(x * alpha_i) * gamma_i + bias_i.
    manual_output = [
        layer.conv2d(inputs*layer.alpha[i]) * layer.gamma[i] + layer.bias[i]
        for i in range(ensemble_size)]
    manual_output = tf.concat(manual_output, axis=0)
    # 'same' padding preserves the 4x4 spatial shape.
    self.assertEqual(output.shape,
                     (ensemble_size*examples_per_model, 4, 4, output_dim))
    self.assertAllClose(output, manual_output)
@parameterized.parameters(
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 4},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 4},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'trainable_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'trainable_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': True,
'ensemble_size': 4},
)
def testConv2DRank1AlphaGamma(self,
alpha_initializer,
gamma_initializer,
all_close,
use_additive_perturbation,
ensemble_size):
tf.keras.backend.set_learning_phase(1) # training time
inputs = np.random.rand(5*ensemble_size, 4, 4, 12).astype(np.float32)
model = rank1_bnn_layers.Conv2DRank1(
4,
kernel_size=2,
alpha_initializer=alpha_initializer,
gamma_initializer=gamma_initializer,
activation=None)
outputs1 = model(inputs)
outputs2 = model(inputs)
self.assertEqual(outputs1.shape, (5*ensemble_size, 3, 3, 4))
if all_close:
self.assertAllClose(outputs1, outputs2)
else:
self.assertNotAllClose(outputs1, outputs2)
model.get_config()
def testConv2DRank1Model(self):
inputs = np.random.rand(3, 4, 4, 1).astype(np.float32)
model = tf.keras.Sequential([
rank1_bnn_layers.Conv2DRank1(3,
kernel_size=2,
padding='SAME',
activation=tf.nn.relu),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(2, activation=None),
])
outputs = model(inputs, training=True)
self.assertEqual(outputs.shape, (3, 2))
self.assertLen(model.losses, 2)
  @parameterized.parameters(
      {'alpha_initializer': 'he_normal',
       'gamma_initializer': 'he_normal'},
      {'alpha_initializer': 'trainable_deterministic',
       'gamma_initializer': 'trainable_deterministic'},
  )
  def testConv1DRank1BatchEnsemble(self, alpha_initializer, gamma_initializer):
    """Checks Conv1DRank1 against a manual per-member computation.

    Tiled inputs through the batch-ensemble layer must equal applying the
    shared convolution with each member's rank-1 perturbation and bias.
    """
    tf.keras.backend.set_learning_phase(1)  # training time
    ensemble_size = 3
    examples_per_model = 4
    input_dim = 5
    output_dim = 5
    inputs = tf.random.normal([examples_per_model, 4, input_dim])
    batched_inputs = tf.tile(inputs, [ensemble_size, 1, 1])
    layer = rank1_bnn_layers.Conv1DRank1(
        output_dim,
        kernel_size=2,
        padding='same',
        alpha_initializer=alpha_initializer,
        gamma_initializer=gamma_initializer,
        alpha_regularizer=None,
        gamma_regularizer=None,
        activation=None,
        ensemble_size=ensemble_size)
    output = layer(batched_inputs)
    # Multiplicative rank-1 form: conv(x * alpha_i) * gamma_i + bias_i.
    manual_output = [
        layer.conv1d(inputs*layer.alpha[i]) * layer.gamma[i] + layer.bias[i]
        for i in range(ensemble_size)]
    manual_output = tf.concat(manual_output, axis=0)
    # 'same' padding preserves the length-4 temporal dimension.
    self.assertEqual(output.shape,
                     (ensemble_size*examples_per_model, 4, output_dim))
    self.assertAllClose(output, manual_output)
@parameterized.parameters(
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'he_normal',
'gamma_initializer': 'he_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 4},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'zeros',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 4},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'trainable_normal',
'all_close': True,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'zeros',
'gamma_initializer': 'trainable_normal',
'all_close': True,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': False,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': True,
'ensemble_size': 1},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': False,
'ensemble_size': 4},
{'alpha_initializer': 'trainable_normal',
'gamma_initializer': 'trainable_normal',
'all_close': False,
'use_additive_perturbation': True,
'ensemble_size': 4},
)
def testConv1DRank1AlphaGamma(self,
alpha_initializer,
gamma_initializer,
all_close,
use_additive_perturbation,
ensemble_size):
tf.keras.backend.set_learning_phase(1) # training time
inputs = np.random.rand(5*ensemble_size, 4, 12).astype(np.float32)
model = rank1_bnn_layers.Conv1DRank1(
4,
kernel_size=2,
alpha_initializer=alpha_initializer,
gamma_initializer=gamma_initializer,
activation=None)
outputs1 = model(inputs)
outputs2 = model(inputs)
self.assertEqual(outputs1.shape, (5*ensemble_size, 3, 4))
if all_close:
self.assertAllClose(outputs1, outputs2)
else:
self.assertNotAllClose(outputs1, outputs2)
model.get_config()
def testConv1DRank1Model(self):
inputs = np.random.rand(3, 4, 1).astype(np.float32)
model = tf.keras.Sequential([
rank1_bnn_layers.Conv1DRank1(3,
kernel_size=2,
padding='SAME',
activation=tf.nn.relu),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(2, activation=None),
])
outputs = model(inputs, training=True)
self.assertEqual(outputs.shape, (3, 2))
self.assertLen(model.losses, 2)
  @parameterized.parameters(
      itertools.chain(
          itertools.product(
              ('he_normal',), ('he_normal',), ('he_normal',), ('he_normal',),
              ('he_normal',), (True, False), (1, 2), (True, False)),
          itertools.product(
              ('trainable_deterministic',), ('trainable_deterministic',),
              ('trainable_deterministic',), ('trainable_deterministic',),
              ('trainable_deterministic',), (True, False), (1, 2),
              (True, False)))
  )
  def testLSTMCellRank1BatchEnsemble(self, alpha_initializer, gamma_initializer,
                                     recurrent_alpha_initializer,
                                     recurrent_gamma_initializer,
                                     bias_initializer, use_bias, implementation,
                                     use_additive_perturbation):
    """Checks LSTMCellRank1 against a manually-computed LSTM step.

    For each ensemble member, one step of the cell on tiled inputs must
    match an explicit i/f/g/o-gate computation that applies that member's
    rank-1 perturbations. Also round-trips the cell through
    get_config/from_config plus get_weights/set_weights.
    """
    tf.keras.backend.set_learning_phase(1)  # training time
    ensemble_size = 4
    examples_per_model = 4
    input_dim = 5
    output_dim = 5
    inputs = tf.random.normal([examples_per_model, input_dim])
    batched_inputs = tf.tile(inputs, [ensemble_size, 1])
    layer = rank1_bnn_layers.LSTMCellRank1(
        output_dim,
        use_bias=use_bias,
        alpha_initializer=alpha_initializer,
        gamma_initializer=gamma_initializer,
        recurrent_alpha_initializer=recurrent_alpha_initializer,
        recurrent_gamma_initializer=recurrent_gamma_initializer,
        bias_initializer=bias_initializer,
        alpha_regularizer=None,
        gamma_regularizer=None,
        recurrent_alpha_regularizer=None,
        recurrent_gamma_regularizer=None,
        implementation=implementation,
        use_additive_perturbation=use_additive_perturbation,
        ensemble_size=ensemble_size)
    h0 = tf.random.normal([examples_per_model, output_dim])
    c0 = tf.random.normal([examples_per_model, output_dim])

    def compute_rank1_lstm_cell(i):
      # Reference implementation of one LSTM step for ensemble member i,
      # using either additive or multiplicative rank-1 perturbations on the
      # input and recurrent projections.
      if use_additive_perturbation:
        ifgo = tf.linalg.matmul(
            inputs + layer.alpha[i], layer.kernel) + layer.gamma[i]
        ifgo += tf.linalg.matmul(
            h0 + layer.recurrent_alpha[i],
            layer.recurrent_kernel) + layer.recurrent_gamma[i]
      else:
        ifgo = tf.linalg.matmul(
            inputs * layer.alpha[i], layer.kernel) * layer.gamma[i]
        ifgo += tf.linalg.matmul(
            h0 * layer.recurrent_alpha[i],
            layer.recurrent_kernel) * layer.recurrent_gamma[i]
      if use_bias:
        ifgo += layer.bias[i]
      # Split fused pre-activations into input/forget/cell/output gates.
      i, f, g, o = tf.split(ifgo, num_or_size_splits=4, axis=1)
      i = tf.nn.sigmoid(i)
      f = tf.nn.sigmoid(f)
      g = tf.nn.tanh(g)
      o = tf.nn.sigmoid(o)
      c = f*c0 + i*g
      h = o * tf.nn.tanh(c)
      return h

    h0_batched = tf.tile(h0, [ensemble_size, 1])
    c0_batched = tf.tile(c0, [ensemble_size, 1])
    outputs, _ = layer(batched_inputs, (h0_batched, c0_batched))
    manual_outputs = tf.concat(
        [compute_rank1_lstm_cell(i) for i in range(ensemble_size)], axis=0)
    expected_shape = (ensemble_size*examples_per_model, output_dim)
    self.assertEqual(outputs.shape, expected_shape)
    self.assertAllClose(outputs, manual_outputs)
    # A rebuilt cell with copied weights must reproduce the same outputs.
    layer2 = rank1_bnn_layers.LSTMCellRank1.from_config(layer.get_config())
    layer2(batched_inputs, (h0_batched, c0_batched))  # force initialization
    layer2.set_weights(layer.get_weights())
    outputs2, _ = layer2(batched_inputs, (h0_batched, c0_batched))
    self.assertAllClose(outputs, outputs2)
@parameterized.parameters(
list(itertools.product(
('he_normal', 'trainable_normal',),
('he_normal', 'trainable_normal',),
('he_normal', 'trainable_normal',),
('he_normal', 'trainable_normal',),
(1, 2), (True, False)))
)
def testLSTMCellRank1AlphaGamma(self, alpha_initializer, gamma_initializer,
recurrent_alpha_initializer,
recurrent_gamma_initializer,
implementation, use_additive_perturbation):
tf.keras.backend.set_learning_phase(1) # training time
ensemble_size = 4
batch_size = 5 * ensemble_size
output_dim = 4
inputs = np.random.rand(batch_size, 12).astype(np.float32)
layer = rank1_bnn_layers.LSTMCellRank1(
output_dim,
alpha_initializer=alpha_initializer,
gamma_initializer=gamma_initializer,
recurrent_alpha_initializer=recurrent_alpha_initializer,
recurrent_gamma_initializer=recurrent_gamma_initializer,
ensemble_size=ensemble_size)
h0 = tf.random.normal([batch_size, output_dim])
c0 = tf.random.normal([batch_size, output_dim])
outputs1, _ = layer(inputs, (h0, c0))
layer._sample_weights(inputs)
outputs2, _ = layer(inputs, (h0, c0))
self.assertEqual(outputs1.shape, (batch_size, output_dim))
all_close = 'trainable_normal' not in [alpha_initializer, gamma_initializer,
recurrent_alpha_initializer,
recurrent_gamma_initializer]
if all_close:
self.assertAllClose(outputs1, outputs2, rtol=1e-4)
else:
self.assertNotAllClose(outputs1, outputs2)
  @parameterized.parameters(
      list(itertools.product((1, 4), (1, 2), (True, False), (True, False)))
  )
  def testLSTMCellRank1Model(self, ensemble_size, implementation,
                             use_additive_perturbation, use_bias):
    """Runs the cell via tf.keras.layers.RNN and via manual unrolling.

    Verifies that weight sampling between model calls changes outputs, while
    reusing the already-sampled weights reproduces the previous call.
    """
    batch_size = 2 * ensemble_size
    timesteps = 3
    input_dim = 12
    hidden_size = 10
    inputs = np.random.rand(batch_size, timesteps, input_dim).astype(np.float32)
    cell = rank1_bnn_layers.LSTMCellRank1(
        hidden_size, use_bias=use_bias, implementation=implementation,
        use_additive_perturbation=use_additive_perturbation,
        ensemble_size=ensemble_size)
    model = tf.keras.Sequential([
        tf.keras.layers.RNN(cell, return_sequences=True)
    ])
    outputs1 = model(inputs)
    outputs2 = model(inputs)
    # Manual unroll; the [1, hidden_size] zero state broadcasts over batch.
    state = (tf.zeros([1, hidden_size]), tf.zeros([1, hidden_size]))
    outputs3 = []
    for t in range(timesteps):
      out, state = cell(inputs[:, t, :], state)
      outputs3.append(out)
    outputs3 = tf.stack(outputs3, axis=1)
    self.assertEqual(outputs1.shape, (batch_size, timesteps, hidden_size))
    self.assertEqual(outputs3.shape, (batch_size, timesteps, hidden_size))
    # NOTE: `cell.sample_weights` should have been called at the beginning of
    # each call, so these should be different.
    self.assertNotAllClose(outputs1, outputs2)
    # NOTE: We didn't call `cell.sample_weights` again before computing
    # `outputs3`, so the cell should have had the same weights as it did
    # during computation of `outputs2`, and thus yielded the same output
    # tensor.
    self.assertAllClose(outputs2, outputs3)
    self.assertLen(model.losses, 4)
if __name__ == '__main__':
  # Opt into TF2 (eager) semantics before running the test cases.
  tf.enable_v2_behavior()
  tf.test.main()
| 38.582237 | 80 | 0.636414 | 2,499 | 23,458 | 5.686275 | 0.103241 | 0.070936 | 0.080929 | 0.028501 | 0.814215 | 0.794652 | 0.763054 | 0.740746 | 0.713863 | 0.708726 | 0 | 0.018524 | 0.252068 | 23,458 | 607 | 81 | 38.645799 | 0.791394 | 0.046423 | 0 | 0.749104 | 0 | 0 | 0.214973 | 0.056016 | 0 | 0 | 0 | 0 | 0.057348 | 1 | 0.023297 | false | 0 | 0.008961 | 0 | 0.035842 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f7c44e93ff73e835b7ac1a4a47b0972af9654101 | 85 | py | Python | AppPkg/Applications/Python/Python-2.7.2/Lib/struct.py | CEOALT1/RefindPlusUDK | 116b957ad735f96fbb6d80a0ba582046960ba164 | [
"BSD-2-Clause"
] | 2,757 | 2018-04-28T21:41:36.000Z | 2022-03-29T06:33:36.000Z | AppPkg/Applications/Python/Python-2.7.2/Lib/struct.py | CEOALT1/RefindPlusUDK | 116b957ad735f96fbb6d80a0ba582046960ba164 | [
"BSD-2-Clause"
] | 20 | 2019-07-23T15:29:32.000Z | 2022-01-21T12:53:04.000Z | AppPkg/Applications/Python/Python-2.7.2/Lib/struct.py | CEOALT1/RefindPlusUDK | 116b957ad735f96fbb6d80a0ba582046960ba164 | [
"BSD-2-Clause"
] | 449 | 2018-05-09T05:54:05.000Z | 2022-03-30T14:54:18.000Z | from _struct import *
from _struct import _clearcache
from _struct import __doc__
| 21.25 | 32 | 0.811765 | 11 | 85 | 5.545455 | 0.454545 | 0.491803 | 0.786885 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.176471 | 85 | 3 | 33 | 28.333333 | 0.871429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
791e884020ca67e51438c214893f0ab0b6d6f179 | 6,607 | py | Python | lcp_physics/physics/constraints.py | kracon7/lcp-physics | 463d9602350b854464a027b2c57faae412fa2691 | [
"Apache-2.0"
] | 1 | 2022-01-25T09:13:10.000Z | 2022-01-25T09:13:10.000Z | lcp_physics/physics/constraints.py | kracon7/lcp-physics | 463d9602350b854464a027b2c57faae412fa2691 | [
"Apache-2.0"
] | null | null | null | lcp_physics/physics/constraints.py | kracon7/lcp-physics | 463d9602350b854464a027b2c57faae412fa2691 | [
"Apache-2.0"
] | null | null | null | import pygame
import torch
from .utils import Indices, Defaults, cart_to_polar, polar_to_cart
X = Indices.X
Y = Indices.Y
DIM = Defaults.DIM
class Joint:
    """Revolute joint.

    Pins a world-space point of ``body1`` to ``body2`` (or to a fixed world
    position when ``body2`` is None), removing the two translational degrees
    of freedom at the anchor while leaving relative rotation free.
    """

    def __init__(self, body1, body2, pos):
        self.static = False          # joints are equality constraints, not contacts
        self.num_constraints = 2     # one per translational axis
        self.body1 = body1
        self.body2 = body2
        self.pos = body1.p.new_tensor(pos)  # anchor in world coordinates
        # Anchor offset from each body's position, kept in polar form
        # (radius, angle) so it can be re-rotated as the body spins.
        self.pos1 = self.pos - self.body1.pos
        self.r1, self.rot1 = cart_to_polar(self.pos1)
        self.rot2 = None
        if body2 is not None:
            self.pos2 = self.pos - self.body2.pos
            self.r2, self.rot2 = cart_to_polar(self.pos2)

    def J(self):
        """Return the constraint Jacobians (J1, J2) for the two bodies.

        Each Jacobian is 2 x 3: a first column of [-y, x] (the effect of
        angular velocity at the anchor offset) followed by a 2x2 identity
        for the translational velocity. J2 is None for a world-anchored
        joint, and carries opposite signs so the relative anchor velocity
        is constrained to zero.
        """
        J1 = torch.cat([torch.cat([-self.pos1[Y:Y+1], self.pos1[X:X+1]]).unsqueeze(1),
                        torch.eye(DIM).type_as(self.pos)], dim=1)
        J2 = None
        if self.body2 is not None:
            J2 = torch.cat([torch.cat([self.pos2[Y:Y+1], -self.pos2[X:X+1]]).unsqueeze(1),
                            -torch.eye(DIM).type_as(self.pos)], dim=1)
        return J1, J2

    def move(self, dt):
        # Advance the stored anchor angles by each body's angular velocity
        # (v[0] is the rotational component, matching the J layout above).
        self.rot1 = self.rot1 + self.body1.v[0] * dt
        if self.body2 is not None:
            self.rot2 = self.rot2 + self.body2.v[0] * dt
        self.update_pos()

    def update_pos(self):
        # Recompute the world anchor from body1's pose.
        self.pos1 = polar_to_cart(self.r1, self.rot1)
        self.pos = self.body1.pos + self.pos1
        if self.body2 is not None:
            # keep position on body1 as reference
            self.pos2 = self.pos - self.body2.pos

    def draw(self, screen, pixels_per_meter=1):
        # Draw the anchor as a small green dot.
        pos = (self.pos.detach().cpu().numpy() * pixels_per_meter).astype(int)
        return [pygame.draw.circle(screen, (0, 255, 0), pos, 2)]
class FixedJoint:
    """Fixed joint, fixes two bodies together.

    Removes all three relative degrees of freedom (two translational plus
    rotation) between ``body1`` and ``body2``, using body1's center as the
    anchor (so ``pos1`` is always the zero vector).
    """

    def __init__(self, body1, body2):
        self.static = False
        self.num_constraints = 3  # x, y and relative rotation
        self.body1 = body1
        self.body2 = body2
        self.pos = body1.pos
        self.pos1 = self.pos - self.body1.pos
        self.rot1 = self.pos.new_tensor(0)
        self.rot2 = None  # NOTE(review): immediately overwritten below; dead assignment
        self.pos2 = self.pos - self.body2.pos
        self.rot2 = self.body2.p[0] - self.body1.p[0]  # inverted sign?

    def J(self):
        """Return the 3x3 constraint Jacobians (J1, J2).

        The first two rows are the revolute-joint rows (see Joint.J); the
        third row [±1, 0, 0] additionally locks the relative angular
        velocity of the two bodies.
        """
        J1 = torch.cat([torch.cat([-self.pos1[Y:Y+1], self.pos1[X:X+1]]).unsqueeze(1),
                        torch.eye(DIM).type_as(self.pos)], dim=1)
        J1 = torch.cat([J1, J1.new_tensor([1, 0, 0]).unsqueeze(0)], dim=0)
        J2 = torch.cat([torch.cat([self.pos2[Y:Y+1], -self.pos2[X:X+1]]).unsqueeze(1),
                        -torch.eye(DIM).type_as(self.pos)], dim=1)
        J2 = torch.cat([J2, J2.new_tensor([-1, 0, 0]).unsqueeze(0)], dim=0)
        return J1, J2

    def move(self, dt):
        self.update_pos()

    def update_pos(self):
        # Track body1; the anchor is its center, so pos1 stays zero.
        self.pos = self.body1.pos
        self.pos1 = self.pos - self.body1.pos
        if self.body2 is not None:
            # keep position on body1 as reference
            self.pos2 = self.pos - self.body2.pos

    def draw(self, screen, pixels_per_meter=1):
        # Intentionally draws nothing; a debug line between the two bodies
        # is kept below for reference.
        return []
        # start = (self.body1.pos.detach().cpu().numpy() * pixels_per_meter).astype(int)
        # end = (self.body2.pos.detach().cpu().numpy() * pixels_per_meter).astype(int)
        # return [pygame.draw.line(screen, (0, 255, 0), start, end, 2)]
class YConstraint:
    """Prevents a single body from translating along the Y axis."""

    def __init__(self, body1):
        self.static = True
        self.num_constraints = 1
        self.body1 = body1
        self.pos = body1.pos
        self.rot1 = self.body1.p[0]
        # Single-body constraint: no second body or rotation reference.
        self.body2 = None
        self.rot2 = None

    def J(self):
        # Generalized velocity is [angular, vx, vy]; select the vy slot.
        jac = self.pos.new_tensor([0, 0, 1]).unsqueeze(0)
        return jac, None

    def move(self, dt):
        self.update_pos()

    def update_pos(self):
        # Track the constrained body's current pose.
        self.pos = self.body1.pos
        self.rot1 = self.body1.p[0]

    def draw(self, screen, pixels_per_meter=1):
        # Short horizontal green tick marking the constrained point.
        center = (self.pos.detach().cpu().numpy() * pixels_per_meter).astype(int)
        return [pygame.draw.line(screen, (0, 255, 0),
                                 center - [5, 0], center + [5, 0], 2)]
class XConstraint:
    """Prevents a single body from translating along the X axis."""

    def __init__(self, body1):
        self.static = True
        self.num_constraints = 1
        self.body1 = body1
        self.pos = body1.pos
        self.rot1 = self.body1.p[0]
        # Single-body constraint: no second body or rotation reference.
        self.body2 = None
        self.rot2 = None

    def J(self):
        # Generalized velocity is [angular, vx, vy]; select the vx slot.
        jac = self.pos.new_tensor([0, 1, 0]).unsqueeze(0)
        return jac, None

    def move(self, dt):
        self.update_pos()

    def update_pos(self):
        # Track the constrained body's current pose.
        self.pos = self.body1.pos
        self.rot1 = self.body1.p[0]

    def draw(self, screen, pixels_per_meter=1):
        # Short vertical green tick marking the constrained point.
        center = (self.pos.detach().cpu().numpy() * pixels_per_meter).astype(int)
        return [pygame.draw.line(screen, (0, 255, 0),
                                 center - [0, 5], center + [0, 5], 2)]
class RotConstraint:
    """Prevents a single body from rotating."""

    def __init__(self, body1):
        self.static = True
        self.num_constraints = 1
        self.body1 = body1
        self.pos = body1.pos
        self.rot1 = self.body1.p[0]
        # Single-body constraint: no second body or rotation reference.
        self.body2 = None
        self.rot2 = None

    def J(self):
        # Generalized velocity is [angular, vx, vy]; select the angular slot.
        jac = self.pos.new_tensor([1, 0, 0]).unsqueeze(0)
        return jac, None

    def move(self, dt):
        self.update_pos()

    def update_pos(self):
        # Track the constrained body's current pose.
        self.pos = self.body1.pos
        self.rot1 = self.body1.p[0]

    def draw(self, screen, pixels_per_meter=1):
        # Small green circle outline marking the rotation lock.
        center = (self.pos.detach().cpu().numpy() * pixels_per_meter).astype(int)
        return [pygame.draw.circle(screen, (0, 255, 0), center, 5, 1)]
class TotalConstraint:
    """Locks a body completely: no translation and no rotation."""

    def __init__(self, body1):
        self.static = True
        self.num_constraints = 3  # angular + x + y
        self.body1 = body1
        self.pos = body1.pos
        # Offset from the body center, kept in polar form like Joint does.
        self.pos1 = self.pos - self.body1.pos
        self.r1, self.rot1 = cart_to_polar(self.pos1)
        self.body2 = None
        self.rot2 = None
        # Constant Jacobian: identity over all three velocity components.
        self.eye = torch.eye(self.num_constraints).type_as(self.pos)

    def J(self):
        return self.eye, None

    def move(self, dt):
        self.rot1 = self.rot1 + self.body1.v[0] * dt
        self.update_pos()

    def update_pos(self):
        self.pos1 = polar_to_cart(self.r1, self.rot1)
        self.pos = self.body1.pos + self.pos1

    def draw(self, screen, pixels_per_meter=1):
        # Circle plus crosshair marking the fully-locked point.
        center = (self.pos.detach().cpu().numpy() * pixels_per_meter).astype(int)
        green = (0, 255, 0)
        return [pygame.draw.circle(screen, green, center + 1, 5, 1),
                pygame.draw.line(screen, green, center - [5, 0], center + [5, 0], 2),
                pygame.draw.line(screen, green, center - [0, 5], center + [0, 5], 2)]
7923279970ff705d63182dc4a2c372544c28c6c4 | 98 | py | Python | app/api/v1/__init__.py | JackLiu16/flasky | 809758794740c099f9f66a3e2490433dcee26c4e | [
"MIT"
] | 1 | 2018-10-30T08:30:39.000Z | 2018-10-30T08:30:39.000Z | app/api/v1/__init__.py | JackLiu16/flasky | 809758794740c099f9f66a3e2490433dcee26c4e | [
"MIT"
] | null | null | null | app/api/v1/__init__.py | JackLiu16/flasky | 809758794740c099f9f66a3e2490433dcee26c4e | [
"MIT"
] | null | null | null | from flask import Blueprint
# Blueprint for version 1 of the API; route handlers attach to it when the
# errors module (imported below) is loaded.
api_v1 = Blueprint('api_v1', __name__)
from app.api.v1 import errors | 19.6 | 38 | 0.785714 | 16 | 98 | 4.4375 | 0.5625 | 0.211268 | 0.394366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035294 | 0.132653 | 98 | 5 | 39 | 19.6 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0.060606 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0.666667 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
f70fcd603a54e94391c4758ae07ce19fce6dcb4f | 6,385 | py | Python | src/bindings/python/tests/test_ngraph/test_ops_binary.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 1 | 2019-09-22T01:05:07.000Z | 2019-09-22T01:05:07.000Z | src/bindings/python/tests/test_ngraph/test_ops_binary.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 58 | 2020-11-06T12:13:45.000Z | 2022-03-28T13:20:11.000Z | src/bindings/python/tests/test_ngraph/test_ops_binary.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 2 | 2021-07-14T07:40:50.000Z | 2021-07-27T01:40:03.000Z | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import operator
import numpy as np
import pytest
import openvino.runtime.opset8 as ov
from tests.runtime import get_runtime
from tests.test_ngraph.util import run_op_node
@pytest.mark.parametrize(
    "ng_api_helper,numpy_function",
    [
        (ov.add, np.add),
        (ov.divide, np.divide),
        (ov.multiply, np.multiply),
        (ov.subtract, np.subtract),
        (ov.minimum, np.minimum),
        (ov.maximum, np.maximum),
        (ov.mod, np.mod),
        (ov.equal, np.equal),
        (ov.not_equal, np.not_equal),
        (ov.greater, np.greater),
        (ov.greater_equal, np.greater_equal),
        (ov.less, np.less),
        (ov.less_equal, np.less_equal),
    ],
)
def test_binary_op(ng_api_helper, numpy_function):
    """Elementwise binary op on two [2, 2] tensors matches its NumPy twin."""
    runtime = get_runtime()
    input_shape = [2, 2]
    param_a = ov.parameter(input_shape, name="A", dtype=np.float32)
    param_b = ov.parameter(input_shape, name="B", dtype=np.float32)
    computation = runtime.computation(
        ng_api_helper(param_a, param_b), param_a, param_b)
    lhs = np.array([[1, 2], [3, 4]], dtype=np.float32)
    rhs = np.array([[5, 6], [7, 8]], dtype=np.float32)
    assert np.allclose(computation(lhs, rhs), numpy_function(lhs, rhs))
@pytest.mark.parametrize(
    "ng_api_helper,numpy_function",
    [
        (ov.add, np.add),
        (ov.divide, np.divide),
        (ov.multiply, np.multiply),
        (ov.subtract, np.subtract),
        (ov.minimum, np.minimum),
        (ov.maximum, np.maximum),
        (ov.mod, np.mod),
        (ov.equal, np.equal),
        (ov.not_equal, np.not_equal),
        (ov.greater, np.greater),
        (ov.greater_equal, np.greater_equal),
        (ov.less, np.less),
        (ov.less_equal, np.less_equal),
    ],
)
def test_binary_op_with_scalar(ng_api_helper, numpy_function):
    """Binary op with a constant second operand matches its NumPy twin."""
    runtime = get_runtime()
    lhs = np.array([[1, 2], [3, 4]], dtype=np.float32)
    rhs = np.array([[5, 6], [7, 8]], dtype=np.float32)
    param_a = ov.parameter([2, 2], name="A", dtype=np.float32)
    # Second operand is baked into the graph as a constant.
    computation = runtime.computation(ng_api_helper(param_a, rhs), param_a)
    assert np.allclose(computation(lhs), numpy_function(lhs, rhs))
@pytest.mark.parametrize(
    "ng_api_helper,numpy_function",
    [(ov.logical_and, np.logical_and), (ov.logical_or, np.logical_or), (ov.logical_xor, np.logical_xor)],
)
def test_binary_logical_op(ng_api_helper, numpy_function):
    """Elementwise logical op on two boolean Parameters matches its NumPy reference.

    Uses the builtin ``bool``: ``np.bool`` was only a deprecated alias for it
    and was removed in NumPy 1.24, where the old spelling raises AttributeError.
    """
    runtime = get_runtime()
    shape = [2, 2]
    parameter_a = ov.parameter(shape, name="A", dtype=bool)
    parameter_b = ov.parameter(shape, name="B", dtype=bool)
    model = ng_api_helper(parameter_a, parameter_b)
    computation = runtime.computation(model, parameter_a, parameter_b)
    value_a = np.array([[True, False], [False, True]], dtype=bool)
    value_b = np.array([[False, True], [False, True]], dtype=bool)
    result = computation(value_a, value_b)
    expected = numpy_function(value_a, value_b)
    assert np.allclose(result, expected)
@pytest.mark.parametrize(
    "ng_api_helper,numpy_function",
    [(ov.logical_and, np.logical_and), (ov.logical_or, np.logical_or), (ov.logical_xor, np.logical_xor)],
)
def test_binary_logical_op_with_scalar(ng_api_helper, numpy_function):
    """Logical op where the right-hand operand is a constant boolean ndarray.

    Uses the builtin ``bool``: ``np.bool`` was only a deprecated alias for it
    and was removed in NumPy 1.24, where the old spelling raises AttributeError.
    """
    runtime = get_runtime()
    value_a = np.array([[True, False], [False, True]], dtype=bool)
    value_b = np.array([[False, True], [False, True]], dtype=bool)
    shape = [2, 2]
    parameter_a = ov.parameter(shape, name="A", dtype=bool)
    model = ng_api_helper(parameter_a, value_b)
    computation = runtime.computation(model, parameter_a)
    result = computation(value_a)
    expected = numpy_function(value_a, value_b)
    assert np.allclose(result, expected)
@pytest.mark.parametrize(
    "op,numpy_function",
    [
        (operator.add, np.add),
        (operator.sub, np.subtract),
        (operator.mul, np.multiply),
        (operator.truediv, np.divide),
        (operator.eq, np.equal),
        (operator.ne, np.not_equal),
        (operator.gt, np.greater),
        (operator.ge, np.greater_equal),
        (operator.lt, np.less),
        (operator.le, np.less_equal),
    ],
)
def test_binary_operators(op, numpy_function):
    """Overloaded Python operators on a node produce the same result as NumPy.

    The parametrized argument was renamed from ``operator`` to ``op``: the old
    name shadowed the ``operator`` module imported at the top of the file.
    """
    runtime = get_runtime()
    value_a = np.array([[1, 2], [3, 4]], dtype=np.float32)
    value_b = np.array([[4, 5], [1, 7]], dtype=np.float32)
    shape = [2, 2]
    parameter_a = ov.parameter(shape, name="A", dtype=np.float32)
    model = op(parameter_a, value_b)
    computation = runtime.computation(model, parameter_a)
    result = computation(value_a)
    expected = numpy_function(value_a, value_b)
    assert np.allclose(result, expected)
@pytest.mark.parametrize(
    "op,numpy_function",
    [
        (operator.add, np.add),
        (operator.sub, np.subtract),
        (operator.mul, np.multiply),
        (operator.truediv, np.divide),
        (operator.eq, np.equal),
        (operator.ne, np.not_equal),
        (operator.gt, np.greater),
        (operator.ge, np.greater_equal),
        (operator.lt, np.less),
        (operator.le, np.less_equal),
    ],
)
def test_binary_operators_with_scalar(op, numpy_function):
    """Overloaded operators with a constant ndarray right-hand side match NumPy.

    The parametrized argument was renamed from ``operator`` to ``op``: the old
    name shadowed the ``operator`` module imported at the top of the file.
    """
    runtime = get_runtime()
    value_a = np.array([[1, 2], [3, 4]], dtype=np.float32)
    value_b = np.array([[5, 6], [7, 8]], dtype=np.float32)
    shape = [2, 2]
    parameter_a = ov.parameter(shape, name="A", dtype=np.float32)
    model = op(parameter_a, value_b)
    computation = runtime.computation(model, parameter_a)
    result = computation(value_a)
    expected = numpy_function(value_a, value_b)
    assert np.allclose(result, expected)
def test_multiply():
    """Multiply follows NumPy-style broadcasting across operands of different rank."""
    lhs = np.arange(48, dtype=np.int32).reshape((8, 1, 6, 1))
    rhs = np.arange(35, dtype=np.int32).reshape((7, 1, 5))
    assert np.allclose(run_op_node([lhs, rhs], ov.multiply), np.multiply(lhs, rhs))
def test_power_v1():
    """Power-v1 follows NumPy-style broadcasting across operands of different rank."""
    base = np.arange(48, dtype=np.float32).reshape((8, 1, 6, 1))
    exponent = np.arange(20, dtype=np.float32).reshape((4, 1, 5))
    assert np.allclose(run_op_node([base, exponent], ov.power), np.power(base, exponent))
| 30.404762 | 105 | 0.649021 | 902 | 6,385 | 4.406874 | 0.109756 | 0.042264 | 0.05283 | 0.032201 | 0.913962 | 0.906415 | 0.897358 | 0.886289 | 0.876226 | 0.843774 | 0 | 0.021922 | 0.199843 | 6,385 | 209 | 106 | 30.550239 | 0.756117 | 0.01206 | 0 | 0.78125 | 0 | 0 | 0.026328 | 0.025059 | 0 | 0 | 0 | 0 | 0.05 | 1 | 0.05 | false | 0 | 0.0375 | 0 | 0.0875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f77234738ca6e6170ac6c850f4f3de69910bd381 | 6,258 | py | Python | Maintain Median with Heap/maintain_median_heap.py | liu-yunpeng/algorithms | 1bbe34b4a3d2f090007faec25220ac8535213da7 | [
"MIT"
] | 3 | 2021-04-24T01:20:27.000Z | 2021-04-24T20:39:27.000Z | Maintain Median with Heap/maintain_median_heap.py | liu-yunpeng/algorithms | 1bbe34b4a3d2f090007faec25220ac8535213da7 | [
"MIT"
] | null | null | null | Maintain Median with Heap/maintain_median_heap.py | liu-yunpeng/algorithms | 1bbe34b4a3d2f090007faec25220ac8535213da7 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
@author: Yunpeng Liu
"""
import os.path

# open() does not expand "~" itself, so the literal path would point at a
# nonexistent "./~" directory; expand it to the user's home directory first.
input_file = os.path.expanduser(r"~\input.txt")
with open(input_file, 'r') as f:
    # One integer per line.
    input_data = [int(line) for line in f]
# input_data = input_data[:10]  # uncomment to debug on a small prefix
print(input_data)
class Heap(object):
    """Base class for array-backed binary heaps.

    The heap is stored as a plain Python list in ``self.array``; subclasses
    supply the ordering-specific ``insert`` (and ``pop``) logic.
    """

    def __init__(self, array=None):
        # The original default ``array=[]`` was a mutable default argument:
        # every instance created without an explicit list shared the same
        # object. Use None as the sentinel and allocate a fresh list instead.
        self.array = [] if array is None else array

    def insert(self, value):
        """Insert ``value`` into the heap; overridden by subclasses."""
        pass
class Min_Heap(Heap):
    """Array-backed binary min-heap: the smallest element sits at index 0."""

    def __init__(self, array):
        super().__init__(array)

    def insert(self, value):
        """Append ``value``, bubble it toward the root, and return the backing list."""
        heap = self.array
        heap.append(value)
        idx = len(heap) - 1
        # Walk all the way up to the root, swapping whenever the child is
        # smaller than its parent.
        while idx > 0:
            parent = (idx - 1) // 2
            if heap[idx] < heap[parent]:
                heap[idx], heap[parent] = heap[parent], heap[idx]
            idx = parent
        return heap

    def pop(self):
        """Remove and return the smallest element, restoring heap order."""
        heap = self.array
        # Move the last element into the root slot, then sift it down.
        heap[0], heap[-1] = heap[-1], heap[0]
        smallest = heap.pop()
        pos = 0
        while pos * 2 + 1 < len(heap):
            child = pos * 2 + 1
            right = pos * 2 + 2
            # Sift toward the smaller child; ties go to the right child.
            if right < len(heap) and heap[right] <= heap[child]:
                child = right
            if heap[pos] > heap[child]:
                heap[pos], heap[child] = heap[child], heap[pos]
                pos = child
            else:
                break
        return smallest
class Max_Heap(Heap):
    """Array-backed binary max-heap: the largest element sits at index 0."""

    def __init__(self, array):
        super().__init__(array)

    def insert(self, value):
        """Append ``value``, bubble it toward the root, and return the backing list."""
        heap = self.array
        heap.append(value)
        idx = len(heap) - 1
        # Walk all the way up to the root, swapping whenever the child is
        # larger than its parent.
        while idx > 0:
            parent = (idx - 1) // 2
            if heap[idx] > heap[parent]:
                heap[idx], heap[parent] = heap[parent], heap[idx]
            idx = parent
        return heap

    def pop(self):
        """Remove and return the largest element, restoring heap order."""
        heap = self.array
        # Move the last element into the root slot, then sift it down.
        heap[0], heap[-1] = heap[-1], heap[0]
        largest = heap.pop()
        pos = 0
        while pos * 2 + 1 < len(heap):
            child = pos * 2 + 1
            right = pos * 2 + 2
            # Sift toward the larger child; ties go to the right child.
            if right < len(heap) and heap[right] >= heap[child]:
                child = right
            if heap[pos] < heap[child]:
                heap[pos], heap[child] = heap[child], heap[pos]
                pos = child
            else:
                break
        return largest
'''
h1 = Min_Heap([])
h1.insert(4)
h1.insert(12)
h1.insert(5)
h1.insert(7)
h1.insert(21)
h1.insert(2)
h1.insert(11)
print(h1.array)
h2 = Max_Heap([])
h2.insert(4)
h2.insert(12)
h2.insert(5)
h2.insert(7)
h2.insert(21)
h2.insert(2)
h2.insert(11)
print('h2.array')
'''
# Median maintenance: max_heap stores the smaller half of the values seen so
# far, min_heap the larger half, so the running median is always at the root
# of one of the two heaps.
min_heap = Min_Heap([])
max_heap = Max_Heap([])
median_sum = 0  # running sum of the median of every prefix of input_data
for i in input_data:
    if not max_heap.array:
        # First value: it is its own median.
        max_heap.insert(i)
        median_sum += i
        #print('max', max_heap.array)
        continue
    # Route the value to whichever half it belongs in, relative to the
    # current top of the smaller half.
    if i > max_heap.array[0]:
        min_heap.insert(i)
    else:
        max_heap.insert(i)
    # Rebalance so the heap sizes never differ by more than one.
    if len(min_heap.array) > len(max_heap.array) + 1:
        max_heap.insert(min_heap.pop())
    elif len(max_heap.array) > len(min_heap.array) + 1:
        min_heap.insert(max_heap.pop())
    # On equal sizes (even count) the root of max_heap is used, i.e. the
    # larger element of the smaller half — the lower median.
    if len(max_heap.array) >= len(min_heap.array):
        median_sum += max_heap.array[0]
        #print('median is', max_heap.array[0])
    else:
        median_sum += min_heap.array[0]
        #print('median is', min_heap.array[0])
    #print('max', max_heap.array)
    #print('min', min_heap.array)
print(median_sum % 10000)
# Brute-force cross-check: recompute the median of every prefix directly.
import statistics

median_total = 0
find_median_list = []
for i in input_data:
    find_median_list.append(i)
    # statistics.median() averages the two middle values on an even-length
    # list, which diverges from the heap solution's lower median above;
    # median_low() picks the smaller middle value, so the two running sums
    # agree (and stay integral).
    median_total += statistics.median_low(find_median_list)
print(median_total % 10000)
# doesn't take too long either
if __name__ == '__main__':
    # Fixed guard: the original compared against the literal '__name__',
    # which __name__ can never equal, so this whole block was dead code.
    # Median maintenance: max_heap stores the smaller half of the values,
    # min_heap the larger half; the running median is always at one root.
    min_heap = Min_Heap([])
    max_heap = Max_Heap([])
    median_sum = 0
    for i in input_data:
        if not max_heap.array:
            # First value is its own median.
            max_heap.insert(i)
            median_sum += i
            #print('max', max_heap.array)
            continue
        if i > max_heap.array[0]:
            min_heap.insert(i)
        else:
            max_heap.insert(i)
        # Rebalance so the heap sizes never differ by more than one.
        if len(min_heap.array) > len(max_heap.array) + 1:
            max_heap.insert(min_heap.pop())
        elif len(max_heap.array) > len(min_heap.array) + 1:
            min_heap.insert(max_heap.pop())
        # On equal sizes the root of max_heap is the lower median.
        if len(max_heap.array) >= len(min_heap.array):
            median_sum += max_heap.array[0]
            #print('median is', max_heap.array[0])
        else:
            median_sum += min_heap.array[0]
            #print('median is', min_heap.array[0])
        #print('max', max_heap.array)
        #print('min', min_heap.array)
    print(median_sum % 10000)
f7886029224feb4a8566d7290dfcfada25e768e2 | 515 | py | Python | eval_medseg_timm-regnetx_002_Posterize.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | eval_medseg_timm-regnetx_002_Posterize.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | eval_medseg_timm-regnetx_002_Posterize.py | BrunoKrinski/segtool | cb604b5f38104c43a76450136e37c3d1c4b6d275 | [
"MIT"
] | null | null | null | import os
ls=["python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_0_Posterize.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_1_Posterize.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_2_Posterize.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_3_Posterize.yml",
"python main.py --configs configs/eval_medseg_unetplusplus_timm-regnetx_002_4_Posterize.yml",
]
for l in ls:
os.system(l) | 46.818182 | 97 | 0.838835 | 80 | 515 | 5.025 | 0.3 | 0.124378 | 0.149254 | 0.236318 | 0.890547 | 0.890547 | 0.890547 | 0.890547 | 0.890547 | 0.890547 | 0 | 0.041322 | 0.060194 | 515 | 11 | 98 | 46.818182 | 0.789256 | 0 | 0 | 0 | 0 | 0 | 0.872093 | 0.629845 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
e3964c1d779c96cf25b9526f7bf02a42a9c7ef19 | 34,239 | py | Python | sdk/python/pulumi_aws/appautoscaling/policy.py | jen20/pulumi-aws | 172e00c642adc03238f89cc9c5a16b914a77c2b1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/appautoscaling/policy.py | jen20/pulumi-aws | 172e00c642adc03238f89cc9c5a16b914a77c2b1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/appautoscaling/policy.py | jen20/pulumi-aws | 172e00c642adc03238f89cc9c5a16b914a77c2b1 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['PolicyArgs', 'Policy']
@pulumi.input_type
class PolicyArgs:
    # NOTE: this class is auto-generated by the Pulumi Terraform Bridge; the
    # getter/setter pairs below follow the exact shape @pulumi.input_type
    # introspects, so avoid hand-editing their structure.
    def __init__(__self__, *,
                 resource_id: pulumi.Input[str],
                 scalable_dimension: pulumi.Input[str],
                 service_namespace: pulumi.Input[str],
                 name: Optional[pulumi.Input[str]] = None,
                 policy_type: Optional[pulumi.Input[str]] = None,
                 step_scaling_policy_configuration: Optional[pulumi.Input['PolicyStepScalingPolicyConfigurationArgs']] = None,
                 target_tracking_scaling_policy_configuration: Optional[pulumi.Input['PolicyTargetTrackingScalingPolicyConfigurationArgs']] = None):
        """
        The set of arguments for constructing a Policy resource.
        :param pulumi.Input[str] resource_id: The resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
        :param pulumi.Input[str] scalable_dimension: The scalable dimension of the scalable target. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
        :param pulumi.Input[str] service_namespace: The AWS service namespace of the scalable target. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
        :param pulumi.Input[str] name: The name of the policy. Must be between 1 and 255 characters in length.
        :param pulumi.Input[str] policy_type: The policy type. Valid values are `StepScaling` and `TargetTrackingScaling`. Defaults to `StepScaling`. Certain services only support only one policy type. For more information see the [Target Tracking Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-target-tracking.html) and [Step Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-step-scaling-policies.html) documentation.
        :param pulumi.Input['PolicyStepScalingPolicyConfigurationArgs'] step_scaling_policy_configuration: Step scaling policy configuration, requires `policy_type = "StepScaling"` (default). See supported fields below.
        :param pulumi.Input['PolicyTargetTrackingScalingPolicyConfigurationArgs'] target_tracking_scaling_policy_configuration: A target tracking policy, requires `policy_type = "TargetTrackingScaling"`. See supported fields below.
        """
        pulumi.set(__self__, "resource_id", resource_id)
        pulumi.set(__self__, "scalable_dimension", scalable_dimension)
        pulumi.set(__self__, "service_namespace", service_namespace)
        # Optional arguments are only recorded when explicitly provided, so
        # unset values stay absent from the resource's input map.
        if name is not None:
            pulumi.set(__self__, "name", name)
        if policy_type is not None:
            pulumi.set(__self__, "policy_type", policy_type)
        if step_scaling_policy_configuration is not None:
            pulumi.set(__self__, "step_scaling_policy_configuration", step_scaling_policy_configuration)
        if target_tracking_scaling_policy_configuration is not None:
            pulumi.set(__self__, "target_tracking_scaling_policy_configuration", target_tracking_scaling_policy_configuration)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> pulumi.Input[str]:
        """
        The resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
        """
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_id", value)

    @property
    @pulumi.getter(name="scalableDimension")
    def scalable_dimension(self) -> pulumi.Input[str]:
        """
        The scalable dimension of the scalable target. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
        """
        return pulumi.get(self, "scalable_dimension")

    @scalable_dimension.setter
    def scalable_dimension(self, value: pulumi.Input[str]):
        pulumi.set(self, "scalable_dimension", value)

    @property
    @pulumi.getter(name="serviceNamespace")
    def service_namespace(self) -> pulumi.Input[str]:
        """
        The AWS service namespace of the scalable target. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
        """
        return pulumi.get(self, "service_namespace")

    @service_namespace.setter
    def service_namespace(self, value: pulumi.Input[str]):
        pulumi.set(self, "service_namespace", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the policy. Must be between 1 and 255 characters in length.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="policyType")
    def policy_type(self) -> Optional[pulumi.Input[str]]:
        """
        The policy type. Valid values are `StepScaling` and `TargetTrackingScaling`. Defaults to `StepScaling`. Certain services only support only one policy type. For more information see the [Target Tracking Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-target-tracking.html) and [Step Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-step-scaling-policies.html) documentation.
        """
        return pulumi.get(self, "policy_type")

    @policy_type.setter
    def policy_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "policy_type", value)

    @property
    @pulumi.getter(name="stepScalingPolicyConfiguration")
    def step_scaling_policy_configuration(self) -> Optional[pulumi.Input['PolicyStepScalingPolicyConfigurationArgs']]:
        """
        Step scaling policy configuration, requires `policy_type = "StepScaling"` (default). See supported fields below.
        """
        return pulumi.get(self, "step_scaling_policy_configuration")

    @step_scaling_policy_configuration.setter
    def step_scaling_policy_configuration(self, value: Optional[pulumi.Input['PolicyStepScalingPolicyConfigurationArgs']]):
        pulumi.set(self, "step_scaling_policy_configuration", value)

    @property
    @pulumi.getter(name="targetTrackingScalingPolicyConfiguration")
    def target_tracking_scaling_policy_configuration(self) -> Optional[pulumi.Input['PolicyTargetTrackingScalingPolicyConfigurationArgs']]:
        """
        A target tracking policy, requires `policy_type = "TargetTrackingScaling"`. See supported fields below.
        """
        return pulumi.get(self, "target_tracking_scaling_policy_configuration")

    @target_tracking_scaling_policy_configuration.setter
    def target_tracking_scaling_policy_configuration(self, value: Optional[pulumi.Input['PolicyTargetTrackingScalingPolicyConfigurationArgs']]):
        pulumi.set(self, "target_tracking_scaling_policy_configuration", value)
class Policy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
name: Optional[pulumi.Input[str]] = None,
policy_type: Optional[pulumi.Input[str]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
scalable_dimension: Optional[pulumi.Input[str]] = None,
service_namespace: Optional[pulumi.Input[str]] = None,
step_scaling_policy_configuration: Optional[pulumi.Input[pulumi.InputType['PolicyStepScalingPolicyConfigurationArgs']]] = None,
target_tracking_scaling_policy_configuration: Optional[pulumi.Input[pulumi.InputType['PolicyTargetTrackingScalingPolicyConfigurationArgs']]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Provides an Application AutoScaling Policy resource.
## Example Usage
### DynamoDB Table Autoscaling
```python
import pulumi
import pulumi_aws as aws
dynamodb_table_read_target = aws.appautoscaling.Target("dynamodbTableReadTarget",
max_capacity=100,
min_capacity=5,
resource_id="table/tableName",
scalable_dimension="dynamodb:table:ReadCapacityUnits",
service_namespace="dynamodb")
dynamodb_table_read_policy = aws.appautoscaling.Policy("dynamodbTableReadPolicy",
policy_type="TargetTrackingScaling",
resource_id=dynamodb_table_read_target.resource_id,
scalable_dimension=dynamodb_table_read_target.scalable_dimension,
service_namespace=dynamodb_table_read_target.service_namespace,
target_tracking_scaling_policy_configuration=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationArgs(
predefined_metric_specification=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecificationArgs(
predefined_metric_type="DynamoDBReadCapacityUtilization",
),
target_value=70,
))
```
### ECS Service Autoscaling
```python
import pulumi
import pulumi_aws as aws
ecs_target = aws.appautoscaling.Target("ecsTarget",
max_capacity=4,
min_capacity=1,
resource_id="service/clusterName/serviceName",
scalable_dimension="ecs:service:DesiredCount",
service_namespace="ecs")
ecs_policy = aws.appautoscaling.Policy("ecsPolicy",
policy_type="StepScaling",
resource_id=ecs_target.resource_id,
scalable_dimension=ecs_target.scalable_dimension,
service_namespace=ecs_target.service_namespace,
step_scaling_policy_configuration=aws.appautoscaling.PolicyStepScalingPolicyConfigurationArgs(
adjustment_type="ChangeInCapacity",
cooldown=60,
metric_aggregation_type="Maximum",
step_adjustments=[{
"metricIntervalUpperBound": 0,
"scaling_adjustment": -1,
}],
))
```
### Preserve desired count when updating an autoscaled ECS Service
```python
import pulumi
import pulumi_aws as aws
ecs_service = aws.ecs.Service("ecsService",
cluster="clusterName",
task_definition="taskDefinitionFamily:1",
desired_count=2)
```
### Aurora Read Replica Autoscaling
```python
import pulumi
import pulumi_aws as aws
replicas_target = aws.appautoscaling.Target("replicasTarget",
service_namespace="rds",
scalable_dimension="rds:cluster:ReadReplicaCount",
resource_id=f"cluster:{aws_rds_cluster['example']['id']}",
min_capacity=1,
max_capacity=15)
replicas_policy = aws.appautoscaling.Policy("replicasPolicy",
service_namespace=replicas_target.service_namespace,
scalable_dimension=replicas_target.scalable_dimension,
resource_id=replicas_target.resource_id,
policy_type="TargetTrackingScaling",
target_tracking_scaling_policy_configuration=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationArgs(
predefined_metric_specification=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecificationArgs(
predefined_metric_type="RDSReaderAverageCPUUtilization",
),
target_value=75,
scale_in_cooldown=300,
scale_out_cooldown=300,
))
```
### MSK / Kafka Autoscaling
```python
import pulumi
import pulumi_aws as aws
msk_target = aws.appautoscaling.Target("mskTarget",
service_namespace="kafka",
scalable_dimension="kafka:broker-storage:VolumeSize",
resource_id=aws_msk_cluster["example"]["arn"],
min_capacity=1,
max_capacity=8)
targets = aws.appautoscaling.Policy("targets",
service_namespace=msk_target.service_namespace,
scalable_dimension=msk_target.scalable_dimension,
resource_id=msk_target.resource_id,
policy_type="TargetTrackingScaling",
target_tracking_scaling_policy_configuration=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationArgs(
predefined_metric_specification=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecificationArgs(
predefined_metric_type="KafkaBrokerStorageUtilization",
),
target_value=55,
))
```
## Import
Application AutoScaling Policy can be imported using the `service-namespace` , `resource-id`, `scalable-dimension` and `policy-name` separated by `/`.
```sh
$ pulumi import aws:appautoscaling/policy:Policy test-policy service-namespace/resource-id/scalable-dimension/policy-name
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] name: The name of the policy. Must be between 1 and 255 characters in length.
:param pulumi.Input[str] policy_type: The policy type. Valid values are `StepScaling` and `TargetTrackingScaling`. Defaults to `StepScaling`. Certain services only support only one policy type. For more information see the [Target Tracking Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-target-tracking.html) and [Step Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-step-scaling-policies.html) documentation.
:param pulumi.Input[str] resource_id: The resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
:param pulumi.Input[str] scalable_dimension: The scalable dimension of the scalable target. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
:param pulumi.Input[str] service_namespace: The AWS service namespace of the scalable target. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
:param pulumi.Input[pulumi.InputType['PolicyStepScalingPolicyConfigurationArgs']] step_scaling_policy_configuration: Step scaling policy configuration, requires `policy_type = "StepScaling"` (default). See supported fields below.
:param pulumi.Input[pulumi.InputType['PolicyTargetTrackingScalingPolicyConfigurationArgs']] target_tracking_scaling_policy_configuration: A target tracking policy, requires `policy_type = "TargetTrackingScaling"`. See supported fields below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PolicyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides an Application AutoScaling Policy resource.
## Example Usage
### DynamoDB Table Autoscaling
```python
import pulumi
import pulumi_aws as aws
dynamodb_table_read_target = aws.appautoscaling.Target("dynamodbTableReadTarget",
max_capacity=100,
min_capacity=5,
resource_id="table/tableName",
scalable_dimension="dynamodb:table:ReadCapacityUnits",
service_namespace="dynamodb")
dynamodb_table_read_policy = aws.appautoscaling.Policy("dynamodbTableReadPolicy",
policy_type="TargetTrackingScaling",
resource_id=dynamodb_table_read_target.resource_id,
scalable_dimension=dynamodb_table_read_target.scalable_dimension,
service_namespace=dynamodb_table_read_target.service_namespace,
target_tracking_scaling_policy_configuration=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationArgs(
predefined_metric_specification=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecificationArgs(
predefined_metric_type="DynamoDBReadCapacityUtilization",
),
target_value=70,
))
```
### ECS Service Autoscaling
```python
import pulumi
import pulumi_aws as aws
ecs_target = aws.appautoscaling.Target("ecsTarget",
max_capacity=4,
min_capacity=1,
resource_id="service/clusterName/serviceName",
scalable_dimension="ecs:service:DesiredCount",
service_namespace="ecs")
ecs_policy = aws.appautoscaling.Policy("ecsPolicy",
policy_type="StepScaling",
resource_id=ecs_target.resource_id,
scalable_dimension=ecs_target.scalable_dimension,
service_namespace=ecs_target.service_namespace,
step_scaling_policy_configuration=aws.appautoscaling.PolicyStepScalingPolicyConfigurationArgs(
adjustment_type="ChangeInCapacity",
cooldown=60,
metric_aggregation_type="Maximum",
step_adjustments=[{
"metricIntervalUpperBound": 0,
"scaling_adjustment": -1,
}],
))
```
### Preserve desired count when updating an autoscaled ECS Service
```python
import pulumi
import pulumi_aws as aws
ecs_service = aws.ecs.Service("ecsService",
cluster="clusterName",
task_definition="taskDefinitionFamily:1",
desired_count=2)
```
### Aurora Read Replica Autoscaling
```python
import pulumi
import pulumi_aws as aws
replicas_target = aws.appautoscaling.Target("replicasTarget",
service_namespace="rds",
scalable_dimension="rds:cluster:ReadReplicaCount",
resource_id=f"cluster:{aws_rds_cluster['example']['id']}",
min_capacity=1,
max_capacity=15)
replicas_policy = aws.appautoscaling.Policy("replicasPolicy",
service_namespace=replicas_target.service_namespace,
scalable_dimension=replicas_target.scalable_dimension,
resource_id=replicas_target.resource_id,
policy_type="TargetTrackingScaling",
target_tracking_scaling_policy_configuration=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationArgs(
predefined_metric_specification=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecificationArgs(
predefined_metric_type="RDSReaderAverageCPUUtilization",
),
target_value=75,
scale_in_cooldown=300,
scale_out_cooldown=300,
))
```
### MSK / Kafka Autoscaling
```python
import pulumi
import pulumi_aws as aws
msk_target = aws.appautoscaling.Target("mskTarget",
service_namespace="kafka",
scalable_dimension="kafka:broker-storage:VolumeSize",
resource_id=aws_msk_cluster["example"]["arn"],
min_capacity=1,
max_capacity=8)
targets = aws.appautoscaling.Policy("targets",
service_namespace=msk_target.service_namespace,
scalable_dimension=msk_target.scalable_dimension,
resource_id=msk_target.resource_id,
policy_type="TargetTrackingScaling",
target_tracking_scaling_policy_configuration=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationArgs(
predefined_metric_specification=aws.appautoscaling.PolicyTargetTrackingScalingPolicyConfigurationPredefinedMetricSpecificationArgs(
predefined_metric_type="KafkaBrokerStorageUtilization",
),
target_value=55,
))
```
## Import
Application AutoScaling Policy can be imported using the `service-namespace` , `resource-id`, `scalable-dimension` and `policy-name` separated by `/`.
```sh
$ pulumi import aws:appautoscaling/policy:Policy test-policy service-namespace/resource-id/scalable-dimension/policy-name
```
:param str resource_name: The name of the resource.
:param PolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   policy_type: Optional[pulumi.Input[str]] = None,
                   resource_id: Optional[pulumi.Input[str]] = None,
                   scalable_dimension: Optional[pulumi.Input[str]] = None,
                   service_namespace: Optional[pulumi.Input[str]] = None,
                   step_scaling_policy_configuration: Optional[pulumi.Input[pulumi.InputType['PolicyStepScalingPolicyConfigurationArgs']]] = None,
                   target_tracking_scaling_policy_configuration: Optional[pulumi.Input[pulumi.InputType['PolicyTargetTrackingScalingPolicyConfigurationArgs']]] = None,
                   __props__=None,
                   __name__=None,
                   __opts__=None):
    """Validate arguments, build the resource property bag, and delegate to
    the superclass constructor.  Called by ``__init__`` for both of its
    supported calling conventions.
    """
    # Legacy compatibility shims: __name__/__opts__ are deprecated aliases
    # for resource_name/opts.
    if __name__ is not None:
        warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
        resource_name = __name__
    if __opts__ is not None:
        warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
        opts = __opts__
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    # No opts.id means we are creating a new resource, so the property bag
    # is built here from the keyword arguments; a caller-supplied __props__
    # is only legal when looking up an existing resource by id.
    if opts.id is None:
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = dict()
        __props__['name'] = name
        __props__['policy_type'] = policy_type
        # resource_id/scalable_dimension/service_namespace are required
        # unless rehydrating from an existing URN.
        if resource_id is None and not opts.urn:
            raise TypeError("Missing required property 'resource_id'")
        __props__['resource_id'] = resource_id
        if scalable_dimension is None and not opts.urn:
            raise TypeError("Missing required property 'scalable_dimension'")
        __props__['scalable_dimension'] = scalable_dimension
        if service_namespace is None and not opts.urn:
            raise TypeError("Missing required property 'service_namespace'")
        __props__['service_namespace'] = service_namespace
        __props__['step_scaling_policy_configuration'] = step_scaling_policy_configuration
        __props__['target_tracking_scaling_policy_configuration'] = target_tracking_scaling_policy_configuration
        __props__['arn'] = None  # output-only; filled in by the provider
    super(Policy, __self__).__init__(
        'aws:appautoscaling/policy:Policy',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        arn: Optional[pulumi.Input[str]] = None,
        name: Optional[pulumi.Input[str]] = None,
        policy_type: Optional[pulumi.Input[str]] = None,
        resource_id: Optional[pulumi.Input[str]] = None,
        scalable_dimension: Optional[pulumi.Input[str]] = None,
        service_namespace: Optional[pulumi.Input[str]] = None,
        step_scaling_policy_configuration: Optional[pulumi.Input[pulumi.InputType['PolicyStepScalingPolicyConfigurationArgs']]] = None,
        target_tracking_scaling_policy_configuration: Optional[pulumi.Input[pulumi.InputType['PolicyTargetTrackingScalingPolicyConfigurationArgs']]] = None) -> 'Policy':
    """
    Look up an existing Policy resource by name and provider ID, seeding
    its state with any extra property values supplied.
    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] arn: The ARN assigned by AWS to the scaling policy.
    :param pulumi.Input[str] name: The name of the policy. Must be between 1 and 255 characters in length.
    :param pulumi.Input[str] policy_type: The policy type. Valid values are `StepScaling` and `TargetTrackingScaling`. Defaults to `StepScaling`. Certain services only support only one policy type. For more information see the [Target Tracking Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-target-tracking.html) and [Step Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-step-scaling-policies.html) documentation.
    :param pulumi.Input[str] resource_id: The resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
    :param pulumi.Input[str] scalable_dimension: The scalable dimension of the scalable target. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
    :param pulumi.Input[str] service_namespace: The AWS service namespace of the scalable target. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
    :param pulumi.Input[pulumi.InputType['PolicyStepScalingPolicyConfigurationArgs']] step_scaling_policy_configuration: Step scaling policy configuration, requires `policy_type = "StepScaling"` (default). See supported fields below.
    :param pulumi.Input[pulumi.InputType['PolicyTargetTrackingScalingPolicyConfigurationArgs']] target_tracking_scaling_policy_configuration: A target tracking policy, requires `policy_type = "TargetTrackingScaling"`. See supported fields below.
    """
    # Bind the provider ID into the options so the engine performs a
    # lookup instead of a create.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
    __props__ = {
        "arn": arn,
        "name": name,
        "policy_type": policy_type,
        "resource_id": resource_id,
        "scalable_dimension": scalable_dimension,
        "service_namespace": service_namespace,
        "step_scaling_policy_configuration": step_scaling_policy_configuration,
        "target_tracking_scaling_policy_configuration": target_tracking_scaling_policy_configuration,
    }
    return Policy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
    """
    The ARN assigned by AWS to the scaling policy.
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "arn")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    The name of the policy. Must be between 1 and 255 characters in length.
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "name")
@property
@pulumi.getter(name="policyType")
def policy_type(self) -> pulumi.Output[Optional[str]]:
    """
    The policy type. Valid values are `StepScaling` and `TargetTrackingScaling`. Defaults to `StepScaling`. Certain services only support only one policy type. For more information see the [Target Tracking Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-target-tracking.html) and [Step Scaling Policies](https://docs.aws.amazon.com/autoscaling/application/userguide/application-auto-scaling-step-scaling-policies.html) documentation.
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "policy_type")
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> pulumi.Output[str]:
    """
    The resource type and unique identifier string for the resource associated with the scaling policy. Documentation can be found in the `ResourceId` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "resource_id")
@property
@pulumi.getter(name="scalableDimension")
def scalable_dimension(self) -> pulumi.Output[str]:
    """
    The scalable dimension of the scalable target. Documentation can be found in the `ScalableDimension` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "scalable_dimension")
@property
@pulumi.getter(name="serviceNamespace")
def service_namespace(self) -> pulumi.Output[str]:
    """
    The AWS service namespace of the scalable target. Documentation can be found in the `ServiceNamespace` parameter at: [AWS Application Auto Scaling API Reference](http://docs.aws.amazon.com/ApplicationAutoScaling/latest/APIReference/API_RegisterScalableTarget.html#API_RegisterScalableTarget_RequestParameters)
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "service_namespace")
@property
@pulumi.getter(name="stepScalingPolicyConfiguration")
def step_scaling_policy_configuration(self) -> pulumi.Output[Optional['outputs.PolicyStepScalingPolicyConfiguration']]:
    """
    Step scaling policy configuration, requires `policy_type = "StepScaling"` (default). See supported fields below.
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "step_scaling_policy_configuration")
@property
@pulumi.getter(name="targetTrackingScalingPolicyConfiguration")
def target_tracking_scaling_policy_configuration(self) -> pulumi.Output[Optional['outputs.PolicyTargetTrackingScalingPolicyConfiguration']]:
    """
    A target tracking policy, requires `policy_type = "TargetTrackingScaling"`. See supported fields below.
    """
    # Read the resolved output value from the pulumi resource state.
    return pulumi.get(self, "target_tracking_scaling_policy_configuration")
def translate_output_property(self, prop):
    """Map a camelCase provider property name to its snake_case form,
    returning the name unchanged when no mapping exists."""
    translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
    return translated if translated else prop
def translate_input_property(self, prop):
    """Map a snake_case Python property name to its camelCase form,
    returning the name unchanged when no mapping exists."""
    translated = _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop)
    return translated if translated else prop
| 58.32879 | 532 | 0.707234 | 3,470 | 34,239 | 6.742363 | 0.080403 | 0.031971 | 0.061122 | 0.035904 | 0.907249 | 0.873226 | 0.863182 | 0.853907 | 0.831766 | 0.812703 | 0 | 0.002916 | 0.208651 | 34,239 | 586 | 533 | 58.428328 | 0.860565 | 0.58521 | 0 | 0.403756 | 1 | 0 | 0.183114 | 0.107934 | 0 | 0 | 0 | 0 | 0 | 1 | 0.140845 | false | 0.004695 | 0.032864 | 0.00939 | 0.267606 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e3d5fc88d7eab7ea4d07c32f50d5b32686839ad5 | 92 | py | Python | src/todo/__init__.py | MarkStefanovic/todo-api | fb6198511712df853e693787839533f0c9956178 | [
"MIT"
] | null | null | null | src/todo/__init__.py | MarkStefanovic/todo-api | fb6198511712df853e693787839533f0c9956178 | [
"MIT"
] | null | null | null | src/todo/__init__.py | MarkStefanovic/todo-api | fb6198511712df853e693787839533f0c9956178 | [
"MIT"
] | null | null | null | from src.todo.adapter import *
from src.todo.domain import *
from src.todo.service import *
| 23 | 30 | 0.771739 | 15 | 92 | 4.733333 | 0.466667 | 0.295775 | 0.464789 | 0.478873 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 92 | 3 | 31 | 30.666667 | 0.8875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
e3e9d8da94a25a438143345ac6797853da2e1d53 | 125,861 | py | Python | tests/pylib/test_agentx_packet.py | akiernan/ntpsec | 7536e2e4bc25fc8db4d5a2f00a91e91ba82d7d7b | [
"CC-BY-4.0",
"BSD-2-Clause",
"NTP",
"MIT",
"BSD-3-Clause"
] | 201 | 2015-11-16T16:57:58.000Z | 2022-03-21T01:01:34.000Z | tests/pylib/test_agentx_packet.py | akiernan/ntpsec | 7536e2e4bc25fc8db4d5a2f00a91e91ba82d7d7b | [
"CC-BY-4.0",
"BSD-2-Clause",
"NTP",
"MIT",
"BSD-3-Clause"
] | 4 | 2019-03-20T21:49:34.000Z | 2021-12-30T18:08:56.000Z | tests/pylib/test_agentx_packet.py | akiernan/ntpsec | 7536e2e4bc25fc8db4d5a2f00a91e91ba82d7d7b | [
"CC-BY-4.0",
"BSD-2-Clause",
"NTP",
"MIT",
"BSD-3-Clause"
] | 40 | 2016-05-25T05:25:51.000Z | 2021-12-30T17:40:00.000Z | # -*- coding: utf-8 -*-
import unittest
import ntp.agentx_packet as AX
from ntp.agentx_packet import slicedata, decode_pduheader, makeflags
# Arbitrary trailing bytes, used to verify that decoders leave data beyond
# the span they were asked to parse untouched.
extraData = b"Would you kindly ignore this?"
# An OID with 128 subids (1..128) and its big-endian wire encoding
# (header byte 0x80 = subid count, then each subid as a 4-byte word).
maximumOIDsubs = tuple(range(1, 129))
maximumOIDstr = b"""\x80\x00\x00\x00\
\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04\
\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\x07\x00\x00\x00\x08\
\x00\x00\x00\x09\x00\x00\x00\x0A\x00\x00\x00\x0B\x00\x00\x00\x0C\
\x00\x00\x00\x0D\x00\x00\x00\x0E\x00\x00\x00\x0F\x00\x00\x00\x10\
\
\x00\x00\x00\x11\x00\x00\x00\x12\x00\x00\x00\x13\x00\x00\x00\x14\
\x00\x00\x00\x15\x00\x00\x00\x16\x00\x00\x00\x17\x00\x00\x00\x18\
\x00\x00\x00\x19\x00\x00\x00\x1A\x00\x00\x00\x1B\x00\x00\x00\x1C\
\x00\x00\x00\x1D\x00\x00\x00\x1E\x00\x00\x00\x1F\x00\x00\x00\x20\
\
\x00\x00\x00\x21\x00\x00\x00\x22\x00\x00\x00\x23\x00\x00\x00\x24\
\x00\x00\x00\x25\x00\x00\x00\x26\x00\x00\x00\x27\x00\x00\x00\x28\
\x00\x00\x00\x29\x00\x00\x00\x2A\x00\x00\x00\x2B\x00\x00\x00\x2C\
\x00\x00\x00\x2D\x00\x00\x00\x2E\x00\x00\x00\x2F\x00\x00\x00\x30\
\
\x00\x00\x00\x31\x00\x00\x00\x32\x00\x00\x00\x33\x00\x00\x00\x34\
\x00\x00\x00\x35\x00\x00\x00\x36\x00\x00\x00\x37\x00\x00\x00\x38\
\x00\x00\x00\x39\x00\x00\x00\x3A\x00\x00\x00\x3B\x00\x00\x00\x3C\
\x00\x00\x00\x3D\x00\x00\x00\x3E\x00\x00\x00\x3F\x00\x00\x00\x40\
\
\x00\x00\x00\x41\x00\x00\x00\x42\x00\x00\x00\x43\x00\x00\x00\x44\
\x00\x00\x00\x45\x00\x00\x00\x46\x00\x00\x00\x47\x00\x00\x00\x48\
\x00\x00\x00\x49\x00\x00\x00\x4A\x00\x00\x00\x4B\x00\x00\x00\x4C\
\x00\x00\x00\x4D\x00\x00\x00\x4E\x00\x00\x00\x4F\x00\x00\x00\x50\
\
\x00\x00\x00\x51\x00\x00\x00\x52\x00\x00\x00\x53\x00\x00\x00\x54\
\x00\x00\x00\x55\x00\x00\x00\x56\x00\x00\x00\x57\x00\x00\x00\x58\
\x00\x00\x00\x59\x00\x00\x00\x5A\x00\x00\x00\x5B\x00\x00\x00\x5C\
\x00\x00\x00\x5D\x00\x00\x00\x5E\x00\x00\x00\x5F\x00\x00\x00\x60\
\
\x00\x00\x00\x61\x00\x00\x00\x62\x00\x00\x00\x63\x00\x00\x00\x64\
\x00\x00\x00\x65\x00\x00\x00\x66\x00\x00\x00\x67\x00\x00\x00\x68\
\x00\x00\x00\x69\x00\x00\x00\x6A\x00\x00\x00\x6B\x00\x00\x00\x6C\
\x00\x00\x00\x6D\x00\x00\x00\x6E\x00\x00\x00\x6F\x00\x00\x00\x70\
\
\x00\x00\x00\x71\x00\x00\x00\x72\x00\x00\x00\x73\x00\x00\x00\x74\
\x00\x00\x00\x75\x00\x00\x00\x76\x00\x00\x00\x77\x00\x00\x00\x78\
\x00\x00\x00\x79\x00\x00\x00\x7A\x00\x00\x00\x7B\x00\x00\x00\x7C\
\x00\x00\x00\x7D\x00\x00\x00\x7E\x00\x00\x00\x7F\x00\x00\x00\x80\
"""
# The most commonly used PDU-header flag setups; some tests build custom
# flags inline.  NOTE(review): the flag positions below are inferred from
# usage (last argument appears to select network byte order, fourth the
# non-default-context bit) -- confirm against makeflags in ntp.agentx_packet.
standardFlags_bare = makeflags(False, False, False, False, True)
standardFlags = {"flags": standardFlags_bare}
lilEndianFlags = {"flags": makeflags(False, False, False, False, False)}
contextFlags = {"flags": makeflags(False, False, False, True, True)}
def test_pducore(tester, pdu, pduType, endian, sID, tactID, pktID):
tester.assertEqual(pdu.pduType, pduType)
tester.assertEqual(pdu.bigEndian, endian)
tester.assertEqual(pdu.sessionID, sID)
tester.assertEqual(pdu.transactionID, tactID)
tester.assertEqual(pdu.packetID, pktID)
class TestNtpclientsNtpsnmpd(unittest.TestCase):
#
# PDU tests
#
def test_AgentXPDU(self):
    """Exercise the AgentXPDU base class: repr() formatting (with and
    without context, with extra public attributes) and __eq__ semantics."""
    cls = AX.AgentXPDU
    # Test these so we don't need a bunch of redundant tests
    # Test basic, without context
    test = cls(0, True, 1, 2, 3, context=extraData)
    self.assertEqual(repr(test),
                     "AgentXPDU(bigEndian=True, packetID=3, "
                     "pduType=0, sessionID=1, transactionID=2)")
    # Test basic, with context
    test = cls(0, True, 1, 2, 3, hascontext=True)
    self.assertEqual(repr(test),
                     "AgentXPDU(bigEndian=True, context=None, packetID=3, "
                     "pduType=0, sessionID=1, transactionID=2)")
    # Test with added variables
    test.foo = 42
    test._wyk = extraData  # this should be ignored
    test.context = "jabber jabber jabber"
    self.assertEqual(repr(test),
                     "AgentXPDU(bigEndian=True, "
                     "context='jabber jabber jabber', "
                     "foo=42, packetID=3, "
                     "pduType=0, sessionID=1, transactionID=2)")
    # Test __eq__
    a = cls(0, True, 1, 2, 3)
    b = cls(0, True, 1, 2, 3)
    # Test all equal
    self.assertEqual(a == b, True)
    # Test same class, false
    b = "blah blah"
    self.assertEqual(a == b, False)
    # Test different pdu
    b = cls(1, True, 1, 2, 3)
    self.assertEqual(a == b, False)
    # Test different endianness
    b = cls(0, False, 1, 2, 3)
    self.assertEqual(a == b, False)
    # Test different session ID
    b = cls(0, True, 42, 2, 3)
    self.assertEqual(a == b, False)
    # Test different transaction ID
    b = cls(0, True, 1, 23, 3)
    self.assertEqual(a == b, False)
    # Test different packet ID
    b = cls(0, True, 1, 2, 13)
    self.assertEqual(a == b, False)
    # Test different _hascontext
    b = cls(0, True, 1, 2, 3, hascontext=True)
    self.assertEqual(a == b, False)
    # Test with context, equal
    a = cls(0, True, 1, 2, 3, True, "foo")
    b = cls(0, True, 1, 2, 3, True, "foo")
    self.assertEqual(a == b, True)
    # Test with context, not equal
    b = cls(0, True, 1, 2, 3, True, "bar")
    self.assertEqual(a == b, False)
def test_OpenPDU(self):
    """Round-trip OpenPDU: construction, wire encoding, decoding, and
    packetVars(), for both a null (empty OID) and a populated packet."""
    dec = AX.decode_OpenPDU
    cls = AX.OpenPDU
    # Test PDU init, null packet
    nullPkt = cls(True, 1, 2, 3, 4, (), "")
    test_pducore(self, nullPkt, AX.PDU_OPEN, True, 1, 2, 3)
    self.assertEqual(nullPkt.timeout, 4)
    self.assertEqual(nullPkt.oid, AX.OID((), False))
    self.assertEqual(nullPkt.description, "")
    # Test PDU init, basic packet
    basicPkt = cls(False, 1, 2, 3, 4, (1, 2, 3, 4), "foo")
    test_pducore(self, basicPkt, AX.PDU_OPEN, False, 1, 2, 3)
    self.assertEqual(basicPkt.timeout, 4)
    self.assertEqual(basicPkt.oid, AX.OID((1, 2, 3, 4), False))
    self.assertEqual(basicPkt.description, "foo")
    # Test encoding, null packet
    nullPkt_str = nullPkt.encode()
    self.assertEqual(nullPkt_str,
                     b"\x01\x01\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x0C"
                     b"\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")
    # Test encoding, basic packet
    basicPkt_str = basicPkt.encode()
    self.assertEqual(basicPkt_str,
                     b"\x01\x01\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x20\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x03\x00\x00\x00foo\x00")
    # Test decoding, null packet
    header, body = slicedata(nullPkt_str, 20)
    header = decode_pduheader(header)
    nullPkt_new = dec(body, header)
    test_pducore(self, nullPkt_new, AX.PDU_OPEN, True, 1, 2, 3)
    self.assertEqual(nullPkt_new.timeout, 4)
    self.assertEqual(nullPkt_new.oid, AX.OID((), False))
    self.assertEqual(nullPkt_new.description, "")
    # Test decoding, basic packet
    header, body = slicedata(basicPkt_str, 20)
    header = decode_pduheader(header)
    basicPkt_new = dec(body, header)
    test_pducore(self, basicPkt_new, AX.PDU_OPEN, False, 1, 2, 3)
    self.assertEqual(basicPkt_new.timeout, 4)
    self.assertEqual(basicPkt_new.oid, AX.OID((1, 2, 3, 4), False))
    self.assertEqual(basicPkt_new.description, "foo")
    # Test packetVars
    self.assertEqual(basicPkt_new.packetVars(),
                     {"pduType": 1,
                      "bigEndian": False,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "timeout": 4,
                      "oid": AX.OID((1, 2, 3, 4), False),
                      "description": "foo"})
def test_ClosePDU(self):
    """Round-trip ClosePDU: construction, wire encoding, decoding, and
    packetVars(), in both byte orders."""
    dec = AX.decode_ClosePDU
    cls = AX.ClosePDU
    # Test init
    pkt = cls(True, 1, 2, 3, AX.RSN_OTHER)
    test_pducore(self, pkt, AX.PDU_CLOSE, True, 1, 2, 3)
    self.assertEqual(pkt.reason, AX.RSN_OTHER)
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3, AX.RSN_OTHER)
    test_pducore(self, pkt_LE, AX.PDU_CLOSE, False, 1, 2, 3)
    self.assertEqual(pkt_LE.reason, AX.RSN_OTHER)
    # Test encoding
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x02\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x04"
                     b"\x01\x00\x00\x00")
    # Test encoding, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x02\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x01\x00\x00\x00")
    # Test decoding
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_CLOSE, True, 1, 2, 3)
    self.assertEqual(pkt_new.reason, AX.RSN_OTHER)
    # Test decoding, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_CLOSE, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.reason, AX.RSN_OTHER)
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 2,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "reason": AX.RSN_OTHER})
def test_RegisterPDU(self):
    """Round-trip RegisterPDU: basic and optional-field ("fancy") packets
    through construction, encoding, decoding, packetVars(), and the
    __eq__ coverage gap for the register-specific fields."""
    dec = AX.decode_xRegisterPDU
    cls = AX.RegisterPDU
    # Test init, basic packet
    basicPkt = cls(True, 1, 2, 3, 4, 5, (1, 2, 3))
    test_pducore(self, basicPkt, AX.PDU_REGISTER, True, 1, 2, 3)
    self.assertEqual(basicPkt.timeout, 4)
    self.assertEqual(basicPkt.priority, 5)
    self.assertEqual(basicPkt.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt.rangeSubid, 0)
    self.assertEqual(basicPkt.upperBound, None)
    self.assertEqual(basicPkt.context, None)
    # Test init, basic packet, little endian
    basicPkt_LE = cls(False, 1, 2, 3, 4, 5, (1, 2, 3))
    test_pducore(self, basicPkt_LE, AX.PDU_REGISTER, False, 1, 2, 3)
    self.assertEqual(basicPkt_LE.timeout, 4)
    self.assertEqual(basicPkt_LE.priority, 5)
    self.assertEqual(basicPkt_LE.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt_LE.rangeSubid, 0)
    self.assertEqual(basicPkt_LE.upperBound, None)
    self.assertEqual(basicPkt_LE.context, None)
    # Test init, fancy packet
    fancyPkt = cls(True, 1, 2, 3, 4, 5, (1, 2, 3),
                   rangeSubid=5, upperBound=23, context="blah")
    test_pducore(self, fancyPkt, AX.PDU_REGISTER, True, 1, 2, 3)
    self.assertEqual(fancyPkt.timeout, 4)
    self.assertEqual(fancyPkt.priority, 5)
    self.assertEqual(fancyPkt.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(fancyPkt.rangeSubid, 5)
    self.assertEqual(fancyPkt.upperBound, 23)
    self.assertEqual(fancyPkt.context, "blah")
    # Test encode, basic packet
    basicPkt_str = basicPkt.encode()
    self.assertEqual(basicPkt_str,
                     b"\x01\x03\x11\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x14"
                     b"\x04\x05\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03")
    # Test encode, basic packet, little endian
    basicPkt_LE_str = basicPkt_LE.encode()
    self.assertEqual(basicPkt_LE_str,
                     b"\x01\x03\x01\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x14\x00\x00\x00"
                     b"\x04\x05\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00")
    # Test encode, fancy packet
    fancyPkt_str = fancyPkt.encode()
    self.assertEqual(fancyPkt_str,
                     b"\x01\x03\x19\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x20"
                     b"\x00\x00\x00\x04blah"
                     b"\x04\x05\x05\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x17")
    # Test decoding, basic packet
    header, body = slicedata(basicPkt_str, 20)
    header = decode_pduheader(header)
    basicPkt_new = dec(body, header)
    test_pducore(self, basicPkt_new, AX.PDU_REGISTER, True, 1, 2, 3)
    self.assertEqual(basicPkt_new.timeout, 4)
    self.assertEqual(basicPkt_new.priority, 5)
    self.assertEqual(basicPkt_new.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt_new.rangeSubid, 0)
    self.assertEqual(basicPkt_new.upperBound, None)
    self.assertEqual(basicPkt_new.context, None)
    # Test decoding, basic packet, little endian
    header, body = slicedata(basicPkt_LE_str, 20)
    header = decode_pduheader(header)
    basicPkt_LE_new = dec(body, header)
    test_pducore(self, basicPkt_LE_new, AX.PDU_REGISTER, False, 1, 2, 3)
    self.assertEqual(basicPkt_LE_new.timeout, 4)
    self.assertEqual(basicPkt_LE_new.priority, 5)
    self.assertEqual(basicPkt_LE_new.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt_LE_new.rangeSubid, 0)
    self.assertEqual(basicPkt_LE_new.upperBound, None)
    self.assertEqual(basicPkt_LE_new.context, None)
    # Test decoding, fancy packet
    header, body = slicedata(fancyPkt_str, 20)
    header = decode_pduheader(header)
    fancyPkt_new = dec(body, header)
    test_pducore(self, fancyPkt_new, AX.PDU_REGISTER, True, 1, 2, 3)
    self.assertEqual(fancyPkt_new.timeout, 4)
    self.assertEqual(fancyPkt_new.priority, 5)
    self.assertEqual(fancyPkt_new.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(fancyPkt_new.rangeSubid, 5)
    self.assertEqual(fancyPkt_new.upperBound, 23)
    self.assertEqual(fancyPkt_new.context, "blah")
    # Test packetVars
    self.assertEqual(basicPkt_new.packetVars(),
                     {"pduType": 3,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "timeout": 4,
                      "priority": 5,
                      "subtree": AX.OID((1, 2, 3), False),
                      "rangeSubid": 0,
                      "upperBound": None,
                      "context": None})
    # Test __eq__ gap, equal
    a = cls(True, 1, 2, 3, 4, 5, ())
    b = cls(True, 1, 2, 3, 4, 5, ())
    self.assertEqual(a == b, True)
    # Test __eq__ gap, unequal
    b = cls(False, 1, 2, 3, 4, 5, ())
    self.assertEqual(a == b, False)
def test_UnregisterPDU(self):
    """Round-trip UnregisterPDU: basic and optional-field ("fancy")
    packets through construction, encoding, decoding, and packetVars().

    Bug fix: the two basic-packet decoding checks previously asserted the
    header fields of the ORIGINAL packets (basicPkt / basicPkt_LE) rather
    than the freshly decoded ones, so the decoder output was never
    verified; they now check basicPkt_new / basicPkt_LE_new, matching the
    pattern used in test_RegisterPDU.
    """
    dec = AX.decode_xRegisterPDU
    cls = AX.UnregisterPDU
    # Test init, basic packet
    basicPkt = cls(True, 1, 2, 3, 5, (1, 2, 3))
    test_pducore(self, basicPkt, AX.PDU_UNREGISTER, True, 1, 2, 3)
    self.assertEqual(basicPkt.priority, 5)
    self.assertEqual(basicPkt.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt.rangeSubid, 0)
    self.assertEqual(basicPkt.upperBound, None)
    self.assertEqual(basicPkt.context, None)
    # Test init, basic packet, little endian
    basicPkt_LE = cls(False, 1, 2, 3, 5, (1, 2, 3))
    test_pducore(self, basicPkt_LE, AX.PDU_UNREGISTER, False, 1, 2, 3)
    self.assertEqual(basicPkt_LE.priority, 5)
    self.assertEqual(basicPkt_LE.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt_LE.rangeSubid, 0)
    self.assertEqual(basicPkt_LE.upperBound, None)
    self.assertEqual(basicPkt_LE.context, None)
    # Test init, fancy packet
    fancyPkt = cls(True, 1, 2, 3, 5, (1, 2, 3),
                   rangeSubid=5, upperBound=23, context="blah")
    test_pducore(self, fancyPkt, AX.PDU_UNREGISTER, True, 1, 2, 3)
    self.assertEqual(fancyPkt.priority, 5)
    self.assertEqual(fancyPkt.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(fancyPkt.rangeSubid, 5)
    self.assertEqual(fancyPkt.upperBound, 23)
    self.assertEqual(fancyPkt.context, "blah")
    # Test encode, basic packet
    basicPkt_str = basicPkt.encode()
    self.assertEqual(basicPkt_str,
                     b"\x01\x04\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x14"
                     b"\x00\x05\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03")
    # Test encode, basic packet, little endian
    basicPkt_LE_str = basicPkt_LE.encode()
    self.assertEqual(basicPkt_LE_str,
                     b"\x01\x04\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x14\x00\x00\x00"
                     b"\x00\x05\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00")
    # Test encode, fancy packet
    fancyPkt_str = fancyPkt.encode()
    self.assertEqual(fancyPkt_str,
                     b"\x01\x04\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x20"
                     b"\x00\x00\x00\x04blah"
                     b"\x00\x05\x05\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x17")
    # Test decoding, basic packet
    header, body = slicedata(basicPkt_str, 20)
    header = decode_pduheader(header)
    basicPkt_new = dec(body, header)
    # FIX: assert on the decoded packet, not the original.
    test_pducore(self, basicPkt_new, AX.PDU_UNREGISTER, True, 1, 2, 3)
    self.assertEqual(basicPkt_new.priority, 5)
    self.assertEqual(basicPkt_new.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt_new.rangeSubid, 0)
    self.assertEqual(basicPkt_new.upperBound, None)
    self.assertEqual(basicPkt_new.context, None)
    # Test decoding, basic packet, little endian
    header, body = slicedata(basicPkt_LE_str, 20)
    header = decode_pduheader(header)
    basicPkt_LE_new = dec(body, header)
    # FIX: assert on the decoded packet, not the original.
    test_pducore(self, basicPkt_LE_new, AX.PDU_UNREGISTER, False, 1, 2, 3)
    self.assertEqual(basicPkt_LE_new.priority, 5)
    self.assertEqual(basicPkt_LE_new.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(basicPkt_LE_new.rangeSubid, 0)
    self.assertEqual(basicPkt_LE_new.upperBound, None)
    self.assertEqual(basicPkt_LE_new.context, None)
    # Test decoding, fancy packet
    header, body = slicedata(fancyPkt_str, 20)
    header = decode_pduheader(header)
    fancyPkt_new = dec(body, header)
    test_pducore(self, fancyPkt_new, AX.PDU_UNREGISTER, True, 1, 2, 3)
    self.assertEqual(fancyPkt_new.priority, 5)
    self.assertEqual(fancyPkt_new.subtree, AX.OID((1, 2, 3), False))
    self.assertEqual(fancyPkt_new.rangeSubid, 5)
    self.assertEqual(fancyPkt_new.upperBound, 23)
    self.assertEqual(fancyPkt_new.context, "blah")
    # Test packetVars
    self.assertEqual(basicPkt_new.packetVars(),
                     {"pduType": 4,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "priority": 5,
                      "subtree": AX.OID((1, 2, 3), False),
                      "rangeSubid": 0,
                      "upperBound": None,
                      "context": None})
def test_GetPDU(self):
    """Round-trip GetPDU: null and fully populated packets (SearchRange
    lists, context) through construction, encoding, decoding, and
    packetVars(), in both byte orders."""
    dec = AX.decode_xGetPDU
    cls = AX.GetPDU
    srch = AX.SearchRange
    # Test init, null packet
    nullPkt = cls(True, 1, 2, 3, ())
    test_pducore(self, nullPkt, AX.PDU_GET, True, 1, 2, 3)
    self.assertEqual(nullPkt.oidranges, ())
    self.assertEqual(nullPkt.context, None)
    # Test init, full packet
    fullPkt = cls(True, 1, 2, 3,
                  (srch((1, 2, 3), (1, 2, 5), False),
                   srch((10, 20), (30, 40), True)),
                  context="blah")
    test_pducore(self, fullPkt, AX.PDU_GET, True, 1, 2, 3)
    self.assertEqual(fullPkt.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt.context, "blah")
    # Test init, full packet, little endian
    fullPkt_LE = cls(False, 1, 2, 3,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)),
                     context="blah")
    test_pducore(self, fullPkt_LE, AX.PDU_GET, False, 1, 2, 3)
    self.assertEqual(fullPkt_LE.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt_LE.context, "blah")
    # Test encode, null packet
    nullPkt_str = nullPkt.encode()
    self.assertEqual(nullPkt_str,
                     b"\x01\x05\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x00")
    # Test encode, full packet
    fullPkt_str = fullPkt.encode()
    self.assertEqual(fullPkt_str,
                     b"\x01\x05\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x40"
                     b"\x00\x00\x00\x04blah"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x05"
                     b"\x02\x00\x01\x00\x00\x00\x00\x0A\x00\x00\x00\x14"
                     b"\x02\x00\x00\x00\x00\x00\x00\x1E\x00\x00\x00\x28")
    # Test encode, full packet, little endian
    fullPkt_LE_str = fullPkt_LE.encode()
    self.assertEqual(fullPkt_LE_str,
                     b"\x01\x05\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x40\x00\x00\x00"
                     b"\x04\x00\x00\x00blah"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x05\x00\x00\x00"
                     b"\x02\x00\x01\x00\x0A\x00\x00\x00\x14\x00\x00\x00"
                     b"\x02\x00\x00\x00\x1E\x00\x00\x00\x28\x00\x00\x00")
    # Test decoding, null packet
    header, body = slicedata(nullPkt_str, 20)
    header = decode_pduheader(header)
    nullPkt_new = dec(body, header)
    test_pducore(self, nullPkt_new, AX.PDU_GET, True, 1, 2, 3)
    self.assertEqual(nullPkt_new.oidranges, ())
    self.assertEqual(nullPkt_new.context, None)
    # Test decoding, full packet
    header, body = slicedata(fullPkt_str, 20)
    header = decode_pduheader(header)
    fullPkt_new = dec(body, header)
    test_pducore(self, fullPkt_new, AX.PDU_GET, True, 1, 2, 3)
    self.assertEqual(fullPkt_new.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt_new.context, "blah")
    # Test decoding, full packet, little endian
    header, body = slicedata(fullPkt_LE_str, 20)
    header = decode_pduheader(header)
    fullPkt_LE_new = dec(body, header)
    test_pducore(self, fullPkt_LE_new, AX.PDU_GET, False, 1, 2, 3)
    self.assertEqual(fullPkt_LE_new.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt_LE_new.context, "blah")
    # Test packetVars
    self.assertEqual(nullPkt_new.packetVars(),
                     {"pduType": 5,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "oidranges": (),
                      "context": None})
def test_GetNextPDU(self):
    """Round-trip GetNextPDU through init, encode, decode, and packetVars().

    Exercises a null packet (no search ranges), a full packet with two
    SearchRanges plus a context string, and the same full packet encoded
    little-endian. Wire-format expectations are spelled out as literal
    byte strings (AgentX PDU type 6).
    """
    dec = AX.decode_xGetPDU
    cls = AX.GetNextPDU
    srch = AX.SearchRange
    # Test init, null packet
    nullPkt = cls(True, 1, 2, 3, ())
    test_pducore(self, nullPkt, AX.PDU_GET_NEXT, True, 1, 2, 3)
    self.assertEqual(nullPkt.oidranges, ())
    self.assertEqual(nullPkt.context, None)
    # Test init, full packet
    fullPkt = cls(True, 1, 2, 3,
                  (srch((1, 2, 3), (1, 2, 5), False),
                   srch((10, 20), (30, 40), True)),
                  context="blah")
    test_pducore(self, fullPkt, AX.PDU_GET_NEXT, True, 1, 2, 3)
    self.assertEqual(fullPkt.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt.context, "blah")
    # Test init, full packet, little endian
    fullPkt_LE = cls(False, 1, 2, 3,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)),
                     context="blah")
    test_pducore(self, fullPkt_LE, AX.PDU_GET_NEXT, False, 1, 2, 3)
    self.assertEqual(fullPkt_LE.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt_LE.context, "blah")
    # Test encode, null packet
    nullPkt_str = nullPkt.encode()
    self.assertEqual(nullPkt_str,
                     b"\x01\x06\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x00")
    # Test encode, full packet
    fullPkt_str = fullPkt.encode()
    self.assertEqual(fullPkt_str,
                     b"\x01\x06\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x40"
                     b"\x00\x00\x00\x04blah"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x05"
                     b"\x02\x00\x01\x00\x00\x00\x00\x0A\x00\x00\x00\x14"
                     b"\x02\x00\x00\x00\x00\x00\x00\x1E\x00\x00\x00\x28")
    # Test encode, full packet, little endian
    fullPkt_LE_str = fullPkt_LE.encode()
    self.assertEqual(fullPkt_LE_str,
                     b"\x01\x06\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x40\x00\x00\x00"
                     b"\x04\x00\x00\x00blah"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x05\x00\x00\x00"
                     b"\x02\x00\x01\x00\x0A\x00\x00\x00\x14\x00\x00\x00"
                     b"\x02\x00\x00\x00\x1E\x00\x00\x00\x28\x00\x00\x00")
    # Test decoding, null packet
    header, body = slicedata(nullPkt_str, 20)
    header = decode_pduheader(header)
    nullPkt_new = dec(body, header)
    test_pducore(self, nullPkt_new, AX.PDU_GET_NEXT, True, 1, 2, 3)
    self.assertEqual(nullPkt_new.oidranges, ())
    self.assertEqual(nullPkt_new.context, None)
    # Test decoding, full packet
    header, body = slicedata(fullPkt_str, 20)
    header = decode_pduheader(header)
    fullPkt_new = dec(body, header)
    test_pducore(self, fullPkt_new, AX.PDU_GET_NEXT, True, 1, 2, 3)
    self.assertEqual(fullPkt_new.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt_new.context, "blah")
    # Test decoding, full packet, little endian
    header, body = slicedata(fullPkt_LE_str, 20)
    header = decode_pduheader(header)
    fullPkt_LE_new = dec(body, header)
    test_pducore(self, fullPkt_LE_new, AX.PDU_GET_NEXT, False, 1, 2, 3)
    self.assertEqual(fullPkt_LE_new.oidranges,
                     (srch((1, 2, 3), (1, 2, 5), False),
                      srch((10, 20), (30, 40), True)))
    self.assertEqual(fullPkt_LE_new.context, "blah")
    # Test packetVars
    self.assertEqual(nullPkt_new.packetVars(),
                     {"pduType": 6,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "oidranges": (),
                      "context": None})
def test_GetBulkPDU(self):
    """Round-trip GetBulkPDU through init, encode, decode, and packetVars().

    Checks the non-repeaters / max-repetitions fields in addition to the
    shared header fields, in both big- and little-endian byte order
    (AgentX PDU type 7).
    """
    dec = AX.decode_GetBulkPDU
    cls = AX.GetBulkPDU
    srch = AX.SearchRange
    # Test init
    pkt = cls(True, 1, 2, 3, 1, 5,
              (srch((1, 2), (3, 4), False),
               srch((6, 7), (8, 9), True)),
              context="blah")
    test_pducore(self, pkt, AX.PDU_GET_BULK, True, 1, 2, 3)
    self.assertEqual(pkt.nonReps, 1)
    self.assertEqual(pkt.maxReps, 5)
    self.assertEqual(pkt.oidranges,
                     (srch((1, 2), (3, 4), False),
                      srch((6, 7), (8, 9), True)))
    self.assertEqual(pkt.context, "blah")
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3, 1, 5,
                 (srch((1, 2), (3, 4), False),
                  srch((6, 7), (8, 9), True)),
                 context="blah")
    test_pducore(self, pkt_LE, AX.PDU_GET_BULK, False, 1, 2, 3)
    self.assertEqual(pkt_LE.nonReps, 1)
    self.assertEqual(pkt_LE.maxReps, 5)
    self.assertEqual(pkt_LE.oidranges,
                     (srch((1, 2), (3, 4), False),
                      srch((6, 7), (8, 9), True)))
    self.assertEqual(pkt_LE.context, "blah")
    # Test encoding
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x07\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x3C"
                     b"\x00\x00\x00\x04blah"
                     b"\x00\x01\x00\x05"
                     b"\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x02\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04"
                     b"\x02\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x07"
                     b"\x02\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x09")
    # Test encoding, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x07\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x3C\x00\x00\x00"
                     b"\x04\x00\x00\x00blah"
                     b"\x01\x00\x05\x00"
                     b"\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x02\x00\x01\x00\x06\x00\x00\x00\x07\x00\x00\x00"
                     b"\x02\x00\x00\x00\x08\x00\x00\x00\x09\x00\x00\x00")
    # Test decoding
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_GET_BULK, True, 1, 2, 3)
    self.assertEqual(pkt_new.nonReps, 1)
    self.assertEqual(pkt_new.maxReps, 5)
    self.assertEqual(pkt_new.oidranges,
                     (srch((1, 2), (3, 4), False),
                      srch((6, 7), (8, 9), True)))
    self.assertEqual(pkt_new.context, "blah")
    # Test decoding, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_GET_BULK, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.nonReps, 1)
    self.assertEqual(pkt_LE_new.maxReps, 5)
    self.assertEqual(pkt_LE_new.oidranges,
                     (srch((1, 2), (3, 4), False),
                      srch((6, 7), (8, 9), True)))
    self.assertEqual(pkt_LE_new.context, "blah")
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 7,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "nonReps": 1,
                      "maxReps": 5,
                      "oidranges": (srch((1, 2), (3, 4), False),
                                    srch((6, 7), (8, 9), True)),
                      "context": "blah"})
def test_TestSetPDU(self):
    """Round-trip TestSetPDU through init, encode, decode, and packetVars().

    Carries two varbinds (an OID value and an octet string) with a
    context, in both byte orders (AgentX PDU type 8). Note the decode
    checks compare varbind names as AX.OID instances, while init accepts
    plain tuples.
    """
    dec = AX.decode_TestSetPDU
    cls = AX.TestSetPDU
    # Test init
    pkt = cls(True, 1, 2, 3,
              (AX.Varbind(AX.VALUE_OID, (1, 2, 3), AX.OID((4, 5, 6), False)),
               AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
              context="blah")
    test_pducore(self, pkt, AX.PDU_TEST_SET, True, 1, 2, 3)
    self.assertEqual(pkt.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt.context, "blah")
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3,
                 (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                             AX.OID((4, 5, 6), False)),
                  AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
                 context="blah")
    test_pducore(self, pkt_LE, AX.PDU_TEST_SET, False, 1, 2, 3)
    self.assertEqual(pkt_LE.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_LE.context, "blah")
    # Test encoding
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x08\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x48"
                     b"\x00\x00\x00\x04blah"
                     b"\x00\x06\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x04\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x04"
                     b"\x00\x00\x00\x04blah")
    # Test encoding, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x08\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x48\x00\x00\x00"
                     b"\x04\x00\x00\x00blah"
                     b"\x06\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x04\x00\x00\x00"
                     b"\x04\x00\x00\x00blah")
    # Test decoding
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_TEST_SET, True, 1, 2, 3)
    self.assertEqual(pkt_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID,
                                 AX.OID((1, 2, 3), False),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR,
                                 AX.OID((1, 2, 4), False),
                                 "blah")))
    self.assertEqual(pkt_new.context, "blah")
    # Test decoding, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_TEST_SET, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID,
                                 AX.OID((1, 2, 3), False),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR,
                                 AX.OID((1, 2, 4), False),
                                 "blah")))
    self.assertEqual(pkt_LE_new.context, "blah")
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 8,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "varbinds": (AX.Varbind(AX.VALUE_OID,
                                              AX.OID((1, 2, 3), False),
                                              AX.OID((4, 5, 6), False)),
                                   AX.Varbind(AX.VALUE_OCTET_STR,
                                              AX.OID((1, 2, 4), False),
                                              "blah")),
                      "context": "blah"})
def test_CommitSetPDU(self):
    """Round-trip CommitSetPDU: init, encode, decode, packetVars().

    CommitSet carries only the common header fields, so the test covers
    both byte orders with no payload beyond the header.
    """
    decoder = AX.decode_CommitSetPDU
    pdu_class = AX.CommitSetPDU
    # Construct one packet per byte order and check the core fields
    be_pdu = pdu_class(True, 1, 2, 3)
    test_pducore(self, be_pdu, AX.PDU_COMMIT_SET, True, 1, 2, 3)
    le_pdu = pdu_class(False, 1, 2, 3)
    test_pducore(self, le_pdu, AX.PDU_COMMIT_SET, False, 1, 2, 3)
    # Wire encoding, big endian
    be_wire = be_pdu.encode()
    self.assertEqual(be_wire,
                     b"\x01\x09\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x00")
    # Wire encoding, little endian
    le_wire = le_pdu.encode()
    self.assertEqual(le_wire,
                     b"\x01\x09\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x00")
    # Decode, big endian
    raw_header, raw_body = slicedata(be_wire, 20)
    be_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, be_decoded, AX.PDU_COMMIT_SET, True, 1, 2, 3)
    # Decode, little endian
    raw_header, raw_body = slicedata(le_wire, 20)
    le_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, le_decoded, AX.PDU_COMMIT_SET, False, 1, 2, 3)
    # packetVars must expose every header field
    self.assertEqual(be_decoded.packetVars(),
                     {"pduType": 9,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3})
def test_UndoSetPDU(self):
    """Round-trip UndoSetPDU: init, encode, decode, packetVars().

    UndoSet is header-only (AgentX PDU type 10); verify both byte
    orders.
    """
    decoder = AX.decode_UndoSetPDU
    pdu_class = AX.UndoSetPDU
    # Construct one packet per byte order and check the core fields
    be_pdu = pdu_class(True, 1, 2, 3)
    test_pducore(self, be_pdu, AX.PDU_UNDO_SET, True, 1, 2, 3)
    le_pdu = pdu_class(False, 1, 2, 3)
    test_pducore(self, le_pdu, AX.PDU_UNDO_SET, False, 1, 2, 3)
    # Wire encoding, big endian
    be_wire = be_pdu.encode()
    self.assertEqual(be_wire,
                     b"\x01\x0A\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x00")
    # Wire encoding, little endian
    le_wire = le_pdu.encode()
    self.assertEqual(le_wire,
                     b"\x01\x0A\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x00")
    # Decode, big endian
    raw_header, raw_body = slicedata(be_wire, 20)
    be_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, be_decoded, AX.PDU_UNDO_SET, True, 1, 2, 3)
    # Decode, little endian
    raw_header, raw_body = slicedata(le_wire, 20)
    le_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, le_decoded, AX.PDU_UNDO_SET, False, 1, 2, 3)
    # packetVars must expose every header field
    self.assertEqual(be_decoded.packetVars(),
                     {"pduType": 10,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3})
def test_CleanupSetPDU(self):
    """Round-trip CleanupSetPDU: init, encode, decode, packetVars().

    CleanupSet is header-only (AgentX PDU type 11); verify both byte
    orders.
    """
    decoder = AX.decode_CleanupSetPDU
    pdu_class = AX.CleanupSetPDU
    # Construct one packet per byte order and check the core fields
    be_pdu = pdu_class(True, 1, 2, 3)
    test_pducore(self, be_pdu, AX.PDU_CLEANUP_SET, True, 1, 2, 3)
    le_pdu = pdu_class(False, 1, 2, 3)
    test_pducore(self, le_pdu, AX.PDU_CLEANUP_SET, False, 1, 2, 3)
    # Wire encoding, big endian
    be_wire = be_pdu.encode()
    self.assertEqual(be_wire,
                     b"\x01\x0B\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x00")
    # Wire encoding, little endian
    le_wire = le_pdu.encode()
    self.assertEqual(le_wire,
                     b"\x01\x0B\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x00")
    # Decode, big endian
    raw_header, raw_body = slicedata(be_wire, 20)
    be_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, be_decoded, AX.PDU_CLEANUP_SET, True, 1, 2, 3)
    # Decode, little endian
    raw_header, raw_body = slicedata(le_wire, 20)
    le_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, le_decoded, AX.PDU_CLEANUP_SET, False, 1, 2, 3)
    # packetVars must expose every header field
    self.assertEqual(be_decoded.packetVars(),
                     {"pduType": 11,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3})
def test_PingPDU(self):
    """Round-trip PingPDU: init, encode, decode, packetVars().

    Ping carries only the common header plus an optional context string
    (AgentX PDU type 13); verify both byte orders.
    """
    decoder = AX.decode_PingPDU
    pdu_class = AX.PingPDU
    # Construct one packet per byte order; check core fields and context
    be_pdu = pdu_class(True, 1, 2, 3, "blah")
    test_pducore(self, be_pdu, AX.PDU_PING, True, 1, 2, 3)
    self.assertEqual(be_pdu.context, "blah")
    le_pdu = pdu_class(False, 1, 2, 3, "blah")
    test_pducore(self, le_pdu, AX.PDU_PING, False, 1, 2, 3)
    self.assertEqual(le_pdu.context, "blah")
    # Wire encoding, big endian
    be_wire = be_pdu.encode()
    self.assertEqual(be_wire,
                     b"\x01\x0D\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x08"
                     b"\x00\x00\x00\x04blah")
    # Wire encoding, little endian
    le_wire = le_pdu.encode()
    self.assertEqual(le_wire,
                     b"\x01\x0D\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x08\x00\x00\x00"
                     b"\x04\x00\x00\x00blah")
    # Decode, big endian
    raw_header, raw_body = slicedata(be_wire, 20)
    be_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, be_decoded, AX.PDU_PING, True, 1, 2, 3)
    self.assertEqual(be_decoded.context, "blah")
    # Decode, little endian
    raw_header, raw_body = slicedata(le_wire, 20)
    le_decoded = decoder(raw_body, decode_pduheader(raw_header))
    test_pducore(self, le_decoded, AX.PDU_PING, False, 1, 2, 3)
    self.assertEqual(le_decoded.context, "blah")
    # packetVars exposes all header fields plus the context
    self.assertEqual(be_decoded.packetVars(),
                     {"pduType": 13,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "context": "blah"})
def test_NotifyPDU(self):
    """Round-trip NotifyPDU through init, encode, decode, and packetVars().

    Carries two varbinds (an OID value and an octet string) with a
    context, in both byte orders (AgentX PDU type 12).
    """
    dec = AX.decode_NotifyPDU
    cls = AX.NotifyPDU
    # Test init
    pkt = cls(True, 1, 2, 3,
              (AX.Varbind(AX.VALUE_OID, (1, 2, 3), AX.OID((4, 5, 6), False)),
               AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
              context="blah")
    test_pducore(self, pkt, AX.PDU_NOTIFY, True, 1, 2, 3)
    self.assertEqual(pkt.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt.context, "blah")
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3,
                 (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                             AX.OID((4, 5, 6), False)),
                  AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
                 context="blah")
    test_pducore(self, pkt_LE, AX.PDU_NOTIFY, False, 1, 2, 3)
    self.assertEqual(pkt_LE.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_LE.context, "blah")
    # Test encode
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x0C\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x48"
                     b"\x00\x00\x00\x04blah"
                     b"\x00\x06\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x04\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x04"
                     b"\x00\x00\x00\x04blah")
    # Test encode, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x0C\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x48\x00\x00\x00"
                     b"\x04\x00\x00\x00blah"
                     b"\x06\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x04\x00\x00\x00"
                     b"\x04\x00\x00\x00blah")
    # Test decode
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_NOTIFY, True, 1, 2, 3)
    self.assertEqual(pkt_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_new.context, "blah")
    # Test decode, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_NOTIFY, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_LE_new.context, "blah")
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 12,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "varbinds": (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                              AX.OID((4, 5, 6), False)),
                                   AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4),
                                              "blah")),
                      "context": "blah"})
def test_IndexAllocPDU(self):
    """Round-trip IndexAllocPDU through init, encode, decode, packetVars().

    Carries the newIndex/anyIndex header flags, two varbinds, and a
    context, in both byte orders (AgentX PDU type 14).

    Fix: the big-endian decode branch now also asserts the decoded
    context, matching the little-endian branch here and the sibling
    test_IndexDeallocPDU; previously the check was missing.
    """
    dec = AX.decode_xIndexAllocPDU
    cls = AX.IndexAllocPDU
    # Test init
    pkt = cls(True, 1, 2, 3, True, True,
              (AX.Varbind(AX.VALUE_OID, (1, 2, 3), AX.OID((4, 5, 6), False)),
               AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
              context="blah")
    test_pducore(self, pkt, AX.PDU_INDEX_ALLOC, True, 1, 2, 3)
    self.assertEqual(pkt.newIndex, True)
    self.assertEqual(pkt.anyIndex, True)
    self.assertEqual(pkt.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt.context, "blah")
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3, True, True,
                 (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                             AX.OID((4, 5, 6), False)),
                  AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
                 context="blah")
    test_pducore(self, pkt_LE, AX.PDU_INDEX_ALLOC, False, 1, 2, 3)
    self.assertEqual(pkt_LE.newIndex, True)
    self.assertEqual(pkt_LE.anyIndex, True)
    self.assertEqual(pkt_LE.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_LE.context, "blah")
    # Test encode
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x0E\x1E\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x48"
                     b"\x00\x00\x00\x04blah"
                     b"\x00\x06\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x04\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x04"
                     b"\x00\x00\x00\x04blah")
    # Test encode, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x0E\x0E\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x48\x00\x00\x00"
                     b"\x04\x00\x00\x00blah"
                     b"\x06\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x04\x00\x00\x00"
                     b"\x04\x00\x00\x00blah")
    # Test decode
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_INDEX_ALLOC, True, 1, 2, 3)
    self.assertEqual(pkt_new.newIndex, True)
    self.assertEqual(pkt_new.anyIndex, True)
    self.assertEqual(pkt_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_new.context, "blah")
    # Test decode, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_INDEX_ALLOC, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.newIndex, True)
    self.assertEqual(pkt_LE_new.anyIndex, True)
    self.assertEqual(pkt_LE_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_LE_new.context, "blah")
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 14,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "newIndex": True,
                      "anyIndex": True,
                      "varbinds": (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                              AX.OID((4, 5, 6), False)),
                                   AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4),
                                              "blah")),
                      "context": "blah"})
def test_IndexDeallocPDU(self):
    """Round-trip IndexDeallocPDU through init, encode, decode, packetVars().

    Same shape as the IndexAlloc test (the two PDUs share the
    decode_xIndexAllocPDU decoder) but with AgentX PDU type 15.
    """
    dec = AX.decode_xIndexAllocPDU
    cls = AX.IndexDeallocPDU
    # Test init
    pkt = cls(True, 1, 2, 3, True, True,
              (AX.Varbind(AX.VALUE_OID, (1, 2, 3), AX.OID((4, 5, 6), False)),
               AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
              context="blah")
    test_pducore(self, pkt, AX.PDU_INDEX_DEALLOC, True, 1, 2, 3)
    self.assertEqual(pkt.newIndex, True)
    self.assertEqual(pkt.anyIndex, True)
    self.assertEqual(pkt.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt.context, "blah")
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3, True, True,
                 (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                             AX.OID((4, 5, 6), False)),
                  AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")),
                 context="blah")
    test_pducore(self, pkt_LE, AX.PDU_INDEX_DEALLOC, False, 1, 2, 3)
    self.assertEqual(pkt_LE.newIndex, True)
    self.assertEqual(pkt_LE.anyIndex, True)
    self.assertEqual(pkt_LE.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_LE.context, "blah")
    # Test encode
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x0F\x1E\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x48"
                     b"\x00\x00\x00\x04blah"
                     b"\x00\x06\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x04\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x04"
                     b"\x00\x00\x00\x04blah")
    # Test encode, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x0F\x0E\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x48\x00\x00\x00"
                     b"\x04\x00\x00\x00blah"
                     b"\x06\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x04\x00\x00\x00"
                     b"\x04\x00\x00\x00blah")
    # Test decode
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_INDEX_DEALLOC, True, 1, 2, 3)
    self.assertEqual(pkt_new.newIndex, True)
    self.assertEqual(pkt_new.anyIndex, True)
    self.assertEqual(pkt_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_new.context, "blah")
    # Test decode, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_INDEX_DEALLOC, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.newIndex, True)
    self.assertEqual(pkt_LE_new.anyIndex, True)
    self.assertEqual(pkt_LE_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    self.assertEqual(pkt_LE_new.context, "blah")
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 15,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "newIndex": True,
                      "anyIndex": True,
                      "varbinds": (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                              AX.OID((4, 5, 6), False)),
                                   AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4),
                                              "blah")),
                      "context": "blah"})
def test_AddAgentCapsPDU(self):
    """Round-trip AddAgentCapsPDU through init, encode, decode, packetVars().

    Carries a capabilities OID plus a description string, with a context
    ("bluh") deliberately different from the description ("blah") so the
    two fields cannot be confused. AgentX PDU type 16, both byte orders.
    """
    dec = AX.decode_AddAgentCapsPDU
    cls = AX.AddAgentCapsPDU
    # Test init
    pkt = cls(True, 1, 2, 3, (4, 5, 6), "blah", context="bluh")
    test_pducore(self, pkt, AX.PDU_ADD_AGENT_CAPS, True, 1, 2, 3)
    self.assertEqual(pkt.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt.description, "blah")
    self.assertEqual(pkt.context, "bluh")
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3, (4, 5, 6), "blah", context="bluh")
    test_pducore(self, pkt_LE, AX.PDU_ADD_AGENT_CAPS, False, 1, 2, 3)
    self.assertEqual(pkt_LE.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt_LE.description, "blah")
    self.assertEqual(pkt_LE.context, "bluh")
    # Test encode
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x10\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x20"
                     b"\x00\x00\x00\x04bluh"
                     b"\x03\x00\x00\x00\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x00\x00\x04blah")
    # Test encode, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x10\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x20\x00\x00\x00"
                     b"\x04\x00\x00\x00bluh"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00"
                     b"\x04\x00\x00\x00blah")
    # Test decode
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_ADD_AGENT_CAPS, True, 1, 2, 3)
    self.assertEqual(pkt_new.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt_new.description, "blah")
    self.assertEqual(pkt_new.context, "bluh")
    # Test decode, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_ADD_AGENT_CAPS, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt_LE_new.description, "blah")
    self.assertEqual(pkt_LE_new.context, "bluh")
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 16,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "oid": AX.OID((4, 5, 6), False),
                      "description": "blah",
                      "context": "bluh"})
def test_RMAgentCapsPDU(self):
    """Round-trip RMAgentCapsPDU through init, encode, decode, packetVars().

    Like AddAgentCaps but with only the capabilities OID (no
    description). AgentX PDU type 17, both byte orders.
    """
    dec = AX.decode_RMAgentCapsPDU
    cls = AX.RMAgentCapsPDU
    # Test init
    pkt = cls(True, 1, 2, 3, (4, 5, 6), context="bluh")
    test_pducore(self, pkt, AX.PDU_RM_AGENT_CAPS, True, 1, 2, 3)
    self.assertEqual(pkt.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt.context, "bluh")
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3, (4, 5, 6), context="bluh")
    test_pducore(self, pkt_LE, AX.PDU_RM_AGENT_CAPS, False, 1, 2, 3)
    self.assertEqual(pkt_LE.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt_LE.context, "bluh")
    # Test encode
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x11\x18\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x18"
                     b"\x00\x00\x00\x04bluh"
                     b"\x03\x00\x00\x00\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06")
    # Test encode, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x11\x08\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x18\x00\x00\x00"
                     b"\x04\x00\x00\x00bluh"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00")
    # Test decode
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_RM_AGENT_CAPS, True, 1, 2, 3)
    self.assertEqual(pkt_new.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt_new.context, "bluh")
    # Test decode, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_RM_AGENT_CAPS, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.oid, AX.OID((4, 5, 6), False))
    self.assertEqual(pkt_LE_new.context, "bluh")
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 17,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "oid": AX.OID((4, 5, 6), False),
                      "context": "bluh"})
def test_ResponsePDU(self):
    """Round-trip ResponsePDU through init, encode, decode, packetVars().

    Checks the sysUptime / resError / resIndex response fields plus two
    varbinds, in both byte orders (AgentX PDU type 18). No context is
    used by this PDU type.
    """
    dec = AX.decode_ResponsePDU
    cls = AX.ResponsePDU
    # Test init
    pkt = cls(True, 1, 2, 3, 4, 5, 6,
              (AX.Varbind(AX.VALUE_OID, (1, 2, 3), AX.OID((4, 5, 6), False)),
               AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    test_pducore(self, pkt, AX.PDU_RESPONSE, True, 1, 2, 3)
    self.assertEqual(pkt.sysUptime, 4)
    self.assertEqual(pkt.resError, 5)
    self.assertEqual(pkt.resIndex, 6)
    self.assertEqual(pkt.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    # Test init, little endian
    pkt_LE = cls(False, 1, 2, 3, 4, 5, 6,
                 (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                             AX.OID((4, 5, 6), False)),
                  AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    test_pducore(self, pkt_LE, AX.PDU_RESPONSE, False, 1, 2, 3)
    self.assertEqual(pkt_LE.sysUptime, 4)
    self.assertEqual(pkt_LE.resError, 5)
    self.assertEqual(pkt_LE.resIndex, 6)
    self.assertEqual(pkt_LE.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    # Test encode
    pkt_str = pkt.encode()
    self.assertEqual(pkt_str,
                     b"\x01\x12\x10\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x48"
                     b"\x00\x00\x00\x04\x00\x05\x00\x06"
                     b"\x00\x06\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x04\x00\x00"
                     b"\x03\x00\x00\x00\x00\x00\x00\x01"
                     b"\x00\x00\x00\x02\x00\x00\x00\x04"
                     b"\x00\x00\x00\x04blah")
    # Test encode, little endian
    pkt_LE_str = pkt_LE.encode()
    self.assertEqual(pkt_LE_str,
                     b"\x01\x12\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x48\x00\x00\x00"
                     b"\x04\x00\x00\x00\x05\x00\x06\x00"
                     b"\x06\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x03\x00\x00\x00\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00\x04\x00\x00\x00"
                     b"\x04\x00\x00\x00blah")
    # Test decode
    header, body = slicedata(pkt_str, 20)
    header = decode_pduheader(header)
    pkt_new = dec(body, header)
    test_pducore(self, pkt_new, AX.PDU_RESPONSE, True, 1, 2, 3)
    self.assertEqual(pkt_new.sysUptime, 4)
    self.assertEqual(pkt_new.resError, 5)
    self.assertEqual(pkt_new.resIndex, 6)
    self.assertEqual(pkt_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    # Test decode, little endian
    header, body = slicedata(pkt_LE_str, 20)
    header = decode_pduheader(header)
    pkt_LE_new = dec(body, header)
    test_pducore(self, pkt_LE_new, AX.PDU_RESPONSE, False, 1, 2, 3)
    self.assertEqual(pkt_LE_new.sysUptime, 4)
    self.assertEqual(pkt_LE_new.resError, 5)
    self.assertEqual(pkt_LE_new.resIndex, 6)
    self.assertEqual(pkt_LE_new.varbinds,
                     (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                 AX.OID((4, 5, 6), False)),
                      AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4), "blah")))
    # Test packetVars
    self.assertEqual(pkt_new.packetVars(),
                     {"pduType": 18,
                      "bigEndian": True,
                      "sessionID": 1,
                      "transactionID": 2,
                      "packetID": 3,
                      "sysUptime": 4,
                      "resError": 5,
                      "resIndex": 6,
                      "varbinds": (AX.Varbind(AX.VALUE_OID, (1, 2, 3),
                                              AX.OID((4, 5, 6), False)),
                                   AX.Varbind(AX.VALUE_OCTET_STR, (1, 2, 4),
                                              "blah"))})
#
# Data type tests
#
def test_integer32(self):
    """Check 32-bit integer encode/decode in both byte orders."""
    big = b"\x00\x00\x00\x2A"
    little = b"\x2A\x00\x00\x00"
    # Encoding selects byte order via the leading boolean
    self.assertEqual(AX.encode_integer32(True, 42), big)
    self.assertEqual(AX.encode_integer32(False, 42), little)
    # Decoding yields (value, unconsumed trailing bytes)
    self.assertEqual(AX.decode_integer32(big + extraData, standardFlags),
                     (42, extraData))
    self.assertEqual(AX.decode_integer32(little + extraData,
                                         lilEndianFlags),
                     (42, extraData))
def test_nullvalue(self):
    """Check null-value encode/decode (a zero-length wire payload)."""
    # Encoding discards its payload argument and emits nothing
    self.assertEqual(AX.encode_nullvalue(True, "this is ignored"), b"")
    # Decoding consumes no bytes and yields None plus the input intact
    self.assertEqual(AX.decode_nullvalue(extraData, standardFlags),
                     (None, extraData))
def test_integer64(self):
    """Check 64-bit integer encode/decode in both byte orders."""
    big = b"\x00\x00\x00\x00\x00\x00\x00\x2A"
    little = b"\x2A\x00\x00\x00\x00\x00\x00\x00"
    # Encoding selects byte order via the leading boolean
    self.assertEqual(AX.encode_integer64(True, 42), big)
    self.assertEqual(AX.encode_integer64(False, 42), little)
    # Decoding yields (value, unconsumed trailing bytes)
    self.assertEqual(AX.decode_integer64(big + extraData, standardFlags),
                     (42, extraData))
    self.assertEqual(AX.decode_integer64(little + extraData,
                                         lilEndianFlags),
                     (42, extraData))
def test_ipaddr(self):
    """IP addresses encode as a length word plus exactly four octets."""
    encode, decode = AX.encode_ipaddr, AX.decode_ipaddr
    addr = (1, 2, 3, 4)
    # Encoding, big then little endian (only the length word flips).
    self.assertEqual(encode(True, addr),
                     b"\x00\x00\x00\x04\x01\x02\x03\x04")
    self.assertEqual(encode(False, addr),
                     b"\x04\x00\x00\x00\x01\x02\x03\x04")
    # Anything other than exactly four octets must be rejected.
    raised = False
    try:
        encode(True, (1, 2, 3, 4, 5))
    except ValueError:
        raised = True
    self.assertEqual(raised, True)
    # Decoding, both byte orders, with trailing data passed through.
    self.assertEqual(
        decode(b"\x00\x00\x00\x04\x01\x02\x03\x04" + extraData,
               standardFlags),
        (addr, extraData))
    self.assertEqual(
        decode(b"\x04\x00\x00\x00\x01\x02\x03\x04" + extraData,
               lilEndianFlags),
        (addr, extraData))
def test_OID(self):
    """Exercise AX.OID end to end: encoding (empty/basic/prefixed/include/
    size limits), decoding the same wire forms back, compareOID ordering,
    the rich comparison operators, subid type sanity, and isNull."""
    target = AX.OID
    dec = AX.decode_OID
    # Encode empty OID
    cls = target((), False)
    self.assertEqual(cls.encode(True), b"\x00\x00\x00\x00")
    # Encode basic OID
    cls = target((1, 2, 3, 4, 5), False)
    self.assertEqual(cls.encode(True),
                     b"\x05\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05")
    # Encode basic OID, little endian
    self.assertEqual(cls.encode(False),
                     b"\x05\x00\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00")
    # Encode prefixed OID (leading 1.3.6.1.X collapses into the prefix
    # byte 0x17 == 23; only the trailing subids are emitted)
    cls = target((1, 3, 6, 1, 23, 1, 2, 3), False)
    self.assertEqual(cls.encode(True),
                     b"\x03\x17\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03")
    # Encode include
    cls = target((1, 2), True)
    self.assertEqual(cls.encode(True),
                     b"\x02\x00\x01\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02")
    # Encode together
    cls = target((1, 3, 6, 1, 1, 3, 4, 5, 6), True)
    self.assertEqual(cls.encode(True),
                     b"\x04\x01\x01\x00"
                     b"\x00\x00\x00\x03\x00\x00\x00\x04"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06")
    # Encode maximum size
    cls = target(maximumOIDsubs, False)
    self.assertEqual(cls.encode(True), maximumOIDstr)
    # Encode over maximum size
    try:
        cls = target(maximumOIDsubs + (42,), False)
        cls.encode(True)
        errored = False
    except ValueError:
        errored = True
    self.assertEqual(errored, True)
    # Decode empty OID, extra data
    cls, xtr = dec(b"\x00\x00\x00\x00" + extraData, standardFlags)
    self.assertEqual(isinstance(cls, target), True)
    self.assertEqual(cls.subids, ())
    self.assertEqual(cls.include, False)
    self.assertEqual(xtr, extraData)
    # Decode basic OID, extra data
    cls, xtr = dec(b"\x05\x00\x00\x00\x00\x00\x00\x01"
                   b"\x00\x00\x00\x02\x00\x00\x00\x03"
                   b"\x00\x00\x00\x04\x00\x00\x00\x05" + extraData,
                   standardFlags)
    self.assertEqual(isinstance(cls, target), True)
    self.assertEqual(cls.subids, (1, 2, 3, 4, 5))
    self.assertEqual(cls.include, False)
    self.assertEqual(xtr, extraData)
    # Decode basic OID, little endian
    cls, xtr = dec(b"\x05\x00\x00\x00\x01\x00\x00\x00"
                   b"\x02\x00\x00\x00\x03\x00\x00\x00"
                   b"\x04\x00\x00\x00\x05\x00\x00\x00",
                   lilEndianFlags)
    self.assertEqual(isinstance(cls, target), True)
    self.assertEqual(cls.subids, (1, 2, 3, 4, 5))
    self.assertEqual(cls.include, False)
    self.assertEqual(xtr, b"")
    # Decode prefixed OID
    cls, xtr = dec(b"\x03\x17\x00\x00\x00\x00\x00\x01"
                   b"\x00\x00\x00\x02\x00\x00\x00\x03", standardFlags)
    self.assertEqual(isinstance(cls, target), True)
    self.assertEqual(cls.subids, (1, 3, 6, 1, 23, 1, 2, 3))
    self.assertEqual(cls.include, False)
    self.assertEqual(xtr, b"")
    # Decode include
    cls, xtr = dec(b"\x02\x00\x05\x00\x00\x00\x00\x01\x00\x00\x00\x02",
                   standardFlags)
    self.assertEqual(isinstance(cls, target), True)
    self.assertEqual(cls.subids, (1, 2))
    self.assertEqual(cls.include, True)
    self.assertEqual(xtr, b"")
    # Decode together
    cls, xtr = dec(b"\x04\x01\x02\x00\x00\x00\x00\x03"
                   b"\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\x06",
                   standardFlags)
    self.assertEqual(isinstance(cls, target), True)
    self.assertEqual(cls.subids, (1, 3, 6, 1, 1, 3, 4, 5, 6))
    self.assertEqual(cls.include, True)
    self.assertEqual(xtr, b"")
    # Decode maximum size
    cls, xtr = dec(maximumOIDstr, standardFlags)
    self.assertEqual(isinstance(cls, target), True)
    self.assertEqual(cls.subids, maximumOIDsubs)
    self.assertEqual(cls.include, False)
    self.assertEqual(xtr, b"")
    # Decode over maximum size
    # Need to replace the hardcoded n_subid=128 with 129
    fatOID = b"\x81" + maximumOIDstr[1:] + b"\xDE\xAD\xBE\xEF"
    try:
        cls, xtr = dec(fatOID, standardFlags)
        errored = False
    except ValueError:
        errored = True
    self.assertEqual(errored, True)
    # Test compareOID
    # Test equal
    a = target((1, 2, 3, 4))
    b = target((1, 2, 3, 4))
    self.assertEqual(a.compareOID(b), 0)
    # Test equal length, one < two
    b = target((1, 2, 3, 5))
    self.assertEqual(a.compareOID(b), -1)
    # Test equal length, one > two
    b = target((1, 2, 3, 0))
    self.assertEqual(a.compareOID(b), 1)
    # Test one shorter, less than two, equal for length
    a = target((1, 2, 3))
    b = target((1, 2, 3, 4))
    self.assertEqual(a.compareOID(b), -1)
    # Test one shorter, less than two
    b = target((1, 2, 4, 5))
    self.assertEqual(a.compareOID(b), -1)
    # Test one shorter, greater than two
    b = target((1, 2, 2, 4))
    self.assertEqual(a.compareOID(b), 1)
    # Test two shorter, less than one, equal for length
    a = target((1, 2, 3, 4))
    b = target((1, 2, 3))
    self.assertEqual(a.compareOID(b), 1)
    # Test two shorter, less than one
    a = target((1, 2, 4, 5))
    self.assertEqual(a.compareOID(b), 1)
    # Test two shorter, greater than one
    a = target((1, 2, 2, 4))
    b = target((1, 2, 3))
    self.assertEqual(a.compareOID(b), -1)
    # Test direct comparisons
    # Test ==
    self.assertEqual(target((1, 2, 3)) == target((1, 2, 3)), True)
    # Test !=
    self.assertEqual(target((1, 2, 3)) != target((1, 2)), True)
    # Test <
    self.assertEqual(target((1, 2)) < target((1, 2, 3)), True)
    # Test >
    self.assertEqual(target((1, 2, 3)) > target((1, 2)), True)
    # Test <=
    self.assertEqual(target((1, 2)) <= target((1, 2, 3)), True)
    self.assertEqual(target((1, 2, 3)) <= target((1, 2, 3)), True)
    # Test >=
    self.assertEqual(target((1, 2, 3)) >= target((1, 2)), True)
    self.assertEqual(target((1, 2, 3)) >= target((1, 2)), True) if False else None
    # NOTE(review): line above kept disabled duplicate removed; original:
    self.assertEqual(target((1, 2, 3)) >= target((1, 2, 3)), True)
    # Test insane subids type
    try:
        errored = target("foo")
    except TypeError:
        errored = True
    self.assertEqual(errored, True)
    # Test isNull
    self.assertEqual(target((1, 2)).isNull(), False)
    self.assertEqual(target(()).isNull(), True)
def test_searchrange(self):
    """Exercise AX.SearchRange: construction (OID args, include override,
    tuple coercion), encoding, decoding, __eq__/__ne__ and __repr__."""
    target = AX.SearchRange
    dec = AX.decode_SearchRange
    oid = AX.OID
    # Test init
    # Basic
    cls = target(oid((1, 2), True), oid((3, 4), False))
    self.assertEqual(cls.start.subids, (1, 2))
    self.assertEqual(cls.start.include, True)
    self.assertEqual(cls.end.subids, (3, 4))
    self.assertEqual(cls.end.include, False)
    # Override: the third argument forces start.include, end.include
    # is always cleared
    cls = target(oid((1, 2), True), oid((3, 4), True), False)
    self.assertEqual(cls.start.subids, (1, 2))
    self.assertEqual(cls.start.include, False)
    self.assertEqual(cls.end.subids, (3, 4))
    self.assertEqual(cls.end.include, False)
    # Turn tuples into OIDs
    cls = target((1, 2), (3, 4), True)
    self.assertEqual(cls.start.subids, (1, 2))
    self.assertEqual(cls.start.include, True)
    self.assertEqual(cls.end.subids, (3, 4))
    self.assertEqual(cls.end.include, False)
    # Test encoding
    # Encode minimum size
    cls = target((), (), False)
    self.assertEqual(cls.encode(True),
                     b"\x00\x00\x00\x00\x00\x00\x00\x00")
    # Encode inclusive
    cls = target((1, 2, 3, 4), (5, 6, 7, 8), True)
    self.assertEqual(cls.encode(True),
                     b"\x04\x00\x01\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x04"
                     b"\x04\x00\x00\x00"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x00\x00\x07\x00\x00\x00\x08")
    # Encode exclusive
    cls = target((1, 2, 3, 4), (5, 6, 7, 8), False)
    self.assertEqual(cls.encode(True),
                     b"\x04\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x03\x00\x00\x00\x04"
                     b"\x04\x00\x00\x00"
                     b"\x00\x00\x00\x05\x00\x00\x00\x06"
                     b"\x00\x00\x00\x07\x00\x00\x00\x08")
    # Encode exclusive, little endian
    self.assertEqual(cls.encode(False),
                     b"\x04\x00\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x04\x00\x00\x00"
                     b"\x05\x00\x00\x00\x06\x00\x00\x00"
                     b"\x07\x00\x00\x00\x08\x00\x00\x00")
    # Test decode
    # Decode minimum size, extra data
    self.assertEqual(dec(b"\x00\x00\x00\x00\x00\x00\x00\x00" + extraData,
                         standardFlags),
                     (target((), (), False), extraData))
    # Decode inclusive
    self.assertEqual(dec(b"\x04\x00\x01\x00"
                         b"\x00\x00\x00\x01\x00\x00\x00\x02"
                         b"\x00\x00\x00\x03\x00\x00\x00\x04"
                         b"\x04\x00\x00\x00"
                         b"\x00\x00\x00\x05\x00\x00\x00\x06"
                         b"\x00\x00\x00\x07\x00\x00\x00\x08",
                         standardFlags),
                     (target((1, 2, 3, 4), (5, 6, 7, 8), True), b""))
    # Decode exclusive
    self.assertEqual(dec(b"\x04\x00\x00\x00"
                         b"\x00\x00\x00\x01\x00\x00\x00\x02"
                         b"\x00\x00\x00\x03\x00\x00\x00\x04"
                         b"\x04\x00\x00\x00"
                         b"\x00\x00\x00\x05\x00\x00\x00\x06"
                         b"\x00\x00\x00\x07\x00\x00\x00\x08",
                         standardFlags),
                     (target((1, 2, 3, 4), (5, 6, 7, 8), False), b""))
    # Decode little endian
    self.assertEqual(dec(b"\x04\x00\x01\x00"
                         b"\x01\x00\x00\x00\x02\x00\x00\x00"
                         b"\x03\x00\x00\x00\x04\x00\x00\x00"
                         b"\x04\x00\x00\x00"
                         b"\x05\x00\x00\x00\x06\x00\x00\x00"
                         b"\x07\x00\x00\x00\x08\x00\x00\x00",
                         lilEndianFlags),
                     (target((1, 2, 3, 4), (5, 6, 7, 8), True), b""))
    # Test __eq__
    # Test equal
    a = target((1, 2, 3), (1, 2, 3))
    b = target((1, 2, 3), (1, 2, 3))
    self.assertEqual(a == b, True)
    # Test start unequal
    b = target((1, 2, 3), (1, 2, 3), True)
    self.assertEqual(a == b, False)
    # Test end unequal
    b = target((1, 2, 3), (1, 2, 3, 4))
    self.assertEqual(a == b, False)
    # Test __ne__
    # Test equal
    a = target((1, 2, 3), (1, 2, 3))
    b = target((1, 2, 3), (1, 2, 3))
    self.assertEqual(a != b, False)
    # Test start unequal
    b = target((1, 2, 3), (1, 2, 3), True)
    self.assertEqual(a != b, True)
    # Test end unequal
    b = target((1, 2, 3), (1, 2, 3, 4))
    self.assertEqual(a != b, True)
    # Test __repr__
    self.assertEqual(repr(target((1, 2), (1, 3))),
                     "SearchRange(OID((1, 2), False), OID((1, 3), False))")
def test_encode_searchrange_list(self):
    """encode_searchrange_list concatenates the encodings of each range,
    in the requested byte order."""
    enc = AX.encode_searchrange_list
    srch = AX.SearchRange
    # Encode
    self.assertEqual(enc(True, (srch((1, 2), (1, 2), True),
                                srch((2, 3), (3, 4)))),
                     b"\x02\x00\x01\x00\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x02\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x02\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04")
    # Encode, little endian
    self.assertEqual(enc(False, (srch((1, 2), (1, 2), True),
                                 srch((2, 3), (3, 4)))),
                     b"\x02\x00\x01\x00\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x02\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00")
def test_decode_searchrange_list(self):
    """decode_searchrange_list consumes the whole buffer into a tuple of
    SearchRange objects, in the requested byte order."""
    dec = AX.decode_searchrange_list
    srch = AX.SearchRange
    # Decode
    self.assertEqual(dec(b"\x02\x00\x01\x00"
                         b"\x00\x00\x00\x01\x00\x00\x00\x02"
                         b"\x02\x00\x00\x00"
                         b"\x00\x00\x00\x01\x00\x00\x00\x02"
                         b"\x02\x00\x00\x00"
                         b"\x00\x00\x00\x02\x00\x00\x00\x03"
                         b"\x02\x00\x00\x00"
                         b"\x00\x00\x00\x03\x00\x00\x00\x04",
                         standardFlags),
                     (srch((1, 2), (1, 2), True),
                      srch((2, 3), (3, 4), False)))
    # Test, little endian
    self.assertEqual(dec(b"\x02\x00\x01\x00"
                         b"\x01\x00\x00\x00\x02\x00\x00\x00"
                         b"\x02\x00\x00\x00"
                         b"\x01\x00\x00\x00\x02\x00\x00\x00"
                         b"\x02\x00\x00\x00"
                         b"\x02\x00\x00\x00\x03\x00\x00\x00"
                         b"\x02\x00\x00\x00"
                         b"\x03\x00\x00\x00\x04\x00\x00\x00",
                         lilEndianFlags),
                     (srch((1, 2), (1, 2), True),
                      srch((2, 3), (3, 4), False)))
def test_xcode_octetstr(self):
    """Octet-string encode/decode (with 4-byte padding) plus
    sanity_octetstr validation of acceptable payload types/values."""
    enc = AX.encode_octetstr
    dec = AX.decode_octetstr
    san = AX.sanity_octetstr
    # Encode empty
    self.assertEqual(enc(True, ()), b"\x00\x00\x00\x00")
    # Encode word multiple
    self.assertEqual(enc(True, (1, 2, 3, 4)),
                     b"\x00\x00\x00\x04\x01\x02\x03\x04")
    # Encode non word multiple (payload zero-padded to a 4-byte boundary)
    self.assertEqual(enc(True, (1, 2, 3, 4, 5)),
                     b"\x00\x00\x00\x05\x01\x02\x03\x04\x05\x00\x00\x00")
    # Encode string
    self.assertEqual(enc(True, "blah"), b"\x00\x00\x00\x04blah")
    # Encode string, little endian
    self.assertEqual(enc(False, "blah"), b"\x04\x00\x00\x00blah")
    # Decode empty
    self.assertEqual(dec(b"\x00\x00\x00\x00", standardFlags), ("", b""))
    # Decode word multiple, extra data
    self.assertEqual(dec(b"\x00\x00\x00\x04blah" + extraData,
                         standardFlags),
                     ("blah", extraData))
    # Decode word multiple, little endian
    self.assertEqual(dec(b"\x04\x00\x00\x00blah", lilEndianFlags),
                     ("blah", b""))
    # Decode non word multiple, extra data
    self.assertEqual(dec(b"\x00\x00\x00\x05"
                         b"blarg\x00\x00\x00" + extraData,
                         standardFlags),
                     ("blarg", extraData))
    # Test sanity; san() returns None for sane input, raises otherwise
    # Test str
    try:
        errored = san("foo")  # str is always sane
    except Exception as e:
        errored = e
    self.assertEqual(errored, None)
    # Test sane list
    try:
        errored = san([1, 2, 3])
    except Exception as e:
        errored = e
    self.assertEqual(errored, None)
    # Test sane tuple
    try:
        errored = san((1, 2, 3))
    except Exception as e:
        errored = e
    self.assertEqual(errored, None)
    # Test insane list (300 does not fit in one octet)
    try:
        errored = san([23, 300, 42])
    except ValueError:
        errored = True
    self.assertEqual(errored, True)
    # Test insane tuple
    try:
        errored = san((23, 300, 42))
    except ValueError:
        errored = True
    self.assertEqual(errored, True)
    # Test insane type
    try:
        errored = san(42.23)
    except TypeError:
        errored = True
    self.assertEqual(errored, True)
def test_Varbind(self):
    """Varbind construction, repr, encoding of every supported value
    type (payloadless, octet-based, int32/uint32/int64, OID), and
    __eq__ over type/oid/payload differences."""
    target = AX.Varbind
    # Test init
    cls = target(AX.VALUE_INTEGER, (1, 2, 3), 42)
    self.assertEqual(cls.valueType, AX.VALUE_INTEGER)
    self.assertEqual(cls.oid, AX.OID((1, 2, 3), False))
    self.assertEqual(cls.payload, 42)
    # Test repr
    self.assertEqual(repr(cls),
                     "Varbind(vtype=2, oid=OID((1, 2, 3), False), "
                     "payload=42)")
    # Test payloadless types
    cls = target(AX.VALUE_NULL, (1, 2, 3))
    self.assertEqual(cls.encode(True),
                     b"\x00\x05\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03")
    cls = target(AX.VALUE_NO_SUCH_OBJECT, (1, 2, 3))
    self.assertEqual(cls.encode(True),
                     b"\x00\x80\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03")
    cls = target(AX.VALUE_NO_SUCH_INSTANCE, (1, 2, 3))
    self.assertEqual(cls.encode(True),
                     b"\x00\x81\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03")
    cls = target(AX.VALUE_END_OF_MIB_VIEW, (1, 2, 3))
    self.assertEqual(cls.encode(True),
                     b"\x00\x82\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03")
    # Test octet based types
    cls = target(AX.VALUE_OCTET_STR, (1, 2, 3), (1, 2, 3, 4, 5))
    self.assertEqual(cls.encode(True),
                     b"\x00\x04\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x05"
                     b"\x01\x02\x03\x04\x05\x00\x00\x00")
    cls = target(AX.VALUE_IP_ADDR, (1, 2, 3), (16, 32, 48, 64))
    self.assertEqual(cls.encode(True),
                     b"\x00\x40\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x04\x10\x20\x30\x40")
    # Test integer32 type (two's-complement negative)
    cls = target(AX.VALUE_INTEGER, (1, 2, 3), -42)
    self.assertEqual(cls.encode(True),
                     b"\x00\x02\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\xFF\xFF\xFF\xD6")
    # Test unsigned32 types
    cls = target(AX.VALUE_COUNTER32, (1, 2, 3), 42)
    self.assertEqual(cls.encode(True),
                     b"\x00\x41\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x2A")
    cls = target(AX.VALUE_GAUGE32, (1, 2, 3), 42)
    self.assertEqual(cls.encode(True),
                     b"\x00\x42\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x2A")
    cls = target(AX.VALUE_TIME_TICKS, (1, 2, 3), 42)
    self.assertEqual(cls.encode(True),
                     b"\x00\x43\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x2A")
    # Test integer64 type
    cls = target(AX.VALUE_COUNTER64, (1, 2, 3), 42)
    self.assertEqual(cls.encode(True),
                     b"\x00\x46\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x00\x00\x00\x00\x00\x00\x00\x2A")
    # Test oid type
    cls = target(AX.VALUE_OID, (1, 2, 3), AX.OID((16, 42, 256), False))
    self.assertEqual(cls.encode(True),
                     b"\x00\x06\x00\x00\x03\x00\x00\x00"
                     b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                     b"\x03\x00\x00\x00\x00\x00\x00\x10"
                     b"\x00\x00\x00\x2A\x00\x00\x01\x00")
    # Test oid type, little endian
    cls = target(AX.VALUE_OID, (1, 2, 3), AX.OID((16, 42, 256), False))
    self.assertEqual(cls.encode(False),
                     b"\x06\x00\x00\x00\x03\x00\x00\x00"
                     b"\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00"
                     b"\x03\x00\x00\x00\x10\x00\x00\x00"
                     b"\x2A\x00\x00\x00\x00\x01\x00\x00")
    # Test __eq__
    one = target(AX.VALUE_INTEGER, (1, 2, 3), 1)
    two = target(AX.VALUE_INTEGER, (1, 2, 3), 1)
    # Test equal
    self.assertEqual(one == two, True)
    # Test different type
    two = target(AX.VALUE_GAUGE32, (1, 2, 3), 1)
    self.assertEqual(one == two, False)
    # Test different OID
    two = target(AX.VALUE_INTEGER, (1, 2, 3, 4), 1)
    self.assertEqual(one == two, False)
    # Test different payload
    two = target(AX.VALUE_INTEGER, (1, 2, 3), 2)
    self.assertEqual(one == two, False)
def test_decode_varbind(self):
    """decode_Varbind turns each wire value type back into a Varbind,
    returning (varbind, remaining_bytes)."""
    f = AX.decode_Varbind
    # Test payloadless types
    self.assertEqual(f(b"\x00\x05\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03",
                       standardFlags),
                     (AX.Varbind(AX.VALUE_NULL, AX.OID((1, 2, 3), False)),
                      b""))
    self.assertEqual(f(b"\x00\x80\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03",
                       standardFlags),
                     (AX.Varbind(AX.VALUE_NO_SUCH_OBJECT,
                                 AX.OID((1, 2, 3), False)),
                      b""))
    self.assertEqual(f(b"\x00\x81\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03",
                       standardFlags),
                     (AX.Varbind(AX.VALUE_NO_SUCH_INSTANCE,
                                 AX.OID((1, 2, 3), False)),
                      b""))
    self.assertEqual(f(b"\x00\x82\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03",
                       standardFlags),
                     (AX.Varbind(AX.VALUE_END_OF_MIB_VIEW,
                                 AX.OID((1, 2, 3), False)),
                      b""))
    # Test octet based types
    self.assertEqual(f(b"\x00\x04\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x00\x00\x00\x0512345\x00\x00\x00",
                       standardFlags),
                     (AX.Varbind(AX.VALUE_OCTET_STR,
                                 AX.OID((1, 2, 3), False),
                                 "12345"),
                      b""))
    self.assertEqual(f(b"\x00\x40\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x00\x00\x00\x04\x10\x20\x30\x40", standardFlags),
                     (AX.Varbind(AX.VALUE_IP_ADDR,
                                 AX.OID((1, 2, 3), False),
                                 (16, 32, 48, 64)),
                      b""))
    # Test integer32 type
    self.assertEqual(f(b"\x00\x02\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\xFF\xFF\xFF\xD6", standardFlags),
                     (AX.Varbind(AX.VALUE_INTEGER,
                                 AX.OID((1, 2, 3), False),
                                 -42),
                      b""))
    # Test unsigned32 types (counter32 0x41, gauge32 0x42, timeticks 0x43)
    self.assertEqual(f(b"\x00\x41\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x00\x00\x00\x2A", standardFlags),
                     (AX.Varbind(AX.VALUE_COUNTER32,
                                 AX.OID((1, 2, 3), False),
                                 42),
                      b""))
    self.assertEqual(f(b"\x00\x42\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x00\x00\x00\x2A", standardFlags),
                     (AX.Varbind(AX.VALUE_GAUGE32,
                                 AX.OID((1, 2, 3), False),
                                 42),
                      b""))
    self.assertEqual(f(b"\x00\x43\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x00\x00\x00\x2A", standardFlags),
                     (AX.Varbind(AX.VALUE_TIME_TICKS,
                                 AX.OID((1, 2, 3), False),
                                 42),
                      b""))
    # Test integer64 type
    self.assertEqual(f(b"\x00\x46\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x00\x00\x00\x00\x00\x00\x00\x2A", standardFlags),
                     (AX.Varbind(AX.VALUE_COUNTER64,
                                 AX.OID((1, 2, 3), False),
                                 42),
                      b""))
    # Test oid type
    self.assertEqual(f(b"\x00\x06\x00\x00\x03\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x03\x00\x00\x00\x00\x00\x00\x10"
                       b"\x00\x00\x00\x2A\x00\x00\x01\x00", standardFlags),
                     (AX.Varbind(AX.VALUE_OID,
                                 AX.OID((1, 2, 3), False),
                                 AX.OID((16, 42, 256), False)),
                      b""))
    # Test little endian decoding (timeticks varbind, type byte 0x43)
    self.assertEqual(f(b"\x43\x00\x00\x00\x03\x00\x00\x00"
                       b"\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00"
                       b"\x2A\x00\x00\x00", lilEndianFlags),
                     (AX.Varbind(AX.VALUE_TIME_TICKS,
                                 AX.OID((1, 2, 3), False),
                                 42),
                      b""))
def test_xcode_varbindlist(self):
    """Round-trip a whole varbind list through encode/decode in both
    byte orders; an empty list encodes to b"" and decodes to None."""
    enc = AX.encode_varbindlist
    dec = AX.decode_varbindlist
    vb = AX.Varbind
    # Test encode empty
    self.assertEqual(enc(True, []), b"")
    # Test encode big endian
    big = enc(True,
              [vb(AX.VALUE_INTEGER, (1, 2), 1),
               vb(AX.VALUE_INTEGER, (3, 4), 2)])
    self.assertEqual(big, b"\x00\x02\x00\x00"
                     b"\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02"
                     b"\x00\x00\x00\x01"
                     b"\x00\x02\x00\x00"
                     b"\x02\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04"
                     b"\x00\x00\x00\x02")
    # Test encode little endian
    little = enc(False,
                 [vb(AX.VALUE_INTEGER, (1, 2), 1),
                  vb(AX.VALUE_INTEGER, (3, 4), 2)])
    self.assertEqual(little, b"\x02\x00\x00\x00"
                     b"\x02\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00"
                     b"\x01\x00\x00\x00"
                     b"\x02\x00\x00\x00"
                     b"\x02\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00"
                     b"\x02\x00\x00\x00")
    # Test decode empty
    self.assertEqual(dec(b"", standardFlags), None)
    # Test decode big endian
    self.assertEqual(dec(big, standardFlags),
                     (vb(AX.VALUE_INTEGER, (1, 2), 1),
                      vb(AX.VALUE_INTEGER, (3, 4), 2)))
    # Test decode little endian
    self.assertEqual(dec(little, lilEndianFlags),
                     (vb(AX.VALUE_INTEGER, (1, 2), 1),
                      vb(AX.VALUE_INTEGER, (3, 4), 2)))
def test_encode_flagbyte(self):
    """Flag dicts pack into the expected single flag byte."""
    pack = AX.encode_flagbyte
    # 0x0A == bits 1 and 3 set; 0x15 == bits 0, 2 and 4 set.
    self.assertEqual(pack(makeflags(False, True, False, True, False)), 0x0A)
    self.assertEqual(pack(makeflags(True, False, True, False, True)), 0x15)
def test_decode_flagbyte(self):
    """Flag bytes unpack back into the matching flag dicts."""
    unpack = AX.decode_flagbyte
    # Mirrors test_encode_flagbyte: same byte values, inverse direction.
    self.assertEqual(unpack(0x0A), makeflags(False, True, False, True, False))
    self.assertEqual(unpack(0x15), makeflags(True, False, True, False, True))
#
# Misc tests
#
def test_makeflags(self):
    """makeflags maps its five positional booleans onto the named flags."""
    flags = AX.makeflags(True, False, True, False, True)
    self.assertEqual(flags,
                     {"instReg": True,
                      "newIndex": False,
                      "anyIndex": True,
                      "contextP": False,
                      "bigEndian": True})
def test_getendian(self):
    """getendian yields the struct byte-order prefix for each endianness."""
    # True -> big endian ">", False -> little endian "<".
    self.assertEqual(AX.getendian(True), ">")
    self.assertEqual(AX.getendian(False), "<")
def test_encode_pduheader(self):
    """PDU header packing: version/type/flag byte, then the session,
    transaction and packet IDs and the payload length."""
    f = AX.encode_pduheader
    # Test "empty" header (all flag bits clear -> IDs in little endian)
    self.assertEqual(f(AX.PDU_OPEN,
                       False, False, False, False, False,
                       0xDEADBEEF, 0xCAFEBABE, 0xFACEF00D, 0),
                     b"\x01\x01\x00\x00"
                     b"\xEF\xBE\xAD\xDE\xBE\xBA\xFE\xCA"
                     b"\x0D\xF0\xCE\xFA\x00\x00\x00\x00")
    # Test flags (all five set -> flag byte 0x1F, IDs in big endian)
    self.assertEqual(f(AX.PDU_OPEN,
                       True, True, True, True, True,
                       0xDEADBEEF, 0xCAFEBABE, 0xFACEF00D, 0),
                     b"\x01\x01\x1F\x00"
                     b"\xDE\xAD\xBE\xEF\xCA\xFE\xBA\xBE"
                     b"\xFA\xCE\xF0\x0D\x00\x00\x00\x00")
def test_decode_pduheader(self):
    """PDU header unpacking into its dict form: both byte orders,
    trailing data ignored, and all five flag bits."""
    f = AX.decode_pduheader
    # Test "empty" header
    self.assertEqual(f(b"\x01\x01\x10\x00"
                       b"\xDE\xAD\xBE\xEF\xCA\xFE\xBA\xBE"
                       b"\xFA\xCE\xF0\x0D\x00\x00\x00\x00"),
                     {"version": 1,
                      "type": AX.PDU_OPEN,
                      "flags": {"instReg": False,
                                "newIndex": False,
                                "anyIndex": False,
                                "contextP": False,
                                "bigEndian": True},
                      "session_id": 0xDEADBEEF,
                      "transaction_id": 0xCAFEBABE,
                      "packet_id": 0xFACEF00D,
                      "length": 0})
    # Test "empty" header, little endian
    self.assertEqual(f(b"\x01\x01\x00\x00"
                       b"\xEF\xBE\xAD\xDE\xBE\xBA\xFE\xCA"
                       b"\x0D\xF0\xCE\xFA\x00\x00\x00\x00"),
                     {"version": 1,
                      "type": AX.PDU_OPEN,
                      "flags": {"instReg": False,
                                "newIndex": False,
                                "anyIndex": False,
                                "contextP": False,
                                "bigEndian": False},
                      "session_id": 0xDEADBEEF,
                      "transaction_id": 0xCAFEBABE,
                      "packet_id": 0xFACEF00D,
                      "length": 0})
    # Test "empty" header, extra data (header decode ignores trailing bytes)
    self.assertEqual(f(b"\x01\x01\x10\x00"
                       b"\xDE\xAD\xBE\xEF\xCA\xFE\xBA\xBE"
                       b"\xFA\xCE\xF0\x0D\x00\x00\x00\x00" + extraData),
                     {"version": 1,
                      "type": AX.PDU_OPEN,
                      "flags": {"instReg": False,
                                "newIndex": False,
                                "anyIndex": False,
                                "contextP": False,
                                "bigEndian": True},
                      "session_id": 0xDEADBEEF,
                      "transaction_id": 0xCAFEBABE,
                      "packet_id": 0xFACEF00D,
                      "length": 0})
    # Test flags
    self.assertEqual(f(b"\x01\x01\x1F\x00"
                       b"\xDE\xAD\xBE\xEF\xCA\xFE\xBA\xBE"
                       b"\xFA\xCE\xF0\x0D\x00\x00\x00\x00"),
                     {"version": 1,
                      "type": AX.PDU_OPEN,
                      "flags": {"instReg": True,
                                "newIndex": True,
                                "anyIndex": True,
                                "contextP": True,
                                "bigEndian": True},
                      "session_id": 0xDEADBEEF,
                      "transaction_id": 0xCAFEBABE,
                      "packet_id": 0xFACEF00D,
                      "length": 0})
def test_decode_packet(self):
    """Decode one example of every PDU type through AX.decode_packet,
    plus the error paths: short header, incomplete packet (returned as
    leftover bytes), wrong protocol version, and unknown PDU type."""
    f = AX.decode_packet
    srch = AX.SearchRange
    # Not testing all the variants of each packet type, that is
    # the job of the other tests.
    self.maxDiff = None
    # Test open
    self.assertEqual(f(b"\x01\x01\x10\x00"
                       b"\x00\x00\x00\x0C\x00\x00\x00\x22"
                       b"\x00\x00\x00\x38\x00\x00\x00\x20"
                       b"\x4E\x00\x00\x00"
                       b"\x04\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x04"
                       b"\x00\x00\x00\x03foo\x00"),
                     (AX.OpenPDU(True, 12, 34, 56, 78,
                                 AX.OID((1, 2, 3, 4), False),
                                 "foo"),
                      True,
                      b""))
    # Test open, extraData
    self.assertEqual(f(b"\x01\x01\x10\x00"
                       b"\x00\x00\x00\x0C\x00\x00\x00\x22"
                       b"\x00\x00\x00\x38\x00\x00\x00\x20"
                       b"\x4E\x00\x00\x00"
                       b"\x04\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x04"
                       b"\x00\x00\x00\x03foo\x00" + extraData),
                     (AX.OpenPDU(True, 12, 34, 56, 78,
                                 AX.OID((1, 2, 3, 4), False),
                                 "foo"),
                      True,
                      extraData))
    # Test close
    self.assertEqual(f(b"\x01\x02\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x04"
                       b"\x01\x00\x00\x00"),
                     (AX.ClosePDU(True, 1, 2, 3, AX.RSN_OTHER),
                      True,
                      b""))
    # Test register
    self.assertEqual(f(b"\x01\x03\x11\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x14"
                       b"\x04\x05\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x03"),
                     (AX.RegisterPDU(True, 1, 2, 3, 4, 5,
                                     AX.OID((1, 2, 3), False),
                                     0, None, None),
                      True,
                      b""))
    # Test unregister
    self.assertEqual(f(b"\x01\x04\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x14"
                       b"\x00\x05\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x03"),
                     (AX.UnregisterPDU(True, 1, 2, 3, 5,
                                       AX.OID((1, 2, 3), False),
                                       0, None, None),
                      True,
                      b""))
    # Test get
    self.assertEqual(f(b"\x01\x05\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x00"),
                     (AX.GetPDU(True, 1, 2, 3, ()),
                      True,
                      b""))
    # Test get next
    self.assertEqual(f(b"\x01\x06\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x00"),
                     (AX.GetNextPDU(True, 1, 2, 3, ()),
                      True,
                      b""))
    # Test get bulk
    self.assertEqual(f(b"\x01\x07\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x34"
                       b"\x00\x01\x00\x05"
                       b"\x02\x00\x00\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x02\x00\x00\x00"
                       b"\x00\x00\x00\x03\x00\x00\x00\x04"
                       b"\x02\x00\x01\x00"
                       b"\x00\x00\x00\x06\x00\x00\x00\x07"
                       b"\x02\x00\x00\x00"
                       b"\x00\x00\x00\x08\x00\x00\x00\x09"),
                     (AX.GetBulkPDU(True, 1, 2, 3, 1, 5,
                                    (srch((1, 2), (3, 4), False),
                                     srch((6, 7), (8, 9), True))),
                      True,
                      b""))
    # Test test set
    self.assertEqual(f(b"\x01\x08\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x40"
                       b"\x00\x06\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x03\x00\x00\x00\x00\x00\x00\x04"
                       b"\x00\x00\x00\x05\x00\x00\x00\x06"
                       b"\x00\x04\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x04"
                       b"\x00\x00\x00\x04blah"),
                     (AX.TestSetPDU(True, 1, 2, 3,
                                    (AX.Varbind(AX.VALUE_OID,
                                                AX.OID((1, 2, 3), False),
                                                AX.OID((4, 5, 6), False)),
                                     AX.Varbind(AX.VALUE_OCTET_STR,
                                                AX.OID((1, 2, 4), False),
                                                "blah"))),
                      True,
                      b""))
    # Test commit set
    self.assertEqual(f(b"\x01\x09\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x00"),
                     (AX.CommitSetPDU(True, 1, 2, 3),
                      True,
                      b""))
    # Test undo set
    self.assertEqual(f(b"\x01\x0A\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x00"),
                     (AX.UndoSetPDU(True, 1, 2, 3),
                      True,
                      b""))
    # Test cleanup set
    self.assertEqual(f(b"\x01\x0B\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x00"),
                     (AX.CleanupSetPDU(True, 1, 2, 3),
                      True,
                      b""))
    # Test notify
    self.assertEqual(f(b"\x01\x0C\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x40"
                       b"\x00\x06\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x03\x00\x00\x00\x00\x00\x00\x04"
                       b"\x00\x00\x00\x05\x00\x00\x00\x06"
                       b"\x00\x04\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x04"
                       b"\x00\x00\x00\x04blah"),
                     (AX.NotifyPDU(True, 1, 2, 3,
                                   (AX.Varbind(AX.VALUE_OID,
                                               AX.OID((1, 2, 3), False),
                                               AX.OID((4, 5, 6), False)),
                                    AX.Varbind(AX.VALUE_OCTET_STR,
                                               AX.OID((1, 2, 4), False),
                                               "blah"))),
                      True,
                      b""))
    # Test ping
    self.assertEqual(f(b"\x01\x0D\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x00"),
                     (AX.PingPDU(True, 1, 2, 3),
                      True,
                      b""))
    # Test index alloc
    self.assertEqual(f(b"\x01\x0E\x16\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x40"
                       b"\x00\x06\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x03\x00\x00\x00\x00\x00\x00\x04"
                       b"\x00\x00\x00\x05\x00\x00\x00\x06"
                       b"\x00\x04\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x04"
                       b"\x00\x00\x00\x04blah"),
                     (AX.IndexAllocPDU(True, 1, 2, 3, True, True,
                                       (AX.Varbind(AX.VALUE_OID,
                                                   AX.OID((1, 2, 3), False),
                                                   AX.OID((4, 5, 6), False)),
                                        AX.Varbind(AX.VALUE_OCTET_STR,
                                                   AX.OID((1, 2, 4), False),
                                                   "blah"))),
                      True,
                      b""))
    # Test index dealloc
    self.assertEqual(f(b"\x01\x0F\x16\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x40"
                       b"\x00\x06\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x03"
                       b"\x03\x00\x00\x00\x00\x00\x00\x04"
                       b"\x00\x00\x00\x05\x00\x00\x00\x06"
                       b"\x00\x04\x00\x00"
                       b"\x03\x00\x00\x00\x00\x00\x00\x01"
                       b"\x00\x00\x00\x02\x00\x00\x00\x04"
                       b"\x00\x00\x00\x04blah"),
                     (AX.IndexDeallocPDU(True, 1, 2, 3, True, True,
                                         (AX.Varbind(AX.VALUE_OID,
                                                     AX.OID((1, 2, 3),
                                                            False),
                                                     AX.OID((4, 5, 6),
                                                            False)),
                                          AX.Varbind(AX.VALUE_OCTET_STR,
                                                     AX.OID((1, 2, 4),
                                                            False),
                                                     "blah"))),
                      True,
                      b""))
    # Test add agent caps
    self.assertEqual(f(b"\x01\x10\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x18"
                       b"\x03\x00\x00\x00\x00\x00\x00\x04"
                       b"\x00\x00\x00\x05\x00\x00\x00\x06"
                       b"\x00\x00\x00\x04blah"),
                     (AX.AddAgentCapsPDU(True, 1, 2, 3,
                                         AX.OID((4, 5, 6), False),
                                         "blah"),
                      True,
                      b""))
    # Test rm agent caps
    self.assertEqual(f(b"\x01\x11\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x10"
                       b"\x03\x00\x00\x00\x00\x00\x00\x04"
                       b"\x00\x00\x00\x05\x00\x00\x00\x06"),
                     (AX.RMAgentCapsPDU(True, 1, 2, 3,
                                        AX.OID((4, 5, 6), False)),
                      True,
                      b""))
    # Test response
    self.assertEqual(f(b"\x01\x12\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x08"
                       b"\x00\x00\x00\x04\x00\x05\x00\x06"),
                     (AX.ResponsePDU(True, 1, 2, 3, 4, 5, 6),
                      True,
                      b""))
    # Test errors
    # Test insufficient data for header
    try:
        self.assertEqual(f(b""), None)
        errored = False
    except AX.ParseError as e:
        errored = e
    self.assertEqual(errored.message, "Data too short for header")
    # Test insufficient data for packet: returns the raw bytes untouched
    # as leftovers instead of raising
    self.assertEqual(f(b"\x01\x11\x10\x00"
                       b"\x00\x00\x00\x01\x00\x00\x00\x02"
                       b"\x00\x00\x00\x03\x00\x00\x00\x10"
                       b"\x03\x00\x00\x00\x00\x00\x00\x04"),
                     (None, False,
                      b"\x01\x11\x10\x00"
                      b"\x00\x00\x00\x01\x00\x00\x00\x02"
                      b"\x00\x00\x00\x03\x00\x00\x00\x10"
                      b"\x03\x00\x00\x00\x00\x00\x00\x04"))
    # Test wrong version
    try:
        f(b"\x02\x11\x10\x00"
          b"\x00\x00\x00\x00\x00\x00\x00\x00"
          b"\x00\x00\x00\x00\x00\x00\x00\x08"
          b"blahblahjabber")
        errored = False
    except AX.ParseError as e:
        errored = e
    self.assertEqual(errored.message, "Unknown packet version 2")
    self.assertEqual(errored.packetData, b"blahblah")
    self.assertEqual(errored.remainingData, b"jabber")
    # Test unrecognized packet type
    try:
        f(b"\x01\xFF\x10\x00"
          b"\x00\x00\x00\x00\x00\x00\x00\x00"
          b"\x00\x00\x00\x00\x00\x00\x00\x08"
          b"blahblah")
        errored = False
    except AX.ParseError as e:
        errored = e
    self.assertEqual(errored.message, "PDU type 255 not in defined types")
def test_bits2Bools(self):
    """bits2Bools expands a bit string into a list of booleans, most
    significant bit first, optionally cropped to a bit count.

    NOTE(review): these fixtures pass str ("\\xFA\\xCE"), not bytes —
    presumably bits2Bools operates on str on this codebase's target
    Python; confirm against the AX implementation.
    """
    bits2bool = AX.bits2Bools
    # Test empty
    self.assertEqual(bits2bool(""), [])
    # Test round bytes
    self.assertEqual(bits2bool("\xFA\xCE"),
                     [True, True, True, True,
                      True, False, True, False,
                      True, True, False, False,
                      True, True, True, False])
    # Test partial bytes
    self.assertEqual(bits2bool("\xFF\xE1", 12),  # The extra bit is to
                     [True, True, True, True,    # confirm crop
                      True, True, True, True,
                      True, True, True, False])
def test_bools2bits(self):
    """bools2Bits packs a boolean list into a bit string, MSB first,
    zero-filling any trailing partial byte.

    NOTE(review): the expected values are str, not bytes — same
    str-based bit-string convention as test_bits2Bools; confirm.
    """
    bool2bits = AX.bools2Bits
    # Test empty
    self.assertEqual(bool2bits([]), "")
    # Test round bytes
    self.assertEqual(bool2bits([True, True, True, True,
                                True, False, True, False,
                                True, True, False, False,
                                True, True, True, False]),
                     "\xFA\xCE")
    # Test partial bytes (12 bools -> low 4 bits of last byte are zero)
    self.assertEqual(bool2bits([True, True, True, True,
                                True, True, True, True,
                                True, True, True, False]),
                     "\xFF\xE0")
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 47.387425 | 81 | 0.468668 | 15,505 | 125,861 | 3.731183 | 0.033409 | 0.247148 | 0.200218 | 0.056627 | 0.871292 | 0.845658 | 0.818537 | 0.793801 | 0.764433 | 0.744745 | 0 | 0.161166 | 0.395547 | 125,861 | 2,655 | 82 | 47.405273 | 0.599274 | 0.053901 | 0 | 0.68125 | 0 | 0.040179 | 0.199107 | 0.144318 | 0 | 0 | 0.001651 | 0 | 0.21875 | 1 | 0.018304 | false | 0 | 0.001339 | 0 | 0.020089 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
540b35e238828c9724f722734c3c6cbf9827a8c2 | 7,979 | py | Python | lib/dataloader.py | PaperCodeSubmission/ICML2020-697 | 00f7732c236b9c6234e76a47dfebe5de314d5c01 | [
"MIT"
] | 12 | 2019-09-26T01:55:25.000Z | 2020-01-21T06:53:04.000Z | lib/dataloader.py | PaperCodeSubmission/ICML2020-697 | 00f7732c236b9c6234e76a47dfebe5de314d5c01 | [
"MIT"
] | 2 | 2021-08-09T03:53:26.000Z | 2021-08-18T10:16:25.000Z | lib/dataloader.py | PaperCodeSubmission/ICML2020-697 | 00f7732c236b9c6234e76a47dfebe5de314d5c01 | [
"MIT"
] | 4 | 2019-09-27T02:18:38.000Z | 2020-01-21T06:53:15.000Z | import torch
from torch.utils.data.sampler import SubsetRandomSampler
from torchvision import transforms, datasets
def mnist_dataset(dataset_base_path, train_flag=True):
    """Build the MNIST dataset with pad/random-crop preprocessing.

    NOTE(review): Pad(4, reflect) + RandomCrop(32) is applied to BOTH
    splits here (only RandomHorizontalFlip is train-only), so even test
    images are randomly cropped to 32x32 — unlike the SVHN/CIFAR helpers
    below, whose test transform is ToTensor only. Confirm this is
    intentional.

    :param dataset_base_path: root directory handed to torchvision.
    :param train_flag: True selects the training split, False the test split.
    :return: a ``datasets.MNIST`` instance with the composed transform.
    """
    pipeline = transforms.Compose([
        transforms.Pad(4, padding_mode='reflect'),
        transforms.RandomCrop(32),
        transforms.ToTensor()
    ])
    if train_flag:
        # Prepend the train-only horizontal flip in front of the shared pipeline.
        pipeline = transforms.Compose([
            transforms.RandomHorizontalFlip(),
            pipeline
        ])
    return datasets.MNIST(root=dataset_base_path, train=bool(train_flag),
                          transform=pipeline, download=True)
def svhn_dataset(dataset_base_path, train_flag=True):
    """Build the SVHN dataset.

    Train split: reflect-pad 4, random horizontal flip, random 32x32 crop,
    then ToTensor. Test split: ToTensor only.

    :param dataset_base_path: root directory handed to torchvision.
    :param train_flag: True selects the 'train' split, False the 'test' split.
    :return: a ``datasets.SVHN`` instance with the composed transform.
    """
    pipeline = transforms.Compose([
        transforms.ToTensor()
    ])
    if train_flag:
        pipeline = transforms.Compose([
            transforms.Pad(4, padding_mode='reflect'),
            transforms.RandomHorizontalFlip(),
            transforms.RandomCrop(32),
            pipeline
        ])
    split_name = 'train' if train_flag else 'test'
    return datasets.SVHN(root=dataset_base_path, split=split_name,
                         transform=pipeline, download=True)
def cifar100_dataset(dataset_base_path, train_flag=True):
    """Build CIFAR-100; the training split gets pad/flip/crop augmentation,
    the test split is ToTensor only.

    :param dataset_base_path: root directory handed to torchvision.
    :param train_flag: True selects the training split, False the test split.
    :return: a ``datasets.CIFAR100`` instance with the composed transform.
    """
    steps = [transforms.ToTensor()]
    if train_flag:
        # Augmentations run before tensor conversion.
        steps = [
            transforms.Pad(4, padding_mode='reflect'),
            transforms.RandomHorizontalFlip(),
            transforms.RandomCrop(32),
        ] + steps
    return datasets.CIFAR100(root=dataset_base_path, train=train_flag,
                             download=True,
                             transform=transforms.Compose(steps))
def cifar10_dataset(dataset_base_path, train_flag=True):
    """Build CIFAR-10; the training split gets pad/flip/crop augmentation,
    the test split is ToTensor only.

    :param dataset_base_path: root directory handed to torchvision.
    :param train_flag: True selects the training split, False the test split.
    :return: a ``datasets.CIFAR10`` instance with the composed transform.
    """
    steps = [transforms.ToTensor()]
    if train_flag:
        # Augmentations run before tensor conversion.
        steps = [
            transforms.Pad(4, padding_mode='reflect'),
            transforms.RandomHorizontalFlip(),
            transforms.RandomCrop(32),
        ] + steps
    return datasets.CIFAR10(root=dataset_base_path, train=train_flag,
                            download=True,
                            transform=transforms.Compose(steps))
def get_cifar10_sl_sampler(labels, valid_num_per_class, num_classes):
    """Split dataset indices class-by-class into validation/train samplers.

    :param labels: 1-d int tensor of per-sample class labels
    :param valid_num_per_class: number of validation samples kept per class
    :param num_classes: total number of classes
    :return: (sampler_valid, sampler_train), both SubsetRandomSamplers
    """
    valid_idx = []
    train_idx = []
    for cls in range(num_classes):
        cls_idx = torch.nonzero(labels == cls).view(-1)
        # Shuffle within the class so the validation subset is drawn uniformly.
        cls_idx = cls_idx[torch.randperm(cls_idx.size(0))]
        valid_idx += cls_idx[:valid_num_per_class].tolist()
        train_idx += cls_idx[valid_num_per_class:].tolist()
    return (SubsetRandomSampler(valid_idx),
            SubsetRandomSampler(train_idx))
def get_cifar100_sl_sampler(labels, valid_num_per_class, num_classes=100):
    """Split dataset indices class-by-class into validation/train samplers.

    :param labels: 1-d int tensor of per-sample class labels
    :param valid_num_per_class: number of validation samples kept per class
    :param num_classes: total number of classes (defaults to 100 for CIFAR-100)
    :return: (sampler_valid, sampler_train), both SubsetRandomSamplers
    """
    valid_idx = []
    train_idx = []
    for cls in range(num_classes):
        cls_idx = torch.nonzero(labels == cls).view(-1)
        # Shuffle within the class so the validation subset is drawn uniformly.
        cls_idx = cls_idx[torch.randperm(cls_idx.size(0))]
        valid_idx += cls_idx[:valid_num_per_class].tolist()
        train_idx += cls_idx[valid_num_per_class:].tolist()
    return (SubsetRandomSampler(valid_idx),
            SubsetRandomSampler(train_idx))
def get_ssl_sampler(labels, valid_num_per_class, annotated_num_per_class, num_classes):
    """Build validation / labeled-train / unlabeled-train samplers per class.

    For each class the (shuffled) indices are split so that the first
    ``valid_num_per_class`` go to validation and the next
    ``annotated_num_per_class`` to the labeled pool. The unlabeled pool
    deliberately re-includes the labeled indices (everything after the
    validation cut).

    :param labels: 1-d int tensor of per-sample class labels
    :param valid_num_per_class: number of validation samples per class
    :param annotated_num_per_class: number of labeled samples per class
    :param num_classes: total number of classes
    :return: (sampler_valid, sampler_train_l, sampler_train_u)
    """
    valid_idx = []
    labeled_idx = []
    unlabeled_idx = []
    for cls in range(num_classes):
        cls_idx = torch.nonzero(labels == cls).view(-1)
        # Shuffle within the class so subsets are drawn uniformly.
        cls_idx = cls_idx[torch.randperm(cls_idx.size(0))]
        cut = valid_num_per_class
        valid_idx += cls_idx[:cut].tolist()
        labeled_idx += cls_idx[cut:cut + annotated_num_per_class].tolist()
        # The unlabeled pool intentionally contains the labeled pool too.
        unlabeled_idx += cls_idx[cut:].tolist()
    return (SubsetRandomSampler(valid_idx),
            SubsetRandomSampler(labeled_idx),
            SubsetRandomSampler(unlabeled_idx))
def get_cifar10_ssl_sampler(labels, valid_num_per_class, annotated_num_per_class, num_classes):
    """Build validation / labeled-train / unlabeled-train samplers per class.

    Same split policy as :func:`get_ssl_sampler`: per class, the first
    ``valid_num_per_class`` shuffled indices go to validation, the next
    ``annotated_num_per_class`` to the labeled pool, and the unlabeled
    pool re-includes the labeled indices.

    :param labels: 1-d int tensor of per-sample class labels
    :param valid_num_per_class: number of validation samples per class
    :param annotated_num_per_class: number of labeled samples per class
    :param num_classes: total number of classes
    :return: (sampler_valid, sampler_train_l, sampler_train_u)
    """
    valid_idx = []
    labeled_idx = []
    unlabeled_idx = []
    for cls in range(num_classes):
        cls_idx = torch.nonzero(labels == cls).view(-1)
        # Shuffle within the class so subsets are drawn uniformly.
        cls_idx = cls_idx[torch.randperm(cls_idx.size(0))]
        cut = valid_num_per_class
        valid_idx += cls_idx[:cut].tolist()
        labeled_idx += cls_idx[cut:cut + annotated_num_per_class].tolist()
        # The unlabeled pool intentionally contains the labeled pool too.
        unlabeled_idx += cls_idx[cut:].tolist()
    return (SubsetRandomSampler(valid_idx),
            SubsetRandomSampler(labeled_idx),
            SubsetRandomSampler(unlabeled_idx))
def get_cifar100_ssl_sampler(labels, valid_num_per_class, annotated_num_per_class, num_classes=100):
    """Build validation / labeled-train / unlabeled-train samplers per class.

    Same split policy as :func:`get_ssl_sampler`, with ``num_classes``
    defaulting to 100 for CIFAR-100.

    :param labels: 1-d int tensor of per-sample class labels
    :param valid_num_per_class: number of validation samples per class
    :param annotated_num_per_class: number of labeled samples per class
    :param num_classes: total number of classes
    :return: (sampler_valid, sampler_train_l, sampler_train_u)
    """
    valid_idx = []
    labeled_idx = []
    unlabeled_idx = []
    for cls in range(num_classes):
        cls_idx = torch.nonzero(labels == cls).view(-1)
        # Shuffle within the class so subsets are drawn uniformly.
        cls_idx = cls_idx[torch.randperm(cls_idx.size(0))]
        cut = valid_num_per_class
        valid_idx += cls_idx[:cut].tolist()
        labeled_idx += cls_idx[cut:cut + annotated_num_per_class].tolist()
        # The unlabeled pool intentionally contains the labeled pool too.
        unlabeled_idx += cls_idx[cut:].tolist()
    return (SubsetRandomSampler(valid_idx),
            SubsetRandomSampler(labeled_idx),
            SubsetRandomSampler(unlabeled_idx))
if __name__ == "__main__":
    # Direct-execution entry point; only imports DataLoader — the file
    # performs no further work when run as a script.
    from torch.utils.data import DataLoader
| 40.29798 | 111 | 0.703848 | 1,026 | 7,979 | 5.173489 | 0.092593 | 0.097212 | 0.078749 | 0.078372 | 0.949322 | 0.949322 | 0.943482 | 0.913338 | 0.880558 | 0.860776 | 0 | 0.007575 | 0.20579 | 7,979 | 197 | 112 | 40.502538 | 0.830046 | 0.221206 | 0 | 0.842105 | 0 | 0 | 0.007431 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.067669 | false | 0 | 0.030075 | 0 | 0.165414 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
581d2b64d4bae0f205d91d4588c0dbb718f1445b | 120 | py | Python | MUP_TG_BOT/src/utils/link_generator.py | DobroAlex/Simferopol_MUP_Telegram_Bot | a26d510178ed704690c497d599e0fc4d48b0deea | [
"MIT"
] | null | null | null | MUP_TG_BOT/src/utils/link_generator.py | DobroAlex/Simferopol_MUP_Telegram_Bot | a26d510178ed704690c497d599e0fc4d48b0deea | [
"MIT"
] | null | null | null | MUP_TG_BOT/src/utils/link_generator.py | DobroAlex/Simferopol_MUP_Telegram_Bot | a26d510178ed704690c497d599e0fc4d48b0deea | [
"MIT"
def generate_page_from_account(account):
    """Return the MUP KGS debt-lookup URL for the given personal account number.

    :param account: personal account identifier, interpolated verbatim
        into the ``lschet`` query parameter.
    :return: the full lookup URL as a string.
    """
    base = 'http://mup-kgs-simf.ru/index.php?str=nach_dolg&lschet='
    return base + str(account)
| 30 | 77 | 0.766667 | 20 | 120 | 4.4 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 120 | 3 | 78 | 40 | 0.792793 | 0 | 0 | 0 | 1 | 0.5 | 0.529412 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
583dcd482ac718f03ca58458dc2ee0abc7aafea4 | 4,038 | py | Python | Server/app/docs/admin/question.py | Jaws-bar/Entry3.0-InterviewSystem | 15385f9982c0c4e9aed970263b7ea1e50d6163ca | [
"MIT"
] | null | null | null | Server/app/docs/admin/question.py | Jaws-bar/Entry3.0-InterviewSystem | 15385f9982c0c4e9aed970263b7ea1e50d6163ca | [
"MIT"
] | null | null | null | Server/app/docs/admin/question.py | Jaws-bar/Entry3.0-InterviewSystem | 15385f9982c0c4e9aed970263b7ea1e50d6163ca | [
"MIT"
# Swagger/Flasgger spec: create a new question (admin).
# Requires an access token in the Authentication header; title, body and
# the evaluation form are all mandatory JSON fields.
# NOTE: descriptions are Korean runtime strings served to API consumers
# and are kept verbatim.
NEW_POST = {
    'tags': ['Admin'],
    'description': '새 질문 생성',
    'parameters': [
        {
            'name': 'access_token',
            'description': '엑세스 토큰, 헤더의 Authentication',
            'in': ' header',
            'type': 'str',
            'required': True
        },
        {
            'name': 'title',
            'description': '제목',
            'in': 'json',
            'type': 'str',
            'required': True
        },
        {
            'name': 'body',
            'description': '본문',
            'in': 'json',
            'type': 'str',
            'required': True
        },
        {
            'name': 'form',
            'description': '평가항목',
            'in': 'json',
            'type': 'json',
            'required': True
        }
    ],
    'responses': {
        '200': {
            'description': '성공'
        }
    }
}
# Swagger/Flasgger spec: list all questions (admin).
# Requires an access token; the 200 example shows one question object
# with its evaluation form, title, id and body.
MAIN_GET = {
    'tags': ['Admin'],
    'description': '질문 목록 불러오기',
    'parameters': [
        {
            'name': 'access_token',
            'description': '엑세스 토큰, 헤더의 Authentication',
            'in': ' header',
            'type': 'str',
            'required': True
        }
    ],
    'responses': {
        '200': {
            'description': '성공',
            'examples': [
                {
                    "form": {
                        "1": "subject name"
                    },
                    "title": "question 1",
                    "question_id": 1,
                    "body": "body"
                }
            ]
        }
    }
}
# Swagger/Flasgger spec: fetch a single question by id (admin).
# The question id travels in the URL path; the 200 example is a single
# question object (a dict here, unlike MAIN_GET's list).
MANAGE_GET = {
    'tags': ['Admin'],
    'description': '질문 보기',
    'parameters': [
        {
            'name': 'access_token',
            'description': '엑세스 토큰, 헤더의 Authentication',
            'in': ' header',
            'type': 'str',
            'required': True
        },
        {
            'name': 'question id',
            'description': '질문 번호',
            'in': ' path',
            'type': 'int',
            'required': True
        }
    ],
    'responses': {
        '200': {
            'description': '성공',
            'examples': {
                "form":
                    {
                        "1": "subject name"
                    },
                "title": "question 1",
                "question_id": 1,
                "body": "body"
            }
        }
    }
}
# Swagger/Flasgger spec: partially update a question (admin).
# The question id is a required path parameter; title, body and the
# evaluation form are all optional JSON fields (PATCH semantics).
MANAGE_PATCH = {
    'tags': ['Admin'],
    'description': '질문 수정',
    'parameters': [
        {
            'name': 'access_token',
            'description': '엑세스 토큰, 헤더의 Authentication',
            'in': ' header',
            'type': 'str',
            'required': True
        },
        {
            'name': 'question id',
            'description': '질문 번호',
            'in': ' path',
            'type': 'int',
            'required': True
        },
        {
            'name': 'title',
            'description': '제목',
            'in': 'json',
            'type': 'str',
            'required': False
        },
        {
            'name': 'body',
            'description': '본문',
            'in': 'json',
            'type': 'str',
            'required': False
        },
        {
            'name': 'form',
            'description': '평가항목',
            'in': 'json',
            'type': 'json',
            'required': False
        }
    ],
    'responses': {
        '200': {
            'description': '성공'
        }
    }
}
# Swagger/Flasgger spec: delete a question by id (admin).
# 403 is documented for a failed question lookup.
MANAGE_DELETE = {
    'tags': ['Admin'],
    'description': '질문 삭제',
    'parameters': [
        {
            'name': 'access_token',
            'description': '엑세스 토큰, 헤더의 Authentication',
            'in': ' header',
            'type': 'str',
            'required': True
        },
        {
            'name': 'question id',
            'description': '질문 번호',
            'in': ' path',
            'type': 'int',
            'required': True
        }
    ],
    'responses': {
        '200': {
            'description': '성공'
        },
        '403': {
            'description': '질문 검색 실패'
        }
    }
}
5442eec480f9fb2afe2f95327cda696d139fc45f | 77,923 | py | Python | com/vmware/nsx/serviceinsertion_client.py | vishal-12/vsphere-automation-sdk-python | 9cf363971db77ea5a12928eecd5cf5170a7fcd8a | [
"MIT"
] | null | null | null | com/vmware/nsx/serviceinsertion_client.py | vishal-12/vsphere-automation-sdk-python | 9cf363971db77ea5a12928eecd5cf5170a7fcd8a | [
"MIT"
] | null | null | null | com/vmware/nsx/serviceinsertion_client.py | vishal-12/vsphere-automation-sdk-python | 9cf363971db77ea5a12928eecd5cf5170a7fcd8a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2019 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.nsx.serviceinsertion.
#---------------------------------------------------------------------------
"""
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class Sections(VapiInterface):
    """
    Auto-generated vAPI stub for the NSX ``serviceinsertion.sections``
    service. Provides create/read/update/delete of Service Insertion
    sections, positional moves (``revise``) and the bulk ``*withrules``
    variants that carry a section's rules in the same payload. Every
    method delegates to :meth:`VapiInterface._invoke` on the generated
    ``_SectionsStub``. Do not edit by hand — this file is generated.
    """
    CREATE_OPERATION_TOP = "insert_top"
    """
    Possible value for ``operation`` of method :func:`Sections.create`.

    """
    CREATE_OPERATION_BOTTOM = "insert_bottom"
    """
    Possible value for ``operation`` of method :func:`Sections.create`.

    """
    CREATE_OPERATION_AFTER = "insert_after"
    """
    Possible value for ``operation`` of method :func:`Sections.create`.

    """
    CREATE_OPERATION_BEFORE = "insert_before"
    """
    Possible value for ``operation`` of method :func:`Sections.create`.

    """
    CREATEWITHRULES_OPERATION_TOP = "insert_top"
    """
    Possible value for ``operation`` of method :func:`Sections.createwithrules`.

    """
    CREATEWITHRULES_OPERATION_BOTTOM = "insert_bottom"
    """
    Possible value for ``operation`` of method :func:`Sections.createwithrules`.

    """
    CREATEWITHRULES_OPERATION_AFTER = "insert_after"
    """
    Possible value for ``operation`` of method :func:`Sections.createwithrules`.

    """
    CREATEWITHRULES_OPERATION_BEFORE = "insert_before"
    """
    Possible value for ``operation`` of method :func:`Sections.createwithrules`.

    """
    LIST_EXCLUDE_APPLIED_TO_TYPE_NSGROUP = "NSGroup"
    """
    Possible value for ``excludeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_EXCLUDE_APPLIED_TO_TYPE_LOGICALSWITCH = "LogicalSwitch"
    """
    Possible value for ``excludeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_EXCLUDE_APPLIED_TO_TYPE_LOGICALROUTER = "LogicalRouter"
    """
    Possible value for ``excludeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_EXCLUDE_APPLIED_TO_TYPE_LOGICALPORT = "LogicalPort"
    """
    Possible value for ``excludeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_FILTER_TYPE_FILTER = "FILTER"
    """
    Possible value for ``filterType`` of method :func:`Sections.list`.

    """
    LIST_FILTER_TYPE_SEARCH = "SEARCH"
    """
    Possible value for ``filterType`` of method :func:`Sections.list`.

    """
    LIST_INCLUDE_APPLIED_TO_TYPE_NSGROUP = "NSGroup"
    """
    Possible value for ``includeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_INCLUDE_APPLIED_TO_TYPE_LOGICALSWITCH = "LogicalSwitch"
    """
    Possible value for ``includeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_INCLUDE_APPLIED_TO_TYPE_LOGICALROUTER = "LogicalRouter"
    """
    Possible value for ``includeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_INCLUDE_APPLIED_TO_TYPE_LOGICALPORT = "LogicalPort"
    """
    Possible value for ``includeAppliedToType`` of method :func:`Sections.list`.

    """
    LIST_TYPE_L3REDIRECT = "L3REDIRECT"
    """
    Possible value for ``type`` of method :func:`Sections.list`.

    """
    REVISE_OPERATION_TOP = "insert_top"
    """
    Possible value for ``operation`` of method :func:`Sections.revise`.

    """
    REVISE_OPERATION_BOTTOM = "insert_bottom"
    """
    Possible value for ``operation`` of method :func:`Sections.revise`.

    """
    REVISE_OPERATION_AFTER = "insert_after"
    """
    Possible value for ``operation`` of method :func:`Sections.revise`.

    """
    REVISE_OPERATION_BEFORE = "insert_before"
    """
    Possible value for ``operation`` of method :func:`Sections.revise`.

    """
    REVISEWITHRULES_OPERATION_TOP = "insert_top"
    """
    Possible value for ``operation`` of method :func:`Sections.revisewithrules`.

    """
    REVISEWITHRULES_OPERATION_BOTTOM = "insert_bottom"
    """
    Possible value for ``operation`` of method :func:`Sections.revisewithrules`.

    """
    REVISEWITHRULES_OPERATION_AFTER = "insert_after"
    """
    Possible value for ``operation`` of method :func:`Sections.revisewithrules`.

    """
    REVISEWITHRULES_OPERATION_BEFORE = "insert_before"
    """
    Possible value for ``operation`` of method :func:`Sections.revisewithrules`.

    """
    _VAPI_SERVICE_ID = 'com.vmware.nsx.serviceinsertion.sections'
    """
    Identifier of the service in canonical form.
    """

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _SectionsStub)

    def create(self,
               service_insertion_section,
               id=None,
               operation=None,
               ):
        """
        Creates new empty Service Insertion section in the system.

        :type  service_insertion_section: :class:`com.vmware.nsx.model_client.ServiceInsertionSection`
        :param service_insertion_section: (required)
        :type  id: :class:`str` or ``None``
        :param id: Identifier of the anchor rule or section. This is a required field
            in case operation like 'insert_before' and 'insert_after'.
            (optional)
        :type  operation: :class:`str` or ``None``
        :param operation: Operation (optional, default to insert_top)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSection`
        :return: com.vmware.nsx.model.ServiceInsertionSection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('create',
                            {
                            'service_insertion_section': service_insertion_section,
                            'id': id,
                            'operation': operation,
                            })

    def createwithrules(self,
                        service_insertion_section_rule_list,
                        id=None,
                        operation=None,
                        ):
        """
        Creates a new serviceinsertion section with rules. The limit on the
        number of rules is defined by maxItems in collection types for
        ServiceInsertionRule (ServiceInsertionRuleXXXList types). When invoked
        on a section with a large number of rules, this API is supported only
        at low rates of invocation (not more than 4-5 times per minute). The
        typical latency of this API with about 1024 rules is about 4-5 seconds.
        This API should not be invoked with large payloads at automation
        speeds. More than 50 rules are not supported. Instead, to create
        sections, use: POST /api/v1/serviceinsertion/sections To create rules,
        use: POST /api/v1/serviceinsertion/sections/<section-id>/rules

        :type  service_insertion_section_rule_list: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionRuleList`
        :param service_insertion_section_rule_list: (required)
        :type  id: :class:`str` or ``None``
        :param id: Identifier of the anchor rule or section. This is a required field
            in case operation like 'insert_before' and 'insert_after'.
            (optional)
        :type  operation: :class:`str` or ``None``
        :param operation: Operation (optional, default to insert_top)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionRuleList`
        :return: com.vmware.nsx.model.ServiceInsertionSectionRuleList
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('createwithrules',
                            {
                            'service_insertion_section_rule_list': service_insertion_section_rule_list,
                            'id': id,
                            'operation': operation,
                            })

    def delete(self,
               section_id,
               cascade=None,
               ):
        """
        Removes serviceinsertion section from the system. ServiceInsertion
        section with rules can only be deleted by passing \"cascade=true\"
        parameter.

        :type  section_id: :class:`str`
        :param section_id: (required)
        :type  cascade: :class:`bool` or ``None``
        :param cascade: Flag to cascade delete of this object to all it's child objects.
            (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('delete',
                            {
                            'section_id': section_id,
                            'cascade': cascade,
                            })

    def get(self,
            section_id,
            ):
        """
        Returns information about serviceinsertion section for the identifier.

        :type  section_id: :class:`str`
        :param section_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSection`
        :return: com.vmware.nsx.model.ServiceInsertionSection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('get',
                            {
                            'section_id': section_id,
                            })

    # NOTE: the ``type`` parameter shadows the builtin, but it is part of
    # the generated public interface and must keep its name.
    def list(self,
             applied_tos=None,
             cursor=None,
             destinations=None,
             exclude_applied_to_type=None,
             filter_type=None,
             include_applied_to_type=None,
             included_fields=None,
             page_size=None,
             services=None,
             sort_ascending=None,
             sort_by=None,
             sources=None,
             type=None,
             ):
        """
        List all Service Insertion section in paginated form. A default page
        size is limited to 1000 sections. By default, the list of section is
        filtered by L3REDIRECT type.

        :type  applied_tos: :class:`str` or ``None``
        :param applied_tos: AppliedTo's referenced by this section or section's Distributed
            Service Rules . (optional)
        :type  cursor: :class:`str` or ``None``
        :param cursor: Opaque cursor to be used for getting next page of records (supplied
            by current result page) (optional)
        :type  destinations: :class:`str` or ``None``
        :param destinations: Destinations referenced by this section's Distributed Service Rules
            . (optional)
        :type  exclude_applied_to_type: :class:`str` or ``None``
        :param exclude_applied_to_type: Resource type valid for use as AppliedTo filter in section API
            (optional)
        :type  filter_type: :class:`str` or ``None``
        :param filter_type: Filter type (optional, default to FILTER)
        :type  include_applied_to_type: :class:`str` or ``None``
        :param include_applied_to_type: Resource type valid for use as AppliedTo filter in section API
            (optional)
        :type  included_fields: :class:`str` or ``None``
        :param included_fields: Comma separated list of fields that should be included in query
            result (optional)
        :type  page_size: :class:`long` or ``None``
        :param page_size: Maximum number of results to return in this page (server may return
            fewer) (optional, default to 1000)
        :type  services: :class:`str` or ``None``
        :param services: NSService referenced by this section's Distributed Service Rules .
            (optional)
        :type  sort_ascending: :class:`bool` or ``None``
        :param sort_ascending: (optional)
        :type  sort_by: :class:`str` or ``None``
        :param sort_by: Field by which records are sorted (optional)
        :type  sources: :class:`str` or ``None``
        :param sources: Sources referenced by this section's Distributed Service Rules .
            (optional)
        :type  type: :class:`str` or ``None``
        :param type: Section Type (optional, default to L3REDIRECT)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionListResult`
        :return: com.vmware.nsx.model.ServiceInsertionSectionListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('list',
                            {
                            'applied_tos': applied_tos,
                            'cursor': cursor,
                            'destinations': destinations,
                            'exclude_applied_to_type': exclude_applied_to_type,
                            'filter_type': filter_type,
                            'include_applied_to_type': include_applied_to_type,
                            'included_fields': included_fields,
                            'page_size': page_size,
                            'services': services,
                            'sort_ascending': sort_ascending,
                            'sort_by': sort_by,
                            'sources': sources,
                            'type': type,
                            })

    def listwithrules(self,
                      section_id,
                      ):
        """
        Returns serviceinsertion section information with rules for a section
        identifier. When invoked on a section with a large number of rules,
        this API is supported only at low rates of invocation (not more than
        4-5 times per minute). The typical latency of this API with about 1024
        rules is about 4-5 seconds. This API should not be invoked with large
        payloads at automation speeds. More than 50 rules are not supported.
        Instead, to read serviceinsertion rules, use: GET
        /api/v1/serviceinsertion/sections/<section-id>/rules with the
        appropriate page_size.

        :type  section_id: :class:`str`
        :param section_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionRuleList`
        :return: com.vmware.nsx.model.ServiceInsertionSectionRuleList
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('listwithrules',
                            {
                            'section_id': section_id,
                            })

    def revise(self,
               section_id,
               service_insertion_section,
               id=None,
               operation=None,
               ):
        """
        Modifies an existing serviceinsertion section along with its relative
        position among other serviceinsertion sections in the system.

        :type  section_id: :class:`str`
        :param section_id: (required)
        :type  service_insertion_section: :class:`com.vmware.nsx.model_client.ServiceInsertionSection`
        :param service_insertion_section: (required)
        :type  id: :class:`str` or ``None``
        :param id: Identifier of the anchor rule or section. This is a required field
            in case operation like 'insert_before' and 'insert_after'.
            (optional)
        :type  operation: :class:`str` or ``None``
        :param operation: Operation (optional, default to insert_top)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSection`
        :return: com.vmware.nsx.model.ServiceInsertionSection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('revise',
                            {
                            'section_id': section_id,
                            'service_insertion_section': service_insertion_section,
                            'id': id,
                            'operation': operation,
                            })

    def revisewithrules(self,
                        section_id,
                        service_insertion_section_rule_list,
                        id=None,
                        operation=None,
                        ):
        """
        Modifies an existing serviceinsertion section along with its relative
        position among other serviceinsertion sections with rules. When invoked
        on a large number of rules, this API is supported only at low rates of
        invocation (not more than 2 times per minute). The typical latency of
        this API with about 1024 rules is about 15 seconds in a cluster setup.
        This API should not be invoked with large payloads at automation
        speeds. Instead, to move a section above or below another section, use:
        POST /api/v1/serviceinsertion/sections/<section-id>?action=revise To
        modify rules, use: PUT
        /api/v1/serviceinsertion/sections/<section-id>/rules/<rule-id>

        :type  section_id: :class:`str`
        :param section_id: (required)
        :type  service_insertion_section_rule_list: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionRuleList`
        :param service_insertion_section_rule_list: (required)
        :type  id: :class:`str` or ``None``
        :param id: Identifier of the anchor rule or section. This is a required field
            in case operation like 'insert_before' and 'insert_after'.
            (optional)
        :type  operation: :class:`str` or ``None``
        :param operation: Operation (optional, default to insert_top)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionRuleList`
        :return: com.vmware.nsx.model.ServiceInsertionSectionRuleList
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('revisewithrules',
                            {
                            'section_id': section_id,
                            'service_insertion_section_rule_list': service_insertion_section_rule_list,
                            'id': id,
                            'operation': operation,
                            })

    def update(self,
               section_id,
               service_insertion_section,
               ):
        """
        Modifies the specified section, but does not modify the section's
        associated rules.

        :type  section_id: :class:`str`
        :param section_id: (required)
        :type  service_insertion_section: :class:`com.vmware.nsx.model_client.ServiceInsertionSection`
        :param service_insertion_section: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSection`
        :return: com.vmware.nsx.model.ServiceInsertionSection
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('update',
                            {
                            'section_id': section_id,
                            'service_insertion_section': service_insertion_section,
                            })

    def updatewithrules(self,
                        section_id,
                        service_insertion_section_rule_list,
                        ):
        """
        Modifies existing serviceinsertion section along with its association
        with rules. When invoked on a large number of rules, this API is
        supported only at low rates of invocation (not more than 2 times per
        minute). The typical latency of this API with about 1024 rules is about
        15 seconds in a cluster setup. This API should not be invoked with
        large payloads at automation speeds. Instead, to update rule content,
        use: PUT /api/v1/serviceinsertion/sections/<section-id>/rules/<rule-id>

        :type  section_id: :class:`str`
        :param section_id: (required)
        :type  service_insertion_section_rule_list: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionRuleList`
        :param service_insertion_section_rule_list: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionSectionRuleList`
        :return: com.vmware.nsx.model.ServiceInsertionSectionRuleList
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        return self._invoke('updatewithrules',
                            {
                            'section_id': section_id,
                            'service_insertion_section_rule_list': service_insertion_section_rule_list,
                            })
class ServiceAttachments(VapiInterface):
    """
    CRUD operations for NSX service-insertion service attachments.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.serviceinsertion.service_attachments'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ServiceAttachmentsStub)

    def create(self,
               service_attachment,
               ):
        """
        Add a new service attachment. A service attachment marks a point on
        an NSX entity (for example a logical router) to which a service
        instance can be connected through an InstanceEndpoint.

        :type  service_attachment: :class:`com.vmware.nsx.model_client.ServiceAttachment`
        :param service_attachment: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceAttachment`
        :return: com.vmware.nsx.model.ServiceAttachment
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {'service_attachment': service_attachment}
        return self._invoke('create', payload)

    def delete(self,
               service_attachment_id,
               ):
        """
        Delete an existing service attachment from the system. Before
        deleting, ensure that no instance endpoints are connected to this
        attachment; consequently no appliance should be connected to it
        either.

        :type  service_attachment_id: :class:`str`
        :param service_attachment_id: (required)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {'service_attachment_id': service_attachment_id}
        return self._invoke('delete', payload)

    def get(self,
            service_attachment_id,
            ):
        """
        Return detailed attachment information for the given service
        attachment.

        :type  service_attachment_id: :class:`str`
        :param service_attachment_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceAttachment`
        :return: com.vmware.nsx.model.ServiceAttachment
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {'service_attachment_id': service_attachment_id}
        return self._invoke('get', payload)

    def list(self):
        """
        Return all service attachments present in the system.

        :rtype: :class:`com.vmware.nsx.model_client.ServiceAttachmentListResult`
        :return: com.vmware.nsx.model.ServiceAttachmentListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # 'list' takes no operation arguments.
        return self._invoke('list', None)
class Services(VapiInterface):
    """
    CRUD operations for NSX service-insertion service definitions.
    """
    # Identifier of the service in canonical form.
    _VAPI_SERVICE_ID = 'com.vmware.nsx.serviceinsertion.services'

    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _ServicesStub)

    def create(self,
               service_definition,
               ):
        """
        Create a new service-insertion service in the system.

        :type  service_definition: :class:`com.vmware.nsx.model_client.ServiceDefinition`
        :param service_definition: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceDefinition`
        :return: com.vmware.nsx.model.ServiceDefinition
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {'service_definition': service_definition}
        return self._invoke('create', payload)

    def delete(self,
               service_id,
               cascade=None,
               ):
        """
        Remove a service-insertion service from the system. A service that
        still has service instances can only be deleted by passing
        \"cascade=true\".

        :type  service_id: :class:`str`
        :param service_id: (required)
        :type  cascade: :class:`bool` or ``None``
        :param cascade: Flag to cascade delete all the child objects, associated with it.
            (optional, default to false)
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {
            'service_id': service_id,
            'cascade': cascade,
        }
        return self._invoke('delete', payload)

    def get(self,
            service_id,
            ):
        """
        Return information about the service-insertion service with the
        given identifier.

        :type  service_id: :class:`str`
        :param service_id: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceDefinition`
        :return: com.vmware.nsx.model.ServiceDefinition
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {'service_id': service_id}
        return self._invoke('get', payload)

    def list(self):
        """
        List all service-insertion service definitions.

        :rtype: :class:`com.vmware.nsx.model_client.ServiceInsertionServiceListResult`
        :return: com.vmware.nsx.model.ServiceInsertionServiceListResult
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        # 'list' takes no operation arguments.
        return self._invoke('list', None)

    def update(self,
               service_id,
               service_definition,
               ):
        """
        Modify the specified service.

        :type  service_id: :class:`str`
        :param service_id: (required)
        :type  service_definition: :class:`com.vmware.nsx.model_client.ServiceDefinition`
        :param service_definition: (required)
        :rtype: :class:`com.vmware.nsx.model_client.ServiceDefinition`
        :return: com.vmware.nsx.model.ServiceDefinition
        :raise: :class:`com.vmware.vapi.std.errors_client.ServiceUnavailable`
            Service Unavailable
        :raise: :class:`com.vmware.vapi.std.errors_client.InvalidRequest`
            Bad Request, Precondition Failed
        :raise: :class:`com.vmware.vapi.std.errors_client.InternalServerError`
            Internal Server Error
        :raise: :class:`com.vmware.vapi.std.errors_client.Unauthorized`
            Forbidden
        :raise: :class:`com.vmware.vapi.std.errors_client.NotFound`
            Not Found
        """
        payload = {
            'service_id': service_id,
            'service_definition': service_definition,
        }
        return self._invoke('update', payload)
class _SectionsStub(ApiInterfaceStub):
    """
    REST stub for the com.vmware.nsx.serviceinsertion.sections service.

    Builds the per-operation input/output type descriptors, error maps,
    validator lists, and REST metadata, then registers them with
    :class:`ApiInterfaceStub`.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        # Every operation of this service declares the same five vAPI
        # standard errors. Build a fresh mapping per operation (as the
        # fully-expanded generated code did) so each operation owns an
        # independent descriptor dict.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # Fresh ReferenceType instances for the two model types used
        # throughout the operation descriptors below.
        def section_ref():
            return type.ReferenceType('com.vmware.nsx.model_client', 'ServiceInsertionSection')

        def rule_list_ref():
            return type.ReferenceType('com.vmware.nsx.model_client', 'ServiceInsertionSectionRuleList')

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'service_insertion_section': section_ref(),
            'id': type.OptionalType(type.StringType()),
            'operation': type.OptionalType(type.StringType()),
        })
        create_error_dict = std_errors()
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/serviceinsertion/sections',
            request_body_parameter='service_insertion_section',
            path_variables={
            },
            query_parameters={
                'id': 'id',
                'operation': 'operation',
            },
            content_type='application/json'
        )

        # properties for createwithrules operation
        createwithrules_input_type = type.StructType('operation-input', {
            'service_insertion_section_rule_list': rule_list_ref(),
            'id': type.OptionalType(type.StringType()),
            'operation': type.OptionalType(type.StringType()),
        })
        createwithrules_error_dict = std_errors()
        createwithrules_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        createwithrules_output_validator_list = [
            HasFieldsOfValidator()
        ]
        createwithrules_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/serviceinsertion/sections?action=create_with_rules',
            request_body_parameter='service_insertion_section_rule_list',
            path_variables={
            },
            query_parameters={
                'id': 'id',
                'operation': 'operation',
            },
            content_type='application/json'
        )

        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'section_id': type.StringType(),
            'cascade': type.OptionalType(type.BooleanType()),
        })
        delete_error_dict = std_errors()
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/serviceinsertion/sections/{section-id}',
            path_variables={
                'section_id': 'section-id',
            },
            query_parameters={
                'cascade': 'cascade',
            },
            content_type='application/json'
        )

        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'section_id': type.StringType(),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/serviceinsertion/sections/{section-id}',
            path_variables={
                'section_id': 'section-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )

        # properties for list operation
        list_input_type = type.StructType('operation-input', {
            'applied_tos': type.OptionalType(type.StringType()),
            'cursor': type.OptionalType(type.StringType()),
            'destinations': type.OptionalType(type.StringType()),
            'exclude_applied_to_type': type.OptionalType(type.StringType()),
            'filter_type': type.OptionalType(type.StringType()),
            'include_applied_to_type': type.OptionalType(type.StringType()),
            'included_fields': type.OptionalType(type.StringType()),
            'page_size': type.OptionalType(type.IntegerType()),
            'services': type.OptionalType(type.StringType()),
            'sort_ascending': type.OptionalType(type.BooleanType()),
            'sort_by': type.OptionalType(type.StringType()),
            'sources': type.OptionalType(type.StringType()),
            'type': type.OptionalType(type.StringType()),
        })
        list_error_dict = std_errors()
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/serviceinsertion/sections',
            path_variables={
            },
            query_parameters={
                'applied_tos': 'applied_tos',
                'cursor': 'cursor',
                'destinations': 'destinations',
                'exclude_applied_to_type': 'exclude_applied_to_type',
                'filter_type': 'filter_type',
                'include_applied_to_type': 'include_applied_to_type',
                'included_fields': 'included_fields',
                'page_size': 'page_size',
                'services': 'services',
                'sort_ascending': 'sort_ascending',
                'sort_by': 'sort_by',
                'sources': 'sources',
                'type': 'type',
            },
            content_type='application/json'
        )

        # properties for listwithrules operation
        listwithrules_input_type = type.StructType('operation-input', {
            'section_id': type.StringType(),
        })
        listwithrules_error_dict = std_errors()
        listwithrules_input_value_validator_list = [
        ]
        listwithrules_output_validator_list = [
            HasFieldsOfValidator()
        ]
        listwithrules_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/serviceinsertion/sections/{section-id}?action=list_with_rules',
            path_variables={
                'section_id': 'section-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )

        # properties for revise operation
        revise_input_type = type.StructType('operation-input', {
            'section_id': type.StringType(),
            'service_insertion_section': section_ref(),
            'id': type.OptionalType(type.StringType()),
            'operation': type.OptionalType(type.StringType()),
        })
        revise_error_dict = std_errors()
        revise_input_value_validator_list = [
        ]
        revise_output_validator_list = [
        ]
        revise_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/serviceinsertion/sections/{section-id}?action=revise',
            request_body_parameter='service_insertion_section',
            path_variables={
                'section_id': 'section-id',
            },
            query_parameters={
                'id': 'id',
                'operation': 'operation',
            },
            content_type='application/json'
        )

        # properties for revisewithrules operation
        revisewithrules_input_type = type.StructType('operation-input', {
            'section_id': type.StringType(),
            'service_insertion_section_rule_list': rule_list_ref(),
            'id': type.OptionalType(type.StringType()),
            'operation': type.OptionalType(type.StringType()),
        })
        revisewithrules_error_dict = std_errors()
        revisewithrules_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        revisewithrules_output_validator_list = [
            HasFieldsOfValidator()
        ]
        revisewithrules_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/serviceinsertion/sections/{section-id}?action=revise_with_rules',
            request_body_parameter='service_insertion_section_rule_list',
            path_variables={
                'section_id': 'section-id',
            },
            query_parameters={
                'id': 'id',
                'operation': 'operation',
            },
            content_type='application/json'
        )

        # properties for update operation
        update_input_type = type.StructType('operation-input', {
            'section_id': type.StringType(),
            'service_insertion_section': section_ref(),
        })
        update_error_dict = std_errors()
        update_input_value_validator_list = [
        ]
        update_output_validator_list = [
        ]
        update_rest_metadata = OperationRestMetadata(
            http_method='PUT',
            url_template='/api/v1/serviceinsertion/sections/{section-id}',
            request_body_parameter='service_insertion_section',
            path_variables={
                'section_id': 'section-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )

        # properties for updatewithrules operation
        updatewithrules_input_type = type.StructType('operation-input', {
            'section_id': type.StringType(),
            'service_insertion_section_rule_list': rule_list_ref(),
        })
        updatewithrules_error_dict = std_errors()
        updatewithrules_input_value_validator_list = [
            HasFieldsOfValidator()
        ]
        updatewithrules_output_validator_list = [
            HasFieldsOfValidator()
        ]
        updatewithrules_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/serviceinsertion/sections/{section-id}?action=update_with_rules',
            request_body_parameter='service_insertion_section_rule_list',
            path_variables={
                'section_id': 'section-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': section_ref(),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'createwithrules': {
                'input_type': createwithrules_input_type,
                'output_type': rule_list_ref(),
                'errors': createwithrules_error_dict,
                'input_value_validator_list': createwithrules_input_value_validator_list,
                'output_validator_list': createwithrules_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': section_ref(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceInsertionSectionListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'listwithrules': {
                'input_type': listwithrules_input_type,
                'output_type': rule_list_ref(),
                'errors': listwithrules_error_dict,
                'input_value_validator_list': listwithrules_input_value_validator_list,
                'output_validator_list': listwithrules_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'revise': {
                'input_type': revise_input_type,
                'output_type': section_ref(),
                'errors': revise_error_dict,
                'input_value_validator_list': revise_input_value_validator_list,
                'output_validator_list': revise_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'revisewithrules': {
                'input_type': revisewithrules_input_type,
                'output_type': rule_list_ref(),
                'errors': revisewithrules_error_dict,
                'input_value_validator_list': revisewithrules_input_value_validator_list,
                'output_validator_list': revisewithrules_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'update': {
                'input_type': update_input_type,
                'output_type': section_ref(),
                'errors': update_error_dict,
                'input_value_validator_list': update_input_value_validator_list,
                'output_validator_list': update_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'updatewithrules': {
                'input_type': updatewithrules_input_type,
                'output_type': rule_list_ref(),
                'errors': updatewithrules_error_dict,
                'input_value_validator_list': updatewithrules_input_value_validator_list,
                'output_validator_list': updatewithrules_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'createwithrules': createwithrules_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
            'listwithrules': listwithrules_rest_metadata,
            'revise': revise_rest_metadata,
            'revisewithrules': revisewithrules_rest_metadata,
            'update': update_rest_metadata,
            'updatewithrules': updatewithrules_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.serviceinsertion.sections',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ServiceAttachmentsStub(ApiInterfaceStub):
    """
    REST stub for the com.vmware.nsx.serviceinsertion.service_attachments
    service.

    Builds the per-operation input/output type descriptors, error maps,
    validator lists, and REST metadata, then registers them with
    :class:`ApiInterfaceStub`.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        # Every operation of this service declares the same five vAPI
        # standard errors. Build a fresh mapping per operation (as the
        # fully-expanded generated code did) so each operation owns an
        # independent descriptor dict.
        def std_errors():
            return {
                'com.vmware.vapi.std.errors.service_unavailable':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
                'com.vmware.vapi.std.errors.invalid_request':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
                'com.vmware.vapi.std.errors.internal_server_error':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
                'com.vmware.vapi.std.errors.unauthorized':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
                'com.vmware.vapi.std.errors.not_found':
                    type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
            }

        # Fresh ReferenceType for the model type shared by create/get.
        def attachment_ref():
            return type.ReferenceType('com.vmware.nsx.model_client', 'ServiceAttachment')

        # properties for create operation
        create_input_type = type.StructType('operation-input', {
            'service_attachment': attachment_ref(),
        })
        create_error_dict = std_errors()
        create_input_value_validator_list = [
        ]
        create_output_validator_list = [
        ]
        create_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/api/v1/serviceinsertion/service-attachments',
            request_body_parameter='service_attachment',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )

        # properties for delete operation
        delete_input_type = type.StructType('operation-input', {
            'service_attachment_id': type.StringType(),
        })
        delete_error_dict = std_errors()
        delete_input_value_validator_list = [
        ]
        delete_output_validator_list = [
        ]
        delete_rest_metadata = OperationRestMetadata(
            http_method='DELETE',
            url_template='/api/v1/serviceinsertion/service-attachments/{service-attachment-id}',
            path_variables={
                'service_attachment_id': 'service-attachment-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )

        # properties for get operation
        get_input_type = type.StructType('operation-input', {
            'service_attachment_id': type.StringType(),
        })
        get_error_dict = std_errors()
        get_input_value_validator_list = [
        ]
        get_output_validator_list = [
        ]
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/serviceinsertion/service-attachments/{service-attachment-id}',
            path_variables={
                'service_attachment_id': 'service-attachment-id',
            },
            query_parameters={
            },
            content_type='application/json'
        )

        # properties for list operation
        list_input_type = type.StructType('operation-input', {})
        list_error_dict = std_errors()
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/api/v1/serviceinsertion/service-attachments',
            path_variables={
            },
            query_parameters={
            },
            content_type='application/json'
        )

        operations = {
            'create': {
                'input_type': create_input_type,
                'output_type': attachment_ref(),
                'errors': create_error_dict,
                'input_value_validator_list': create_input_value_validator_list,
                'output_validator_list': create_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'delete': {
                'input_type': delete_input_type,
                'output_type': type.VoidType(),
                'errors': delete_error_dict,
                'input_value_validator_list': delete_input_value_validator_list,
                'output_validator_list': delete_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'get': {
                'input_type': get_input_type,
                'output_type': attachment_ref(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'list': {
                'input_type': list_input_type,
                'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceAttachmentListResult'),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'create': create_rest_metadata,
            'delete': delete_rest_metadata,
            'get': get_rest_metadata,
            'list': list_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.nsx.serviceinsertion.service_attachments',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=False)
class _ServicesStub(ApiInterfaceStub):
def __init__(self, config):
# properties for create operation
create_input_type = type.StructType('operation-input', {
'service_definition': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceDefinition'),
})
create_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
create_input_value_validator_list = [
]
create_output_validator_list = [
]
create_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/api/v1/serviceinsertion/services',
request_body_parameter='service_definition',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for delete operation
delete_input_type = type.StructType('operation-input', {
'service_id': type.StringType(),
'cascade': type.OptionalType(type.BooleanType()),
})
delete_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
delete_input_value_validator_list = [
]
delete_output_validator_list = [
]
delete_rest_metadata = OperationRestMetadata(
http_method='DELETE',
url_template='/api/v1/serviceinsertion/services/{service-id}',
path_variables={
'service_id': 'service-id',
},
query_parameters={
'cascade': 'cascade',
},
content_type='application/json'
)
# properties for get operation
get_input_type = type.StructType('operation-input', {
'service_id': type.StringType(),
})
get_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/serviceinsertion/services/{service-id}',
path_variables={
'service_id': 'service-id',
},
query_parameters={
},
content_type='application/json'
)
# properties for list operation
list_input_type = type.StructType('operation-input', {})
list_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/api/v1/serviceinsertion/services',
path_variables={
},
query_parameters={
},
content_type='application/json'
)
# properties for update operation
update_input_type = type.StructType('operation-input', {
'service_id': type.StringType(),
'service_definition': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceDefinition'),
})
update_error_dict = {
'com.vmware.vapi.std.errors.service_unavailable':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'ServiceUnavailable'),
'com.vmware.vapi.std.errors.invalid_request':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InvalidRequest'),
'com.vmware.vapi.std.errors.internal_server_error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'InternalServerError'),
'com.vmware.vapi.std.errors.unauthorized':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Unauthorized'),
'com.vmware.vapi.std.errors.not_found':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'NotFound'),
}
update_input_value_validator_list = [
]
update_output_validator_list = [
]
update_rest_metadata = OperationRestMetadata(
http_method='PUT',
url_template='/api/v1/serviceinsertion/services/{service-id}',
request_body_parameter='service_definition',
path_variables={
'service_id': 'service-id',
},
query_parameters={
},
content_type='application/json'
)
operations = {
'create': {
'input_type': create_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceDefinition'),
'errors': create_error_dict,
'input_value_validator_list': create_input_value_validator_list,
'output_validator_list': create_output_validator_list,
'task_type': TaskType.NONE,
},
'delete': {
'input_type': delete_input_type,
'output_type': type.VoidType(),
'errors': delete_error_dict,
'input_value_validator_list': delete_input_value_validator_list,
'output_validator_list': delete_output_validator_list,
'task_type': TaskType.NONE,
},
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceDefinition'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
'list': {
'input_type': list_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceInsertionServiceListResult'),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'update': {
'input_type': update_input_type,
'output_type': type.ReferenceType('com.vmware.nsx.model_client', 'ServiceDefinition'),
'errors': update_error_dict,
'input_value_validator_list': update_input_value_validator_list,
'output_validator_list': update_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'create': create_rest_metadata,
'delete': delete_rest_metadata,
'get': get_rest_metadata,
'list': list_rest_metadata,
'update': update_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.nsx.serviceinsertion.services',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=False)
class StubFactory(StubFactoryBase):
_attrs = {
'Sections': Sections,
'ServiceAttachments': ServiceAttachments,
'Services': Services,
'sections': 'com.vmware.nsx.serviceinsertion.sections_client.StubFactory',
'services': 'com.vmware.nsx.serviceinsertion.services_client.StubFactory',
}
| 44.731917 | 136 | 0.609063 | 7,544 | 77,923 | 6.070122 | 0.045997 | 0.070753 | 0.080908 | 0.099579 | 0.878715 | 0.861442 | 0.843382 | 0.824274 | 0.809054 | 0.804795 | 0 | 0.001365 | 0.285307 | 77,923 | 1,741 | 137 | 44.757611 | 0.820905 | 0.273462 | 0 | 0.662524 | 1 | 0 | 0.322088 | 0.226414 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023901 | false | 0 | 0.011472 | 0 | 0.089866 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
545618d5bbc1a18a13f8ebab1d16c4eb8fcd86fe | 262 | py | Python | src/__init__.py | damiancs/CodNumericPersonal | 5c415ebce9e3bcfb36821dba7395a5bd24b5a2cb | [
"MIT"
] | null | null | null | src/__init__.py | damiancs/CodNumericPersonal | 5c415ebce9e3bcfb36821dba7395a5bd24b5a2cb | [
"MIT"
] | null | null | null | src/__init__.py | damiancs/CodNumericPersonal | 5c415ebce9e3bcfb36821dba7395a5bd24b5a2cb | [
"MIT"
] | null | null | null | # coding=utf-8
"""
Initializing file for CodNumericPersonal module.
"""
from CodNumericPersonal.cnp import CNP
from CodNumericPersonal.errors import CNPError, SexError, DateError, CountyError
__all__ = ["CNP", "CNPError", "SexError", "DateError", "CountyError"] | 32.75 | 80 | 0.774809 | 27 | 262 | 7.37037 | 0.62963 | 0.221106 | 0.251256 | 0.361809 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004255 | 0.103053 | 262 | 8 | 81 | 32.75 | 0.842553 | 0.236641 | 0 | 0 | 0 | 0 | 0.202073 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
54582c3e6820e85e9ee8e9173af7e252e8086374 | 5,874 | py | Python | test-framework/test-suites/integration/tests/ansible/test_stacki_storage_controller_info.py | sammeidinger/stack | a8085dce179dbe903f65f136f4b63bcc076cc057 | [
"BSD-3-Clause"
] | 123 | 2015-05-12T23:36:45.000Z | 2017-07-05T23:26:57.000Z | test-framework/test-suites/integration/tests/ansible/test_stacki_storage_controller_info.py | sammeidinger/stack | a8085dce179dbe903f65f136f4b63bcc076cc057 | [
"BSD-3-Clause"
] | 177 | 2015-06-05T19:17:47.000Z | 2017-07-07T17:57:24.000Z | test-framework/test-suites/integration/tests/ansible/test_stacki_storage_controller_info.py | sammeidinger/stack | a8085dce179dbe903f65f136f4b63bcc076cc057 | [
"BSD-3-Clause"
] | 32 | 2015-06-07T02:25:03.000Z | 2017-06-23T07:35:35.000Z | class TestStackiStorageControllerInfo:
def test_global_scope_no_name(self, run_ansible_module):
result = run_ansible_module("stacki_storage_controller_info")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": None,
"arrayid": "*",
"enclosure": None,
"options": "",
"raidlevel": "0",
"slot": "*"
}]
def test_global_scope_with_name(self, run_ansible_module):
result = run_ansible_module("stacki_storage_controller_info", name="foo")
assert result.status == "FAILED!"
assert result.data["changed"] == False
assert "error" in result.data["msg"]
assert "Arguments are not allowed" in result.data["msg"]
def test_appliance_scope_no_name(self, host, run_ansible_module):
result = host.run(
'stack add appliance storage controller backend raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="appliance")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": 2,
"appliance": "backend",
"arrayid": "4",
"enclosure": 1,
"options": "",
"raidlevel": "0",
"slot": "3"
}]
def test_appliance_scope_with_name(self, host, run_ansible_module):
result = host.run(
'stack add appliance storage controller backend raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="appliance", name="backend")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": 2,
"appliance": "backend",
"arrayid": "4",
"enclosure": 1,
"options": "",
"raidlevel": "0",
"slot": "3"
}]
def test_os_scope_no_name(self, host, run_ansible_module):
result = host.run(
'stack add os storage controller sles raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="os")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": 2,
"arrayid": "4",
"enclosure": 1,
"options": "",
"os": "sles",
"raidlevel": "0",
"slot": "3"
}]
def test_os_scope_with_name(self, host, run_ansible_module):
result = host.run(
'stack add os storage controller sles raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="os", name="sles")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": 2,
"arrayid": "4",
"enclosure": 1,
"options": "",
"os": "sles",
"raidlevel": "0",
"slot": "3"
}]
def test_environment_scope_no_name(self, host, add_environment, run_ansible_module):
result = host.run(
'stack add environment storage controller test raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="environment")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": 2,
"arrayid": "4",
"enclosure": 1,
"environment": "test",
"options": "",
"raidlevel": "0",
"slot": "3"
}]
def test_environment_scope_with_name(self, host, add_environment, run_ansible_module):
result = host.run(
'stack add environment storage controller test raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="environment", name="test")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": 2,
"arrayid": "4",
"enclosure": 1,
"environment": "test",
"options": "",
"raidlevel": "0",
"slot": "3"
}]
def test_host_scope_no_name(self, host, add_host, run_ansible_module):
result = host.run(
'stack add host storage controller backend-0-0 raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="host")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [
{
"adapter": 2,
"arrayid": "4",
"enclosure": 1,
"host": "backend-0-0",
"options": "",
"raidlevel": "0",
"slot": "3",
"source": "H"
},
{
"adapter": None,
"arrayid": "*",
"enclosure": None,
"host": "frontend-0-0",
"options": "",
"raidlevel": "0",
"slot": "*",
"source": "G"
}
]
def test_host_scope_with_name(self, host, add_host, run_ansible_module):
result = host.run(
'stack add host storage controller backend-0-0 raidlevel=0 enclosure=1 '
'adapter=2 slot=3 arrayid=4'
)
assert result.rc == 0
result = run_ansible_module("stacki_storage_controller_info", scope="host", name="backend-0-0")
assert result.status == "SUCCESS"
assert result.data["changed"] == False
assert result.data["controllers"] == [{
"adapter": 2,
"arrayid": "4",
"enclosure": 1,
"host": "backend-0-0",
"options": "",
"raidlevel": "0",
"slot": "3",
"source": "H"
}]
def test_bad_name(self, run_ansible_module):
result = run_ansible_module("stacki_storage_controller_info", scope="appliance", name="foo")
assert result.status == "FAILED!"
assert result.data["changed"] == False
assert "error" in result.data["msg"]
assert "not a valid appliance" in result.data["msg"]
| 26.223214 | 98 | 0.655941 | 735 | 5,874 | 5.07483 | 0.084354 | 0.125469 | 0.09437 | 0.064879 | 0.926542 | 0.911796 | 0.899732 | 0.899732 | 0.89008 | 0.89008 | 0 | 0.021184 | 0.180286 | 5,874 | 223 | 99 | 26.340807 | 0.753479 | 0 | 0 | 0.765027 | 0 | 0 | 0.356997 | 0.05618 | 0 | 0 | 0 | 0 | 0.234973 | 1 | 0.060109 | false | 0 | 0 | 0 | 0.065574 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
49b7273eacaee86646ddd73abe2ec20d573d8fc0 | 89,824 | py | Python | sdk/python/pulumi_aws_native/redshift/cluster.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | 29 | 2021-09-30T19:32:07.000Z | 2022-03-22T21:06:08.000Z | sdk/python/pulumi_aws_native/redshift/cluster.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | 232 | 2021-09-30T19:26:26.000Z | 2022-03-31T23:22:06.000Z | sdk/python/pulumi_aws_native/redshift/cluster.py | AaronFriel/pulumi-aws-native | 5621690373ac44accdbd20b11bae3be1baf022d1 | [
"Apache-2.0"
] | 4 | 2021-11-10T19:42:01.000Z | 2022-02-05T10:15:49.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ClusterArgs', 'Cluster']
@pulumi.input_type
class ClusterArgs:
def __init__(__self__, *,
cluster_type: pulumi.Input[str],
d_b_name: pulumi.Input[str],
master_user_password: pulumi.Input[str],
master_username: pulumi.Input[str],
node_type: pulumi.Input[str],
allow_version_upgrade: Optional[pulumi.Input[bool]] = None,
aqua_configuration_status: Optional[pulumi.Input[str]] = None,
automated_snapshot_retention_period: Optional[pulumi.Input[int]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
availability_zone_relocation: Optional[pulumi.Input[bool]] = None,
availability_zone_relocation_status: Optional[pulumi.Input[str]] = None,
classic: Optional[pulumi.Input[bool]] = None,
cluster_identifier: Optional[pulumi.Input[str]] = None,
cluster_parameter_group_name: Optional[pulumi.Input[str]] = None,
cluster_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
cluster_subnet_group_name: Optional[pulumi.Input[str]] = None,
cluster_version: Optional[pulumi.Input[str]] = None,
defer_maintenance: Optional[pulumi.Input[bool]] = None,
defer_maintenance_duration: Optional[pulumi.Input[int]] = None,
defer_maintenance_end_time: Optional[pulumi.Input[str]] = None,
defer_maintenance_start_time: Optional[pulumi.Input[str]] = None,
destination_region: Optional[pulumi.Input[str]] = None,
elastic_ip: Optional[pulumi.Input[str]] = None,
encrypted: Optional[pulumi.Input[bool]] = None,
endpoint: Optional[pulumi.Input['ClusterEndpointArgs']] = None,
enhanced_vpc_routing: Optional[pulumi.Input[bool]] = None,
hsm_client_certificate_identifier: Optional[pulumi.Input[str]] = None,
hsm_configuration_identifier: Optional[pulumi.Input[str]] = None,
iam_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
kms_key_id: Optional[pulumi.Input[str]] = None,
logging_properties: Optional[pulumi.Input['ClusterLoggingPropertiesArgs']] = None,
maintenance_track_name: Optional[pulumi.Input[str]] = None,
manual_snapshot_retention_period: Optional[pulumi.Input[int]] = None,
number_of_nodes: Optional[pulumi.Input[int]] = None,
owner_account: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
preferred_maintenance_window: Optional[pulumi.Input[str]] = None,
publicly_accessible: Optional[pulumi.Input[bool]] = None,
resource_action: Optional[pulumi.Input[str]] = None,
revision_target: Optional[pulumi.Input[str]] = None,
rotate_encryption_key: Optional[pulumi.Input[bool]] = None,
snapshot_cluster_identifier: Optional[pulumi.Input[str]] = None,
snapshot_copy_grant_name: Optional[pulumi.Input[str]] = None,
snapshot_copy_manual: Optional[pulumi.Input[bool]] = None,
snapshot_copy_retention_period: Optional[pulumi.Input[int]] = None,
snapshot_identifier: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterTagArgs']]]] = None,
vpc_security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a Cluster resource.
:param pulumi.Input[str] cluster_type: The type of the cluster. When cluster type is specified as single-node, the NumberOfNodes parameter is not required and if multi-node, the NumberOfNodes parameter is required
:param pulumi.Input[str] d_b_name: The name of the first database to be created when the cluster is created. To create additional databases after the cluster is created, connect to the cluster with a SQL client and use SQL commands to create a database.
:param pulumi.Input[str] master_user_password: The password associated with the master user account for the cluster that is being created. Password must be between 8 and 64 characters in length, should have at least one uppercase letter.Must contain at least one lowercase letter.Must contain one number.Can be any printable ASCII character.
:param pulumi.Input[str] master_username: The user name associated with the master user account for the cluster that is being created. The user name can't be PUBLIC and first character must be a letter.
:param pulumi.Input[str] node_type: The node type to be provisioned for the cluster.Valid Values: ds2.xlarge | ds2.8xlarge | dc1.large | dc1.8xlarge | dc2.large | dc2.8xlarge | ra3.4xlarge | ra3.16xlarge
:param pulumi.Input[bool] allow_version_upgrade: Major version upgrades can be applied during the maintenance window to the Amazon Redshift engine that is running on the cluster. Default value is True
:param pulumi.Input[str] aqua_configuration_status: The value represents how the cluster is configured to use AQUA (Advanced Query Accelerator) after the cluster is restored. Possible values include the following.
enabled - Use AQUA if it is available for the current Region and Amazon Redshift node type.
disabled - Don't use AQUA.
auto - Amazon Redshift determines whether to use AQUA.
:param pulumi.Input[int] automated_snapshot_retention_period: The number of days that automated snapshots are retained. If the value is 0, automated snapshots are disabled. Default value is 1
:param pulumi.Input[str] availability_zone: The EC2 Availability Zone (AZ) in which you want Amazon Redshift to provision the cluster. Default: A random, system-chosen Availability Zone in the region that is specified by the endpoint
:param pulumi.Input[bool] availability_zone_relocation: The option to enable relocation for an Amazon Redshift cluster between Availability Zones after the cluster modification is complete.
:param pulumi.Input[str] availability_zone_relocation_status: The availability zone relocation status of the cluster
:param pulumi.Input[bool] classic: A boolean value indicating whether the resize operation is using the classic resize process. If you don't provide this parameter or set the value to false , the resize type is elastic.
:param pulumi.Input[str] cluster_identifier: A unique identifier for the cluster. You use this identifier to refer to the cluster for any subsequent cluster operations such as deleting or modifying. All alphabetical characters must be lower case, no hypens at the end, no two consecutive hyphens. Cluster name should be unique for all clusters within an AWS account
:param pulumi.Input[str] cluster_parameter_group_name: The name of the parameter group to be associated with this cluster.
:param pulumi.Input[Sequence[pulumi.Input[str]]] cluster_security_groups: A list of security groups to be associated with this cluster.
:param pulumi.Input[str] cluster_subnet_group_name: The name of a cluster subnet group to be associated with this cluster.
:param pulumi.Input[str] cluster_version: The version of the Amazon Redshift engine software that you want to deploy on the cluster.The version selected runs on all the nodes in the cluster.
:param pulumi.Input[bool] defer_maintenance: A boolean indicating whether to enable the deferred maintenance window.
:param pulumi.Input[int] defer_maintenance_duration: An integer indicating the duration of the maintenance window in days. If you specify a duration, you can't specify an end time. The duration must be 45 days or less.
:param pulumi.Input[str] defer_maintenance_end_time: A timestamp indicating end time for the deferred maintenance window. If you specify an end time, you can't specify a duration.
:param pulumi.Input[str] defer_maintenance_start_time: A timestamp indicating the start time for the deferred maintenance window.
:param pulumi.Input[str] destination_region: The destination AWS Region that you want to copy snapshots to. Constraints: Must be the name of a valid AWS Region. For more information, see Regions and Endpoints in the Amazon Web Services [https://docs.aws.amazon.com/general/latest/gr/rande.html#redshift_region] General Reference
:param pulumi.Input[str] elastic_ip: The Elastic IP (EIP) address for the cluster.
:param pulumi.Input[bool] encrypted: If true, the data in the cluster is encrypted at rest.
:param pulumi.Input[bool] enhanced_vpc_routing: An option that specifies whether to create the cluster with enhanced VPC routing enabled. To create a cluster that uses enhanced VPC routing, the cluster must be in a VPC. For more information, see Enhanced VPC Routing in the Amazon Redshift Cluster Management Guide.
If this option is true , enhanced VPC routing is enabled.
Default: false
:param pulumi.Input[str] hsm_client_certificate_identifier: Specifies the name of the HSM client certificate the Amazon Redshift cluster uses to retrieve the data encryption keys stored in an HSM
:param pulumi.Input[str] hsm_configuration_identifier: Specifies the name of the HSM configuration that contains the information the Amazon Redshift cluster can use to retrieve and store keys in an HSM.
:param pulumi.Input[Sequence[pulumi.Input[str]]] iam_roles: A list of AWS Identity and Access Management (IAM) roles that can be used by the cluster to access other AWS services. You must supply the IAM roles in their Amazon Resource Name (ARN) format. You can supply up to 10 IAM roles in a single request
:param pulumi.Input[str] kms_key_id: The AWS Key Management Service (KMS) key ID of the encryption key that you want to use to encrypt data in the cluster.
:param pulumi.Input[str] maintenance_track_name: The name for the maintenance track that you want to assign for the cluster. This name change is asynchronous. The new track name stays in the PendingModifiedValues for the cluster until the next maintenance window. When the maintenance track changes, the cluster is switched to the latest cluster release available for the maintenance track. At this point, the maintenance track name is applied.
:param pulumi.Input[int] manual_snapshot_retention_period: The number of days to retain newly copied snapshots in the destination AWS Region after they are copied from the source AWS Region. If the value is -1, the manual snapshot is retained indefinitely.
The value must be either -1 or an integer between 1 and 3,653.
:param pulumi.Input[int] number_of_nodes: The number of compute nodes in the cluster. This parameter is required when the ClusterType parameter is specified as multi-node.
:param pulumi.Input[int] port: The port number on which the cluster accepts incoming connections. The cluster is accessible only via the JDBC and ODBC connection strings
:param pulumi.Input[str] preferred_maintenance_window: The weekly time range (in UTC) during which automated cluster maintenance can occur.
:param pulumi.Input[bool] publicly_accessible: If true, the cluster can be accessed from a public network.
:param pulumi.Input[str] resource_action: The Redshift operation to be performed. Resource Action supports pause-cluster, resume-cluster APIs
:param pulumi.Input[str] revision_target: The identifier of the database revision. You can retrieve this value from the response to the DescribeClusterDbRevisions request.
:param pulumi.Input[bool] rotate_encryption_key: A boolean indicating if we want to rotate Encryption Keys.
:param pulumi.Input[str] snapshot_cluster_identifier: The name of the cluster the source snapshot was created from. This parameter is required if your IAM user has a policy containing a snapshot resource element that specifies anything other than * for the cluster name.
:param pulumi.Input[str] snapshot_copy_grant_name: The name of the snapshot copy grant to use when snapshots of an AWS KMS-encrypted cluster are copied to the destination region.
:param pulumi.Input[bool] snapshot_copy_manual: Indicates whether to apply the snapshot retention period to newly copied manual snapshots instead of automated snapshots.
:param pulumi.Input[int] snapshot_copy_retention_period: The number of days to retain automated snapshots in the destination region after they are copied from the source region.
Default is 7.
Constraints: Must be at least 1 and no more than 35.
:param pulumi.Input[str] snapshot_identifier: The name of the snapshot from which to create the new cluster. This parameter isn't case sensitive.
:param pulumi.Input[Sequence[pulumi.Input['ClusterTagArgs']]] tags: The list of tags for the cluster parameter group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] vpc_security_group_ids: A list of Virtual Private Cloud (VPC) security groups to be associated with the cluster.
"""
pulumi.set(__self__, "cluster_type", cluster_type)
pulumi.set(__self__, "d_b_name", d_b_name)
pulumi.set(__self__, "master_user_password", master_user_password)
pulumi.set(__self__, "master_username", master_username)
pulumi.set(__self__, "node_type", node_type)
if allow_version_upgrade is not None:
pulumi.set(__self__, "allow_version_upgrade", allow_version_upgrade)
if aqua_configuration_status is not None:
pulumi.set(__self__, "aqua_configuration_status", aqua_configuration_status)
if automated_snapshot_retention_period is not None:
pulumi.set(__self__, "automated_snapshot_retention_period", automated_snapshot_retention_period)
if availability_zone is not None:
pulumi.set(__self__, "availability_zone", availability_zone)
if availability_zone_relocation is not None:
pulumi.set(__self__, "availability_zone_relocation", availability_zone_relocation)
if availability_zone_relocation_status is not None:
pulumi.set(__self__, "availability_zone_relocation_status", availability_zone_relocation_status)
if classic is not None:
pulumi.set(__self__, "classic", classic)
if cluster_identifier is not None:
pulumi.set(__self__, "cluster_identifier", cluster_identifier)
if cluster_parameter_group_name is not None:
pulumi.set(__self__, "cluster_parameter_group_name", cluster_parameter_group_name)
if cluster_security_groups is not None:
pulumi.set(__self__, "cluster_security_groups", cluster_security_groups)
if cluster_subnet_group_name is not None:
pulumi.set(__self__, "cluster_subnet_group_name", cluster_subnet_group_name)
if cluster_version is not None:
pulumi.set(__self__, "cluster_version", cluster_version)
if defer_maintenance is not None:
pulumi.set(__self__, "defer_maintenance", defer_maintenance)
if defer_maintenance_duration is not None:
pulumi.set(__self__, "defer_maintenance_duration", defer_maintenance_duration)
if defer_maintenance_end_time is not None:
pulumi.set(__self__, "defer_maintenance_end_time", defer_maintenance_end_time)
if defer_maintenance_start_time is not None:
pulumi.set(__self__, "defer_maintenance_start_time", defer_maintenance_start_time)
if destination_region is not None:
pulumi.set(__self__, "destination_region", destination_region)
if elastic_ip is not None:
pulumi.set(__self__, "elastic_ip", elastic_ip)
if encrypted is not None:
pulumi.set(__self__, "encrypted", encrypted)
if endpoint is not None:
pulumi.set(__self__, "endpoint", endpoint)
if enhanced_vpc_routing is not None:
pulumi.set(__self__, "enhanced_vpc_routing", enhanced_vpc_routing)
if hsm_client_certificate_identifier is not None:
pulumi.set(__self__, "hsm_client_certificate_identifier", hsm_client_certificate_identifier)
if hsm_configuration_identifier is not None:
pulumi.set(__self__, "hsm_configuration_identifier", hsm_configuration_identifier)
if iam_roles is not None:
pulumi.set(__self__, "iam_roles", iam_roles)
if kms_key_id is not None:
pulumi.set(__self__, "kms_key_id", kms_key_id)
if logging_properties is not None:
pulumi.set(__self__, "logging_properties", logging_properties)
if maintenance_track_name is not None:
pulumi.set(__self__, "maintenance_track_name", maintenance_track_name)
if manual_snapshot_retention_period is not None:
pulumi.set(__self__, "manual_snapshot_retention_period", manual_snapshot_retention_period)
if number_of_nodes is not None:
pulumi.set(__self__, "number_of_nodes", number_of_nodes)
if owner_account is not None:
pulumi.set(__self__, "owner_account", owner_account)
if port is not None:
pulumi.set(__self__, "port", port)
if preferred_maintenance_window is not None:
pulumi.set(__self__, "preferred_maintenance_window", preferred_maintenance_window)
if publicly_accessible is not None:
pulumi.set(__self__, "publicly_accessible", publicly_accessible)
if resource_action is not None:
pulumi.set(__self__, "resource_action", resource_action)
if revision_target is not None:
pulumi.set(__self__, "revision_target", revision_target)
if rotate_encryption_key is not None:
pulumi.set(__self__, "rotate_encryption_key", rotate_encryption_key)
if snapshot_cluster_identifier is not None:
pulumi.set(__self__, "snapshot_cluster_identifier", snapshot_cluster_identifier)
if snapshot_copy_grant_name is not None:
pulumi.set(__self__, "snapshot_copy_grant_name", snapshot_copy_grant_name)
if snapshot_copy_manual is not None:
pulumi.set(__self__, "snapshot_copy_manual", snapshot_copy_manual)
if snapshot_copy_retention_period is not None:
pulumi.set(__self__, "snapshot_copy_retention_period", snapshot_copy_retention_period)
if snapshot_identifier is not None:
pulumi.set(__self__, "snapshot_identifier", snapshot_identifier)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if vpc_security_group_ids is not None:
pulumi.set(__self__, "vpc_security_group_ids", vpc_security_group_ids)
@property
@pulumi.getter(name="clusterType")
def cluster_type(self) -> pulumi.Input[str]:
    """
    The type of the cluster. When cluster type is specified as single-node, the NumberOfNodes parameter is not required and if multi-node, the NumberOfNodes parameter is required
    """
    return pulumi.get(self, "cluster_type")

@cluster_type.setter
def cluster_type(self, value: pulumi.Input[str]) -> None:
    # Replace the stored "cluster_type" input value.
    pulumi.set(self, "cluster_type", value)
@property
@pulumi.getter(name="dBName")
def d_b_name(self) -> pulumi.Input[str]:
    """
    The name of the first database to be created when the cluster is created. To create additional databases after the cluster is created, connect to the cluster with a SQL client and use SQL commands to create a database.
    """
    return pulumi.get(self, "d_b_name")

@d_b_name.setter
def d_b_name(self, value: pulumi.Input[str]) -> None:
    # Replace the stored "d_b_name" input value.
    pulumi.set(self, "d_b_name", value)
@property
@pulumi.getter(name="masterUserPassword")
def master_user_password(self) -> pulumi.Input[str]:
    """
    The password associated with the master user account for the cluster that is being created. Password must be between 8 and 64 characters in length, should have at least one uppercase letter.Must contain at least one lowercase letter.Must contain one number.Can be any printable ASCII character.
    """
    return pulumi.get(self, "master_user_password")

@master_user_password.setter
def master_user_password(self, value: pulumi.Input[str]) -> None:
    # Replace the stored "master_user_password" input value.
    pulumi.set(self, "master_user_password", value)
@property
@pulumi.getter(name="masterUsername")
def master_username(self) -> pulumi.Input[str]:
    """
    The user name associated with the master user account for the cluster that is being created. The user name can't be PUBLIC and first character must be a letter.
    """
    return pulumi.get(self, "master_username")

@master_username.setter
def master_username(self, value: pulumi.Input[str]) -> None:
    # Replace the stored "master_username" input value.
    pulumi.set(self, "master_username", value)
@property
@pulumi.getter(name="nodeType")
def node_type(self) -> pulumi.Input[str]:
    """
    The node type to be provisioned for the cluster.Valid Values: ds2.xlarge | ds2.8xlarge | dc1.large | dc1.8xlarge | dc2.large | dc2.8xlarge | ra3.4xlarge | ra3.16xlarge
    """
    return pulumi.get(self, "node_type")

@node_type.setter
def node_type(self, value: pulumi.Input[str]) -> None:
    # Replace the stored "node_type" input value.
    pulumi.set(self, "node_type", value)
@property
@pulumi.getter(name="allowVersionUpgrade")
def allow_version_upgrade(self) -> Optional[pulumi.Input[bool]]:
    """
    Major version upgrades can be applied during the maintenance window to the Amazon Redshift engine that is running on the cluster. Default value is True
    """
    return pulumi.get(self, "allow_version_upgrade")

@allow_version_upgrade.setter
def allow_version_upgrade(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "allow_version_upgrade" input value.
    pulumi.set(self, "allow_version_upgrade", value)
@property
@pulumi.getter(name="aquaConfigurationStatus")
def aqua_configuration_status(self) -> Optional[pulumi.Input[str]]:
    """
    The value represents how the cluster is configured to use AQUA (Advanced Query Accelerator) after the cluster is restored. Possible values include the following.

    enabled - Use AQUA if it is available for the current Region and Amazon Redshift node type.
    disabled - Don't use AQUA.
    auto - Amazon Redshift determines whether to use AQUA.
    """
    return pulumi.get(self, "aqua_configuration_status")

@aqua_configuration_status.setter
def aqua_configuration_status(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "aqua_configuration_status" input value.
    pulumi.set(self, "aqua_configuration_status", value)
@property
@pulumi.getter(name="automatedSnapshotRetentionPeriod")
def automated_snapshot_retention_period(self) -> Optional[pulumi.Input[int]]:
    """
    The number of days that automated snapshots are retained. If the value is 0, automated snapshots are disabled. Default value is 1
    """
    return pulumi.get(self, "automated_snapshot_retention_period")

@automated_snapshot_retention_period.setter
def automated_snapshot_retention_period(self, value: Optional[pulumi.Input[int]]) -> None:
    # Replace the stored "automated_snapshot_retention_period" input value.
    pulumi.set(self, "automated_snapshot_retention_period", value)
@property
@pulumi.getter(name="availabilityZone")
def availability_zone(self) -> Optional[pulumi.Input[str]]:
    """
    The EC2 Availability Zone (AZ) in which you want Amazon Redshift to provision the cluster. Default: A random, system-chosen Availability Zone in the region that is specified by the endpoint
    """
    return pulumi.get(self, "availability_zone")

@availability_zone.setter
def availability_zone(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "availability_zone" input value.
    pulumi.set(self, "availability_zone", value)
@property
@pulumi.getter(name="availabilityZoneRelocation")
def availability_zone_relocation(self) -> Optional[pulumi.Input[bool]]:
    """
    The option to enable relocation for an Amazon Redshift cluster between Availability Zones after the cluster modification is complete.
    """
    return pulumi.get(self, "availability_zone_relocation")

@availability_zone_relocation.setter
def availability_zone_relocation(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "availability_zone_relocation" input value.
    pulumi.set(self, "availability_zone_relocation", value)
@property
@pulumi.getter(name="availabilityZoneRelocationStatus")
def availability_zone_relocation_status(self) -> Optional[pulumi.Input[str]]:
    """
    The availability zone relocation status of the cluster
    """
    return pulumi.get(self, "availability_zone_relocation_status")

@availability_zone_relocation_status.setter
def availability_zone_relocation_status(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "availability_zone_relocation_status" input value.
    pulumi.set(self, "availability_zone_relocation_status", value)
@property
@pulumi.getter
def classic(self) -> Optional[pulumi.Input[bool]]:
    """
    A boolean value indicating whether the resize operation is using the classic resize process. If you don't provide this parameter or set the value to false , the resize type is elastic.
    """
    return pulumi.get(self, "classic")

@classic.setter
def classic(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "classic" input value.
    pulumi.set(self, "classic", value)
@property
@pulumi.getter(name="clusterIdentifier")
def cluster_identifier(self) -> Optional[pulumi.Input[str]]:
    """
    A unique identifier for the cluster. You use this identifier to refer to the cluster for any subsequent cluster operations such as deleting or modifying. All alphabetical characters must be lower case, no hypens at the end, no two consecutive hyphens. Cluster name should be unique for all clusters within an AWS account
    """
    return pulumi.get(self, "cluster_identifier")

@cluster_identifier.setter
def cluster_identifier(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "cluster_identifier" input value.
    pulumi.set(self, "cluster_identifier", value)
@property
@pulumi.getter(name="clusterParameterGroupName")
def cluster_parameter_group_name(self) -> Optional[pulumi.Input[str]]:
    """
    The name of the parameter group to be associated with this cluster.
    """
    return pulumi.get(self, "cluster_parameter_group_name")

@cluster_parameter_group_name.setter
def cluster_parameter_group_name(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "cluster_parameter_group_name" input value.
    pulumi.set(self, "cluster_parameter_group_name", value)
@property
@pulumi.getter(name="clusterSecurityGroups")
def cluster_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    A list of security groups to be associated with this cluster.
    """
    return pulumi.get(self, "cluster_security_groups")

@cluster_security_groups.setter
def cluster_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
    # Replace the stored "cluster_security_groups" input value.
    pulumi.set(self, "cluster_security_groups", value)
@property
@pulumi.getter(name="clusterSubnetGroupName")
def cluster_subnet_group_name(self) -> Optional[pulumi.Input[str]]:
    """
    The name of a cluster subnet group to be associated with this cluster.
    """
    return pulumi.get(self, "cluster_subnet_group_name")

@cluster_subnet_group_name.setter
def cluster_subnet_group_name(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "cluster_subnet_group_name" input value.
    pulumi.set(self, "cluster_subnet_group_name", value)
@property
@pulumi.getter(name="clusterVersion")
def cluster_version(self) -> Optional[pulumi.Input[str]]:
    """
    The version of the Amazon Redshift engine software that you want to deploy on the cluster.The version selected runs on all the nodes in the cluster.
    """
    return pulumi.get(self, "cluster_version")

@cluster_version.setter
def cluster_version(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "cluster_version" input value.
    pulumi.set(self, "cluster_version", value)
@property
@pulumi.getter(name="deferMaintenance")
def defer_maintenance(self) -> Optional[pulumi.Input[bool]]:
    """
    A boolean indicating whether to enable the deferred maintenance window.
    """
    return pulumi.get(self, "defer_maintenance")

@defer_maintenance.setter
def defer_maintenance(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "defer_maintenance" input value.
    pulumi.set(self, "defer_maintenance", value)
@property
@pulumi.getter(name="deferMaintenanceDuration")
def defer_maintenance_duration(self) -> Optional[pulumi.Input[int]]:
    """
    An integer indicating the duration of the maintenance window in days. If you specify a duration, you can't specify an end time. The duration must be 45 days or less.
    """
    return pulumi.get(self, "defer_maintenance_duration")

@defer_maintenance_duration.setter
def defer_maintenance_duration(self, value: Optional[pulumi.Input[int]]) -> None:
    # Replace the stored "defer_maintenance_duration" input value.
    pulumi.set(self, "defer_maintenance_duration", value)
@property
@pulumi.getter(name="deferMaintenanceEndTime")
def defer_maintenance_end_time(self) -> Optional[pulumi.Input[str]]:
    """
    A timestamp indicating end time for the deferred maintenance window. If you specify an end time, you can't specify a duration.
    """
    return pulumi.get(self, "defer_maintenance_end_time")

@defer_maintenance_end_time.setter
def defer_maintenance_end_time(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "defer_maintenance_end_time" input value.
    pulumi.set(self, "defer_maintenance_end_time", value)
@property
@pulumi.getter(name="deferMaintenanceStartTime")
def defer_maintenance_start_time(self) -> Optional[pulumi.Input[str]]:
    """
    A timestamp indicating the start time for the deferred maintenance window.
    """
    return pulumi.get(self, "defer_maintenance_start_time")

@defer_maintenance_start_time.setter
def defer_maintenance_start_time(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "defer_maintenance_start_time" input value.
    pulumi.set(self, "defer_maintenance_start_time", value)
@property
@pulumi.getter(name="destinationRegion")
def destination_region(self) -> Optional[pulumi.Input[str]]:
    """
    The destination AWS Region that you want to copy snapshots to. Constraints: Must be the name of a valid AWS Region. For more information, see Regions and Endpoints in the Amazon Web Services [https://docs.aws.amazon.com/general/latest/gr/rande.html#redshift_region] General Reference
    """
    return pulumi.get(self, "destination_region")

@destination_region.setter
def destination_region(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "destination_region" input value.
    pulumi.set(self, "destination_region", value)
@property
@pulumi.getter(name="elasticIp")
def elastic_ip(self) -> Optional[pulumi.Input[str]]:
    """
    The Elastic IP (EIP) address for the cluster.
    """
    return pulumi.get(self, "elastic_ip")

@elastic_ip.setter
def elastic_ip(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "elastic_ip" input value.
    pulumi.set(self, "elastic_ip", value)
@property
@pulumi.getter
def encrypted(self) -> Optional[pulumi.Input[bool]]:
    """
    If true, the data in the cluster is encrypted at rest.
    """
    return pulumi.get(self, "encrypted")

@encrypted.setter
def encrypted(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "encrypted" input value.
    pulumi.set(self, "encrypted", value)
@property
@pulumi.getter
def endpoint(self) -> Optional[pulumi.Input['ClusterEndpointArgs']]:
    """
    Endpoint settings for the cluster. (No description is provided by the
    upstream schema — verify intended semantics before relying on this input.)
    """
    return pulumi.get(self, "endpoint")

@endpoint.setter
def endpoint(self, value: Optional[pulumi.Input['ClusterEndpointArgs']]) -> None:
    # Replace the stored "endpoint" input value.
    pulumi.set(self, "endpoint", value)
@property
@pulumi.getter(name="enhancedVpcRouting")
def enhanced_vpc_routing(self) -> Optional[pulumi.Input[bool]]:
    """
    An option that specifies whether to create the cluster with enhanced VPC routing enabled. To create a cluster that uses enhanced VPC routing, the cluster must be in a VPC. For more information, see Enhanced VPC Routing in the Amazon Redshift Cluster Management Guide.

    If this option is true , enhanced VPC routing is enabled.

    Default: false
    """
    return pulumi.get(self, "enhanced_vpc_routing")

@enhanced_vpc_routing.setter
def enhanced_vpc_routing(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "enhanced_vpc_routing" input value.
    pulumi.set(self, "enhanced_vpc_routing", value)
@property
@pulumi.getter(name="hsmClientCertificateIdentifier")
def hsm_client_certificate_identifier(self) -> Optional[pulumi.Input[str]]:
    """
    Specifies the name of the HSM client certificate the Amazon Redshift cluster uses to retrieve the data encryption keys stored in an HSM
    """
    return pulumi.get(self, "hsm_client_certificate_identifier")

@hsm_client_certificate_identifier.setter
def hsm_client_certificate_identifier(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "hsm_client_certificate_identifier" input value.
    pulumi.set(self, "hsm_client_certificate_identifier", value)
@property
@pulumi.getter(name="hsmConfigurationIdentifier")
def hsm_configuration_identifier(self) -> Optional[pulumi.Input[str]]:
    """
    Specifies the name of the HSM configuration that contains the information the Amazon Redshift cluster can use to retrieve and store keys in an HSM.
    """
    return pulumi.get(self, "hsm_configuration_identifier")

@hsm_configuration_identifier.setter
def hsm_configuration_identifier(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "hsm_configuration_identifier" input value.
    pulumi.set(self, "hsm_configuration_identifier", value)
@property
@pulumi.getter(name="iamRoles")
def iam_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    A list of AWS Identity and Access Management (IAM) roles that can be used by the cluster to access other AWS services. You must supply the IAM roles in their Amazon Resource Name (ARN) format. You can supply up to 10 IAM roles in a single request
    """
    return pulumi.get(self, "iam_roles")

@iam_roles.setter
def iam_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
    # Replace the stored "iam_roles" input value.
    pulumi.set(self, "iam_roles", value)
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> Optional[pulumi.Input[str]]:
    """
    The AWS Key Management Service (KMS) key ID of the encryption key that you want to use to encrypt data in the cluster.
    """
    return pulumi.get(self, "kms_key_id")

@kms_key_id.setter
def kms_key_id(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "kms_key_id" input value.
    pulumi.set(self, "kms_key_id", value)
@property
@pulumi.getter(name="loggingProperties")
def logging_properties(self) -> Optional[pulumi.Input['ClusterLoggingPropertiesArgs']]:
    """
    Logging configuration for the cluster. (No description is provided by the
    upstream schema — see ClusterLoggingPropertiesArgs for the accepted fields.)
    """
    return pulumi.get(self, "logging_properties")

@logging_properties.setter
def logging_properties(self, value: Optional[pulumi.Input['ClusterLoggingPropertiesArgs']]) -> None:
    # Replace the stored "logging_properties" input value.
    pulumi.set(self, "logging_properties", value)
@property
@pulumi.getter(name="maintenanceTrackName")
def maintenance_track_name(self) -> Optional[pulumi.Input[str]]:
    """
    The name for the maintenance track that you want to assign for the cluster. This name change is asynchronous. The new track name stays in the PendingModifiedValues for the cluster until the next maintenance window. When the maintenance track changes, the cluster is switched to the latest cluster release available for the maintenance track. At this point, the maintenance track name is applied.
    """
    return pulumi.get(self, "maintenance_track_name")

@maintenance_track_name.setter
def maintenance_track_name(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "maintenance_track_name" input value.
    pulumi.set(self, "maintenance_track_name", value)
@property
@pulumi.getter(name="manualSnapshotRetentionPeriod")
def manual_snapshot_retention_period(self) -> Optional[pulumi.Input[int]]:
    """
    The number of days to retain newly copied snapshots in the destination AWS Region after they are copied from the source AWS Region. If the value is -1, the manual snapshot is retained indefinitely.

    The value must be either -1 or an integer between 1 and 3,653.
    """
    return pulumi.get(self, "manual_snapshot_retention_period")

@manual_snapshot_retention_period.setter
def manual_snapshot_retention_period(self, value: Optional[pulumi.Input[int]]) -> None:
    # Replace the stored "manual_snapshot_retention_period" input value.
    pulumi.set(self, "manual_snapshot_retention_period", value)
@property
@pulumi.getter(name="numberOfNodes")
def number_of_nodes(self) -> Optional[pulumi.Input[int]]:
    """
    The number of compute nodes in the cluster. This parameter is required when the ClusterType parameter is specified as multi-node.
    """
    return pulumi.get(self, "number_of_nodes")

@number_of_nodes.setter
def number_of_nodes(self, value: Optional[pulumi.Input[int]]) -> None:
    # Replace the stored "number_of_nodes" input value.
    pulumi.set(self, "number_of_nodes", value)
@property
@pulumi.getter(name="ownerAccount")
def owner_account(self) -> Optional[pulumi.Input[str]]:
    """
    Owner account input for the cluster. (No description is provided by the
    upstream schema — verify intended semantics before relying on this input.)
    """
    return pulumi.get(self, "owner_account")

@owner_account.setter
def owner_account(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "owner_account" input value.
    pulumi.set(self, "owner_account", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
    """
    The port number on which the cluster accepts incoming connections. The cluster is accessible only via the JDBC and ODBC connection strings
    """
    return pulumi.get(self, "port")

@port.setter
def port(self, value: Optional[pulumi.Input[int]]) -> None:
    # Replace the stored "port" input value.
    pulumi.set(self, "port", value)
@property
@pulumi.getter(name="preferredMaintenanceWindow")
def preferred_maintenance_window(self) -> Optional[pulumi.Input[str]]:
    """
    The weekly time range (in UTC) during which automated cluster maintenance can occur.
    """
    return pulumi.get(self, "preferred_maintenance_window")

@preferred_maintenance_window.setter
def preferred_maintenance_window(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "preferred_maintenance_window" input value.
    pulumi.set(self, "preferred_maintenance_window", value)
@property
@pulumi.getter(name="publiclyAccessible")
def publicly_accessible(self) -> Optional[pulumi.Input[bool]]:
    """
    If true, the cluster can be accessed from a public network.
    """
    return pulumi.get(self, "publicly_accessible")

@publicly_accessible.setter
def publicly_accessible(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "publicly_accessible" input value.
    pulumi.set(self, "publicly_accessible", value)
@property
@pulumi.getter(name="resourceAction")
def resource_action(self) -> Optional[pulumi.Input[str]]:
    """
    The Redshift operation to be performed. Resource Action supports pause-cluster, resume-cluster APIs
    """
    return pulumi.get(self, "resource_action")

@resource_action.setter
def resource_action(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "resource_action" input value.
    pulumi.set(self, "resource_action", value)
@property
@pulumi.getter(name="revisionTarget")
def revision_target(self) -> Optional[pulumi.Input[str]]:
    """
    The identifier of the database revision. You can retrieve this value from the response to the DescribeClusterDbRevisions request.
    """
    return pulumi.get(self, "revision_target")

@revision_target.setter
def revision_target(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "revision_target" input value.
    pulumi.set(self, "revision_target", value)
@property
@pulumi.getter(name="rotateEncryptionKey")
def rotate_encryption_key(self) -> Optional[pulumi.Input[bool]]:
    """
    A boolean indicating if we want to rotate Encryption Keys.
    """
    return pulumi.get(self, "rotate_encryption_key")

@rotate_encryption_key.setter
def rotate_encryption_key(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "rotate_encryption_key" input value.
    pulumi.set(self, "rotate_encryption_key", value)
@property
@pulumi.getter(name="snapshotClusterIdentifier")
def snapshot_cluster_identifier(self) -> Optional[pulumi.Input[str]]:
    """
    The name of the cluster the source snapshot was created from. This parameter is required if your IAM user has a policy containing a snapshot resource element that specifies anything other than * for the cluster name.
    """
    return pulumi.get(self, "snapshot_cluster_identifier")

@snapshot_cluster_identifier.setter
def snapshot_cluster_identifier(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "snapshot_cluster_identifier" input value.
    pulumi.set(self, "snapshot_cluster_identifier", value)
@property
@pulumi.getter(name="snapshotCopyGrantName")
def snapshot_copy_grant_name(self) -> Optional[pulumi.Input[str]]:
    """
    The name of the snapshot copy grant to use when snapshots of an AWS KMS-encrypted cluster are copied to the destination region.
    """
    return pulumi.get(self, "snapshot_copy_grant_name")

@snapshot_copy_grant_name.setter
def snapshot_copy_grant_name(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "snapshot_copy_grant_name" input value.
    pulumi.set(self, "snapshot_copy_grant_name", value)
@property
@pulumi.getter(name="snapshotCopyManual")
def snapshot_copy_manual(self) -> Optional[pulumi.Input[bool]]:
    """
    Indicates whether to apply the snapshot retention period to newly copied manual snapshots instead of automated snapshots.
    """
    return pulumi.get(self, "snapshot_copy_manual")

@snapshot_copy_manual.setter
def snapshot_copy_manual(self, value: Optional[pulumi.Input[bool]]) -> None:
    # Replace the stored "snapshot_copy_manual" input value.
    pulumi.set(self, "snapshot_copy_manual", value)
@property
@pulumi.getter(name="snapshotCopyRetentionPeriod")
def snapshot_copy_retention_period(self) -> Optional[pulumi.Input[int]]:
    """
    The number of days to retain automated snapshots in the destination region after they are copied from the source region.

    Default is 7.

    Constraints: Must be at least 1 and no more than 35.
    """
    return pulumi.get(self, "snapshot_copy_retention_period")

@snapshot_copy_retention_period.setter
def snapshot_copy_retention_period(self, value: Optional[pulumi.Input[int]]) -> None:
    # Replace the stored "snapshot_copy_retention_period" input value.
    pulumi.set(self, "snapshot_copy_retention_period", value)
@property
@pulumi.getter(name="snapshotIdentifier")
def snapshot_identifier(self) -> Optional[pulumi.Input[str]]:
    """
    The name of the snapshot from which to create the new cluster. This parameter isn't case sensitive.
    """
    return pulumi.get(self, "snapshot_identifier")

@snapshot_identifier.setter
def snapshot_identifier(self, value: Optional[pulumi.Input[str]]) -> None:
    # Replace the stored "snapshot_identifier" input value.
    pulumi.set(self, "snapshot_identifier", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ClusterTagArgs']]]]:
    """
    The list of tags for the cluster parameter group.
    """
    return pulumi.get(self, "tags")

@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ClusterTagArgs']]]]) -> None:
    # Replace the stored "tags" input value.
    pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="vpcSecurityGroupIds")
def vpc_security_group_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    A list of Virtual Private Cloud (VPC) security groups to be associated with the cluster.
    """
    return pulumi.get(self, "vpc_security_group_ids")

@vpc_security_group_ids.setter
def vpc_security_group_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
    # Replace the stored "vpc_security_group_ids" input value.
    pulumi.set(self, "vpc_security_group_ids", value)
class Cluster(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             allow_version_upgrade: Optional[pulumi.Input[bool]] = None,
             aqua_configuration_status: Optional[pulumi.Input[str]] = None,
             automated_snapshot_retention_period: Optional[pulumi.Input[int]] = None,
             availability_zone: Optional[pulumi.Input[str]] = None,
             availability_zone_relocation: Optional[pulumi.Input[bool]] = None,
             availability_zone_relocation_status: Optional[pulumi.Input[str]] = None,
             classic: Optional[pulumi.Input[bool]] = None,
             cluster_identifier: Optional[pulumi.Input[str]] = None,
             cluster_parameter_group_name: Optional[pulumi.Input[str]] = None,
             cluster_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             cluster_subnet_group_name: Optional[pulumi.Input[str]] = None,
             cluster_type: Optional[pulumi.Input[str]] = None,
             cluster_version: Optional[pulumi.Input[str]] = None,
             d_b_name: Optional[pulumi.Input[str]] = None,
             defer_maintenance: Optional[pulumi.Input[bool]] = None,
             defer_maintenance_duration: Optional[pulumi.Input[int]] = None,
             defer_maintenance_end_time: Optional[pulumi.Input[str]] = None,
             defer_maintenance_start_time: Optional[pulumi.Input[str]] = None,
             destination_region: Optional[pulumi.Input[str]] = None,
             elastic_ip: Optional[pulumi.Input[str]] = None,
             encrypted: Optional[pulumi.Input[bool]] = None,
             endpoint: Optional[pulumi.Input[pulumi.InputType['ClusterEndpointArgs']]] = None,
             enhanced_vpc_routing: Optional[pulumi.Input[bool]] = None,
             hsm_client_certificate_identifier: Optional[pulumi.Input[str]] = None,
             hsm_configuration_identifier: Optional[pulumi.Input[str]] = None,
             iam_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             kms_key_id: Optional[pulumi.Input[str]] = None,
             logging_properties: Optional[pulumi.Input[pulumi.InputType['ClusterLoggingPropertiesArgs']]] = None,
             maintenance_track_name: Optional[pulumi.Input[str]] = None,
             manual_snapshot_retention_period: Optional[pulumi.Input[int]] = None,
             master_user_password: Optional[pulumi.Input[str]] = None,
             master_username: Optional[pulumi.Input[str]] = None,
             node_type: Optional[pulumi.Input[str]] = None,
             number_of_nodes: Optional[pulumi.Input[int]] = None,
             owner_account: Optional[pulumi.Input[str]] = None,
             port: Optional[pulumi.Input[int]] = None,
             preferred_maintenance_window: Optional[pulumi.Input[str]] = None,
             publicly_accessible: Optional[pulumi.Input[bool]] = None,
             resource_action: Optional[pulumi.Input[str]] = None,
             revision_target: Optional[pulumi.Input[str]] = None,
             rotate_encryption_key: Optional[pulumi.Input[bool]] = None,
             snapshot_cluster_identifier: Optional[pulumi.Input[str]] = None,
             snapshot_copy_grant_name: Optional[pulumi.Input[str]] = None,
             snapshot_copy_manual: Optional[pulumi.Input[bool]] = None,
             snapshot_copy_retention_period: Optional[pulumi.Input[int]] = None,
             snapshot_identifier: Optional[pulumi.Input[str]] = None,
             tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterTagArgs']]]]] = None,
             vpc_security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             __props__=None):
    """
    An example resource schema demonstrating some basic constructs and validation rules.

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[bool] allow_version_upgrade: Major version upgrades can be applied during the maintenance window to the Amazon Redshift engine that is running on the cluster. Default value is True
    :param pulumi.Input[str] aqua_configuration_status: The value represents how the cluster is configured to use AQUA (Advanced Query Accelerator) after the cluster is restored. Possible values include the following.
           enabled - Use AQUA if it is available for the current Region and Amazon Redshift node type.
           disabled - Don't use AQUA.
           auto - Amazon Redshift determines whether to use AQUA.
    :param pulumi.Input[int] automated_snapshot_retention_period: The number of days that automated snapshots are retained. If the value is 0, automated snapshots are disabled. Default value is 1
    :param pulumi.Input[str] availability_zone: The EC2 Availability Zone (AZ) in which you want Amazon Redshift to provision the cluster. Default: A random, system-chosen Availability Zone in the region that is specified by the endpoint
    :param pulumi.Input[bool] availability_zone_relocation: The option to enable relocation for an Amazon Redshift cluster between Availability Zones after the cluster modification is complete.
    :param pulumi.Input[str] availability_zone_relocation_status: The availability zone relocation status of the cluster
    :param pulumi.Input[bool] classic: A boolean value indicating whether the resize operation is using the classic resize process. If you don't provide this parameter or set the value to false , the resize type is elastic.
    :param pulumi.Input[str] cluster_identifier: A unique identifier for the cluster. You use this identifier to refer to the cluster for any subsequent cluster operations such as deleting or modifying. All alphabetical characters must be lower case, no hypens at the end, no two consecutive hyphens. Cluster name should be unique for all clusters within an AWS account
    :param pulumi.Input[str] cluster_parameter_group_name: The name of the parameter group to be associated with this cluster.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] cluster_security_groups: A list of security groups to be associated with this cluster.
    :param pulumi.Input[str] cluster_subnet_group_name: The name of a cluster subnet group to be associated with this cluster.
    :param pulumi.Input[str] cluster_type: The type of the cluster. When cluster type is specified as single-node, the NumberOfNodes parameter is not required and if multi-node, the NumberOfNodes parameter is required
    :param pulumi.Input[str] cluster_version: The version of the Amazon Redshift engine software that you want to deploy on the cluster.The version selected runs on all the nodes in the cluster.
    :param pulumi.Input[str] d_b_name: The name of the first database to be created when the cluster is created. To create additional databases after the cluster is created, connect to the cluster with a SQL client and use SQL commands to create a database.
    :param pulumi.Input[bool] defer_maintenance: A boolean indicating whether to enable the deferred maintenance window.
    :param pulumi.Input[int] defer_maintenance_duration: An integer indicating the duration of the maintenance window in days. If you specify a duration, you can't specify an end time. The duration must be 45 days or less.
    :param pulumi.Input[str] defer_maintenance_end_time: A timestamp indicating end time for the deferred maintenance window. If you specify an end time, you can't specify a duration.
    :param pulumi.Input[str] defer_maintenance_start_time: A timestamp indicating the start time for the deferred maintenance window.
    :param pulumi.Input[str] destination_region: The destination AWS Region that you want to copy snapshots to. Constraints: Must be the name of a valid AWS Region. For more information, see Regions and Endpoints in the Amazon Web Services [https://docs.aws.amazon.com/general/latest/gr/rande.html#redshift_region] General Reference
    :param pulumi.Input[str] elastic_ip: The Elastic IP (EIP) address for the cluster.
    :param pulumi.Input[bool] encrypted: If true, the data in the cluster is encrypted at rest.
    :param pulumi.Input[pulumi.InputType['ClusterEndpointArgs']] endpoint: Endpoint settings for the cluster. (No description is provided by the upstream schema — verify intended semantics before relying on this input.)
    :param pulumi.Input[bool] enhanced_vpc_routing: An option that specifies whether to create the cluster with enhanced VPC routing enabled. To create a cluster that uses enhanced VPC routing, the cluster must be in a VPC. For more information, see Enhanced VPC Routing in the Amazon Redshift Cluster Management Guide.
           If this option is true , enhanced VPC routing is enabled.
           Default: false
    :param pulumi.Input[str] hsm_client_certificate_identifier: Specifies the name of the HSM client certificate the Amazon Redshift cluster uses to retrieve the data encryption keys stored in an HSM
    :param pulumi.Input[str] hsm_configuration_identifier: Specifies the name of the HSM configuration that contains the information the Amazon Redshift cluster can use to retrieve and store keys in an HSM.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] iam_roles: A list of AWS Identity and Access Management (IAM) roles that can be used by the cluster to access other AWS services. You must supply the IAM roles in their Amazon Resource Name (ARN) format. You can supply up to 10 IAM roles in a single request
    :param pulumi.Input[str] kms_key_id: The AWS Key Management Service (KMS) key ID of the encryption key that you want to use to encrypt data in the cluster.
    :param pulumi.Input[pulumi.InputType['ClusterLoggingPropertiesArgs']] logging_properties: Logging configuration for the cluster. (No description is provided by the upstream schema — see ClusterLoggingPropertiesArgs for the accepted fields.)
    :param pulumi.Input[str] maintenance_track_name: The name for the maintenance track that you want to assign for the cluster. This name change is asynchronous. The new track name stays in the PendingModifiedValues for the cluster until the next maintenance window. When the maintenance track changes, the cluster is switched to the latest cluster release available for the maintenance track. At this point, the maintenance track name is applied.
    :param pulumi.Input[int] manual_snapshot_retention_period: The number of days to retain newly copied snapshots in the destination AWS Region after they are copied from the source AWS Region. If the value is -1, the manual snapshot is retained indefinitely.
           The value must be either -1 or an integer between 1 and 3,653.
    :param pulumi.Input[str] master_user_password: The password associated with the master user account for the cluster that is being created. Password must be between 8 and 64 characters in length, should have at least one uppercase letter.Must contain at least one lowercase letter.Must contain one number.Can be any printable ASCII character.
    :param pulumi.Input[str] master_username: The user name associated with the master user account for the cluster that is being created. The user name can't be PUBLIC and first character must be a letter.
    :param pulumi.Input[str] node_type: The node type to be provisioned for the cluster.Valid Values: ds2.xlarge | ds2.8xlarge | dc1.large | dc1.8xlarge | dc2.large | dc2.8xlarge | ra3.4xlarge | ra3.16xlarge
    :param pulumi.Input[int] number_of_nodes: The number of compute nodes in the cluster. This parameter is required when the ClusterType parameter is specified as multi-node.
    :param pulumi.Input[str] owner_account: Owner account input for the cluster. (No description is provided by the upstream schema — verify intended semantics before relying on this input.)
    :param pulumi.Input[int] port: The port number on which the cluster accepts incoming connections. The cluster is accessible only via the JDBC and ODBC connection strings
    :param pulumi.Input[str] preferred_maintenance_window: The weekly time range (in UTC) during which automated cluster maintenance can occur.
    :param pulumi.Input[bool] publicly_accessible: If true, the cluster can be accessed from a public network.
    :param pulumi.Input[str] resource_action: The Redshift operation to be performed. Resource Action supports pause-cluster, resume-cluster APIs
    :param pulumi.Input[str] revision_target: The identifier of the database revision. You can retrieve this value from the response to the DescribeClusterDbRevisions request.
    :param pulumi.Input[bool] rotate_encryption_key: A boolean indicating if we want to rotate Encryption Keys.
    :param pulumi.Input[str] snapshot_cluster_identifier: The name of the cluster the source snapshot was created from. This parameter is required if your IAM user has a policy containing a snapshot resource element that specifies anything other than * for the cluster name.
    :param pulumi.Input[str] snapshot_copy_grant_name: The name of the snapshot copy grant to use when snapshots of an AWS KMS-encrypted cluster are copied to the destination region.
    :param pulumi.Input[bool] snapshot_copy_manual: Indicates whether to apply the snapshot retention period to newly copied manual snapshots instead of automated snapshots.
    :param pulumi.Input[int] snapshot_copy_retention_period: The number of days to retain automated snapshots in the destination region after they are copied from the source region.
           Default is 7.
           Constraints: Must be at least 1 and no more than 35.
    :param pulumi.Input[str] snapshot_identifier: The name of the snapshot from which to create the new cluster. This parameter isn't case sensitive.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterTagArgs']]]] tags: The list of tags for the cluster parameter group.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] vpc_security_group_ids: A list of Virtual Private Cloud (VPC) security groups to be associated with the cluster.
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: ClusterArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Create a Cluster resource from a fully-populated ``ClusterArgs`` object.

    An example resource schema demonstrating some basic constructs and validation rules.

    :param str resource_name: The name of the resource.
    :param ClusterArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch construction to ``_internal_init``.

    Accepts either a single ``ClusterArgs`` object (plus options) or raw
    keyword properties, matching the two ``@overload`` signatures above.
    """
    resource_args, opts = _utilities.get_resource_args_opts(ClusterArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        # Caller passed raw keyword properties; forward them untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Caller passed a ClusterArgs object; expand it into keyword properties.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   allow_version_upgrade: Optional[pulumi.Input[bool]] = None,
                   aqua_configuration_status: Optional[pulumi.Input[str]] = None,
                   automated_snapshot_retention_period: Optional[pulumi.Input[int]] = None,
                   availability_zone: Optional[pulumi.Input[str]] = None,
                   availability_zone_relocation: Optional[pulumi.Input[bool]] = None,
                   availability_zone_relocation_status: Optional[pulumi.Input[str]] = None,
                   classic: Optional[pulumi.Input[bool]] = None,
                   cluster_identifier: Optional[pulumi.Input[str]] = None,
                   cluster_parameter_group_name: Optional[pulumi.Input[str]] = None,
                   cluster_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   cluster_subnet_group_name: Optional[pulumi.Input[str]] = None,
                   cluster_type: Optional[pulumi.Input[str]] = None,
                   cluster_version: Optional[pulumi.Input[str]] = None,
                   d_b_name: Optional[pulumi.Input[str]] = None,
                   defer_maintenance: Optional[pulumi.Input[bool]] = None,
                   defer_maintenance_duration: Optional[pulumi.Input[int]] = None,
                   defer_maintenance_end_time: Optional[pulumi.Input[str]] = None,
                   defer_maintenance_start_time: Optional[pulumi.Input[str]] = None,
                   destination_region: Optional[pulumi.Input[str]] = None,
                   elastic_ip: Optional[pulumi.Input[str]] = None,
                   encrypted: Optional[pulumi.Input[bool]] = None,
                   endpoint: Optional[pulumi.Input[pulumi.InputType['ClusterEndpointArgs']]] = None,
                   enhanced_vpc_routing: Optional[pulumi.Input[bool]] = None,
                   hsm_client_certificate_identifier: Optional[pulumi.Input[str]] = None,
                   hsm_configuration_identifier: Optional[pulumi.Input[str]] = None,
                   iam_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   kms_key_id: Optional[pulumi.Input[str]] = None,
                   logging_properties: Optional[pulumi.Input[pulumi.InputType['ClusterLoggingPropertiesArgs']]] = None,
                   maintenance_track_name: Optional[pulumi.Input[str]] = None,
                   manual_snapshot_retention_period: Optional[pulumi.Input[int]] = None,
                   master_user_password: Optional[pulumi.Input[str]] = None,
                   master_username: Optional[pulumi.Input[str]] = None,
                   node_type: Optional[pulumi.Input[str]] = None,
                   number_of_nodes: Optional[pulumi.Input[int]] = None,
                   owner_account: Optional[pulumi.Input[str]] = None,
                   port: Optional[pulumi.Input[int]] = None,
                   preferred_maintenance_window: Optional[pulumi.Input[str]] = None,
                   publicly_accessible: Optional[pulumi.Input[bool]] = None,
                   resource_action: Optional[pulumi.Input[str]] = None,
                   revision_target: Optional[pulumi.Input[str]] = None,
                   rotate_encryption_key: Optional[pulumi.Input[bool]] = None,
                   snapshot_cluster_identifier: Optional[pulumi.Input[str]] = None,
                   snapshot_copy_grant_name: Optional[pulumi.Input[str]] = None,
                   snapshot_copy_manual: Optional[pulumi.Input[bool]] = None,
                   snapshot_copy_retention_period: Optional[pulumi.Input[int]] = None,
                   snapshot_identifier: Optional[pulumi.Input[str]] = None,
                   tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterTagArgs']]]]] = None,
                   vpc_security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   __props__=None):
    """Validate options, build the property bag, and register the resource.

    Shared implementation behind both ``__init__`` overloads. ``__props__``
    is only legal when looking up an existing resource (``opts.id`` set);
    for new resources the bag is built here from the keyword arguments.
    """
    # Normalize options: default instance, type check, and SDK version stamp.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new resource: __props__ must not be supplied by the caller.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = ClusterArgs.__new__(ClusterArgs)

        __props__.__dict__["allow_version_upgrade"] = allow_version_upgrade
        __props__.__dict__["aqua_configuration_status"] = aqua_configuration_status
        __props__.__dict__["automated_snapshot_retention_period"] = automated_snapshot_retention_period
        __props__.__dict__["availability_zone"] = availability_zone
        __props__.__dict__["availability_zone_relocation"] = availability_zone_relocation
        __props__.__dict__["availability_zone_relocation_status"] = availability_zone_relocation_status
        __props__.__dict__["classic"] = classic
        __props__.__dict__["cluster_identifier"] = cluster_identifier
        __props__.__dict__["cluster_parameter_group_name"] = cluster_parameter_group_name
        __props__.__dict__["cluster_security_groups"] = cluster_security_groups
        __props__.__dict__["cluster_subnet_group_name"] = cluster_subnet_group_name
        # Required properties are enforced only for new resources (opts.urn unset);
        # when adopting via URN the engine supplies the values.
        if cluster_type is None and not opts.urn:
            raise TypeError("Missing required property 'cluster_type'")
        __props__.__dict__["cluster_type"] = cluster_type
        __props__.__dict__["cluster_version"] = cluster_version
        if d_b_name is None and not opts.urn:
            raise TypeError("Missing required property 'd_b_name'")
        __props__.__dict__["d_b_name"] = d_b_name
        __props__.__dict__["defer_maintenance"] = defer_maintenance
        __props__.__dict__["defer_maintenance_duration"] = defer_maintenance_duration
        __props__.__dict__["defer_maintenance_end_time"] = defer_maintenance_end_time
        __props__.__dict__["defer_maintenance_start_time"] = defer_maintenance_start_time
        __props__.__dict__["destination_region"] = destination_region
        __props__.__dict__["elastic_ip"] = elastic_ip
        __props__.__dict__["encrypted"] = encrypted
        __props__.__dict__["endpoint"] = endpoint
        __props__.__dict__["enhanced_vpc_routing"] = enhanced_vpc_routing
        __props__.__dict__["hsm_client_certificate_identifier"] = hsm_client_certificate_identifier
        __props__.__dict__["hsm_configuration_identifier"] = hsm_configuration_identifier
        __props__.__dict__["iam_roles"] = iam_roles
        __props__.__dict__["kms_key_id"] = kms_key_id
        __props__.__dict__["logging_properties"] = logging_properties
        __props__.__dict__["maintenance_track_name"] = maintenance_track_name
        __props__.__dict__["manual_snapshot_retention_period"] = manual_snapshot_retention_period
        if master_user_password is None and not opts.urn:
            raise TypeError("Missing required property 'master_user_password'")
        __props__.__dict__["master_user_password"] = master_user_password
        if master_username is None and not opts.urn:
            raise TypeError("Missing required property 'master_username'")
        __props__.__dict__["master_username"] = master_username
        if node_type is None and not opts.urn:
            raise TypeError("Missing required property 'node_type'")
        __props__.__dict__["node_type"] = node_type
        __props__.__dict__["number_of_nodes"] = number_of_nodes
        __props__.__dict__["owner_account"] = owner_account
        __props__.__dict__["port"] = port
        __props__.__dict__["preferred_maintenance_window"] = preferred_maintenance_window
        __props__.__dict__["publicly_accessible"] = publicly_accessible
        __props__.__dict__["resource_action"] = resource_action
        __props__.__dict__["revision_target"] = revision_target
        __props__.__dict__["rotate_encryption_key"] = rotate_encryption_key
        __props__.__dict__["snapshot_cluster_identifier"] = snapshot_cluster_identifier
        __props__.__dict__["snapshot_copy_grant_name"] = snapshot_copy_grant_name
        __props__.__dict__["snapshot_copy_manual"] = snapshot_copy_manual
        __props__.__dict__["snapshot_copy_retention_period"] = snapshot_copy_retention_period
        __props__.__dict__["snapshot_identifier"] = snapshot_identifier
        __props__.__dict__["tags"] = tags
        __props__.__dict__["vpc_security_group_ids"] = vpc_security_group_ids
        # Output-only property: resolved by the provider, never set by callers.
        __props__.__dict__["defer_maintenance_identifier"] = None
    super(Cluster, __self__).__init__(
        'aws-native:redshift:Cluster',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None) -> 'Cluster':
    """
    Get an existing Cluster resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    __props__ = ClusterArgs.__new__(ClusterArgs)
    # Every output property starts as None; the engine fills in the real
    # state when the existing resource is read back.
    for prop_name in (
        "allow_version_upgrade",
        "aqua_configuration_status",
        "automated_snapshot_retention_period",
        "availability_zone",
        "availability_zone_relocation",
        "availability_zone_relocation_status",
        "classic",
        "cluster_identifier",
        "cluster_parameter_group_name",
        "cluster_security_groups",
        "cluster_subnet_group_name",
        "cluster_type",
        "cluster_version",
        "d_b_name",
        "defer_maintenance",
        "defer_maintenance_duration",
        "defer_maintenance_end_time",
        "defer_maintenance_identifier",
        "defer_maintenance_start_time",
        "destination_region",
        "elastic_ip",
        "encrypted",
        "endpoint",
        "enhanced_vpc_routing",
        "hsm_client_certificate_identifier",
        "hsm_configuration_identifier",
        "iam_roles",
        "kms_key_id",
        "logging_properties",
        "maintenance_track_name",
        "manual_snapshot_retention_period",
        "master_user_password",
        "master_username",
        "node_type",
        "number_of_nodes",
        "owner_account",
        "port",
        "preferred_maintenance_window",
        "publicly_accessible",
        "resource_action",
        "revision_target",
        "rotate_encryption_key",
        "snapshot_cluster_identifier",
        "snapshot_copy_grant_name",
        "snapshot_copy_manual",
        "snapshot_copy_retention_period",
        "snapshot_identifier",
        "tags",
        "vpc_security_group_ids",
    ):
        __props__.__dict__[prop_name] = None
    return Cluster(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allowVersionUpgrade")
def allow_version_upgrade(self) -> pulumi.Output[Optional[bool]]:
"""
Major version upgrades can be applied during the maintenance window to the Amazon Redshift engine that is running on the cluster. Default value is True
"""
return pulumi.get(self, "allow_version_upgrade")
@property
@pulumi.getter(name="aquaConfigurationStatus")
def aqua_configuration_status(self) -> pulumi.Output[Optional[str]]:
"""
The value represents how the cluster is configured to use AQUA (Advanced Query Accelerator) after the cluster is restored. Possible values include the following.
enabled - Use AQUA if it is available for the current Region and Amazon Redshift node type.
disabled - Don't use AQUA.
auto - Amazon Redshift determines whether to use AQUA.
"""
return pulumi.get(self, "aqua_configuration_status")
@property
@pulumi.getter(name="automatedSnapshotRetentionPeriod")
def automated_snapshot_retention_period(self) -> pulumi.Output[Optional[int]]:
"""
The number of days that automated snapshots are retained. If the value is 0, automated snapshots are disabled. Default value is 1
"""
return pulumi.get(self, "automated_snapshot_retention_period")
@property
@pulumi.getter(name="availabilityZone")
def availability_zone(self) -> pulumi.Output[Optional[str]]:
"""
The EC2 Availability Zone (AZ) in which you want Amazon Redshift to provision the cluster. Default: A random, system-chosen Availability Zone in the region that is specified by the endpoint
"""
return pulumi.get(self, "availability_zone")
@property
@pulumi.getter(name="availabilityZoneRelocation")
def availability_zone_relocation(self) -> pulumi.Output[Optional[bool]]:
"""
The option to enable relocation for an Amazon Redshift cluster between Availability Zones after the cluster modification is complete.
"""
return pulumi.get(self, "availability_zone_relocation")
@property
@pulumi.getter(name="availabilityZoneRelocationStatus")
def availability_zone_relocation_status(self) -> pulumi.Output[Optional[str]]:
"""
The availability zone relocation status of the cluster
"""
return pulumi.get(self, "availability_zone_relocation_status")
@property
@pulumi.getter
def classic(self) -> pulumi.Output[Optional[bool]]:
"""
A boolean value indicating whether the resize operation is using the classic resize process. If you don't provide this parameter or set the value to false , the resize type is elastic.
"""
return pulumi.get(self, "classic")
@property
@pulumi.getter(name="clusterIdentifier")
def cluster_identifier(self) -> pulumi.Output[Optional[str]]:
"""
A unique identifier for the cluster. You use this identifier to refer to the cluster for any subsequent cluster operations such as deleting or modifying. All alphabetical characters must be lower case, no hypens at the end, no two consecutive hyphens. Cluster name should be unique for all clusters within an AWS account
"""
return pulumi.get(self, "cluster_identifier")
@property
@pulumi.getter(name="clusterParameterGroupName")
def cluster_parameter_group_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of the parameter group to be associated with this cluster.
"""
return pulumi.get(self, "cluster_parameter_group_name")
@property
@pulumi.getter(name="clusterSecurityGroups")
def cluster_security_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of security groups to be associated with this cluster.
"""
return pulumi.get(self, "cluster_security_groups")
@property
@pulumi.getter(name="clusterSubnetGroupName")
def cluster_subnet_group_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of a cluster subnet group to be associated with this cluster.
"""
return pulumi.get(self, "cluster_subnet_group_name")
@property
@pulumi.getter(name="clusterType")
def cluster_type(self) -> pulumi.Output[str]:
"""
The type of the cluster. When cluster type is specified as single-node, the NumberOfNodes parameter is not required and if multi-node, the NumberOfNodes parameter is required
"""
return pulumi.get(self, "cluster_type")
@property
@pulumi.getter(name="clusterVersion")
def cluster_version(self) -> pulumi.Output[Optional[str]]:
"""
The version of the Amazon Redshift engine software that you want to deploy on the cluster.The version selected runs on all the nodes in the cluster.
"""
return pulumi.get(self, "cluster_version")
@property
@pulumi.getter(name="dBName")
def d_b_name(self) -> pulumi.Output[str]:
"""
The name of the first database to be created when the cluster is created. To create additional databases after the cluster is created, connect to the cluster with a SQL client and use SQL commands to create a database.
"""
return pulumi.get(self, "d_b_name")
@property
@pulumi.getter(name="deferMaintenance")
def defer_maintenance(self) -> pulumi.Output[Optional[bool]]:
"""
A boolean indicating whether to enable the deferred maintenance window.
"""
return pulumi.get(self, "defer_maintenance")
@property
@pulumi.getter(name="deferMaintenanceDuration")
def defer_maintenance_duration(self) -> pulumi.Output[Optional[int]]:
"""
An integer indicating the duration of the maintenance window in days. If you specify a duration, you can't specify an end time. The duration must be 45 days or less.
"""
return pulumi.get(self, "defer_maintenance_duration")
@property
@pulumi.getter(name="deferMaintenanceEndTime")
def defer_maintenance_end_time(self) -> pulumi.Output[Optional[str]]:
"""
A timestamp indicating end time for the deferred maintenance window. If you specify an end time, you can't specify a duration.
"""
return pulumi.get(self, "defer_maintenance_end_time")
@property
@pulumi.getter(name="deferMaintenanceIdentifier")
def defer_maintenance_identifier(self) -> pulumi.Output[str]:
"""
A unique identifier for the deferred maintenance window.
"""
return pulumi.get(self, "defer_maintenance_identifier")
@property
@pulumi.getter(name="deferMaintenanceStartTime")
def defer_maintenance_start_time(self) -> pulumi.Output[Optional[str]]:
"""
A timestamp indicating the start time for the deferred maintenance window.
"""
return pulumi.get(self, "defer_maintenance_start_time")
@property
@pulumi.getter(name="destinationRegion")
def destination_region(self) -> pulumi.Output[Optional[str]]:
"""
The destination AWS Region that you want to copy snapshots to. Constraints: Must be the name of a valid AWS Region. For more information, see Regions and Endpoints in the Amazon Web Services [https://docs.aws.amazon.com/general/latest/gr/rande.html#redshift_region] General Reference
"""
return pulumi.get(self, "destination_region")
@property
@pulumi.getter(name="elasticIp")
def elastic_ip(self) -> pulumi.Output[Optional[str]]:
"""
The Elastic IP (EIP) address for the cluster.
"""
return pulumi.get(self, "elastic_ip")
@property
@pulumi.getter
def encrypted(self) -> pulumi.Output[Optional[bool]]:
"""
If true, the data in the cluster is encrypted at rest.
"""
return pulumi.get(self, "encrypted")
@property
@pulumi.getter
def endpoint(self) -> pulumi.Output[Optional['outputs.ClusterEndpoint']]:
    """
    The cluster's endpoint, as reported by the provider (see ``ClusterEndpoint``).
    """
    return pulumi.get(self, "endpoint")
@property
@pulumi.getter(name="enhancedVpcRouting")
def enhanced_vpc_routing(self) -> pulumi.Output[Optional[bool]]:
"""
An option that specifies whether to create the cluster with enhanced VPC routing enabled. To create a cluster that uses enhanced VPC routing, the cluster must be in a VPC. For more information, see Enhanced VPC Routing in the Amazon Redshift Cluster Management Guide.
If this option is true , enhanced VPC routing is enabled.
Default: false
"""
return pulumi.get(self, "enhanced_vpc_routing")
@property
@pulumi.getter(name="hsmClientCertificateIdentifier")
def hsm_client_certificate_identifier(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the name of the HSM client certificate the Amazon Redshift cluster uses to retrieve the data encryption keys stored in an HSM
"""
return pulumi.get(self, "hsm_client_certificate_identifier")
@property
@pulumi.getter(name="hsmConfigurationIdentifier")
def hsm_configuration_identifier(self) -> pulumi.Output[Optional[str]]:
"""
Specifies the name of the HSM configuration that contains the information the Amazon Redshift cluster can use to retrieve and store keys in an HSM.
"""
return pulumi.get(self, "hsm_configuration_identifier")
@property
@pulumi.getter(name="iamRoles")
def iam_roles(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of AWS Identity and Access Management (IAM) roles that can be used by the cluster to access other AWS services. You must supply the IAM roles in their Amazon Resource Name (ARN) format. You can supply up to 10 IAM roles in a single request
"""
return pulumi.get(self, "iam_roles")
@property
@pulumi.getter(name="kmsKeyId")
def kms_key_id(self) -> pulumi.Output[Optional[str]]:
"""
The AWS Key Management Service (KMS) key ID of the encryption key that you want to use to encrypt data in the cluster.
"""
return pulumi.get(self, "kms_key_id")
@property
@pulumi.getter(name="loggingProperties")
def logging_properties(self) -> pulumi.Output[Optional['outputs.ClusterLoggingProperties']]:
    """
    The logging configuration associated with the cluster (see ``ClusterLoggingProperties``).
    """
    return pulumi.get(self, "logging_properties")
@property
@pulumi.getter(name="maintenanceTrackName")
def maintenance_track_name(self) -> pulumi.Output[Optional[str]]:
"""
The name for the maintenance track that you want to assign for the cluster. This name change is asynchronous. The new track name stays in the PendingModifiedValues for the cluster until the next maintenance window. When the maintenance track changes, the cluster is switched to the latest cluster release available for the maintenance track. At this point, the maintenance track name is applied.
"""
return pulumi.get(self, "maintenance_track_name")
@property
@pulumi.getter(name="manualSnapshotRetentionPeriod")
def manual_snapshot_retention_period(self) -> pulumi.Output[Optional[int]]:
"""
The number of days to retain newly copied snapshots in the destination AWS Region after they are copied from the source AWS Region. If the value is -1, the manual snapshot is retained indefinitely.
The value must be either -1 or an integer between 1 and 3,653.
"""
return pulumi.get(self, "manual_snapshot_retention_period")
@property
@pulumi.getter(name="masterUserPassword")
def master_user_password(self) -> pulumi.Output[str]:
"""
The password associated with the master user account for the cluster that is being created. Password must be between 8 and 64 characters in length, should have at least one uppercase letter.Must contain at least one lowercase letter.Must contain one number.Can be any printable ASCII character.
"""
return pulumi.get(self, "master_user_password")
@property
@pulumi.getter(name="masterUsername")
def master_username(self) -> pulumi.Output[str]:
"""
The user name associated with the master user account for the cluster that is being created. The user name can't be PUBLIC and first character must be a letter.
"""
return pulumi.get(self, "master_username")
@property
@pulumi.getter(name="nodeType")
def node_type(self) -> pulumi.Output[str]:
"""
The node type to be provisioned for the cluster.Valid Values: ds2.xlarge | ds2.8xlarge | dc1.large | dc1.8xlarge | dc2.large | dc2.8xlarge | ra3.4xlarge | ra3.16xlarge
"""
return pulumi.get(self, "node_type")
@property
@pulumi.getter(name="numberOfNodes")
def number_of_nodes(self) -> pulumi.Output[Optional[int]]:
"""
The number of compute nodes in the cluster. This parameter is required when the ClusterType parameter is specified as multi-node.
"""
return pulumi.get(self, "number_of_nodes")
@property
@pulumi.getter(name="ownerAccount")
def owner_account(self) -> pulumi.Output[Optional[str]]:
    """
    The owner account value — presumably the AWS account that owns the
    snapshot being restored; not documented in this schema, verify against
    the AWS::Redshift::Cluster CloudFormation reference.
    """
    return pulumi.get(self, "owner_account")
@property
@pulumi.getter
def port(self) -> pulumi.Output[Optional[int]]:
"""
The port number on which the cluster accepts incoming connections. The cluster is accessible only via the JDBC and ODBC connection strings
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="preferredMaintenanceWindow")
def preferred_maintenance_window(self) -> pulumi.Output[Optional[str]]:
"""
The weekly time range (in UTC) during which automated cluster maintenance can occur.
"""
return pulumi.get(self, "preferred_maintenance_window")
@property
@pulumi.getter(name="publiclyAccessible")
def publicly_accessible(self) -> pulumi.Output[Optional[bool]]:
"""
If true, the cluster can be accessed from a public network.
"""
return pulumi.get(self, "publicly_accessible")
@property
@pulumi.getter(name="resourceAction")
def resource_action(self) -> pulumi.Output[Optional[str]]:
"""
The Redshift operation to be performed. Resource Action supports pause-cluster, resume-cluster APIs
"""
return pulumi.get(self, "resource_action")
@property
@pulumi.getter(name="revisionTarget")
def revision_target(self) -> pulumi.Output[Optional[str]]:
"""
The identifier of the database revision. You can retrieve this value from the response to the DescribeClusterDbRevisions request.
"""
return pulumi.get(self, "revision_target")
@property
@pulumi.getter(name="rotateEncryptionKey")
def rotate_encryption_key(self) -> pulumi.Output[Optional[bool]]:
"""
A boolean indicating if we want to rotate Encryption Keys.
"""
return pulumi.get(self, "rotate_encryption_key")
@property
@pulumi.getter(name="snapshotClusterIdentifier")
def snapshot_cluster_identifier(self) -> pulumi.Output[Optional[str]]:
"""
The name of the cluster the source snapshot was created from. This parameter is required if your IAM user has a policy containing a snapshot resource element that specifies anything other than * for the cluster name.
"""
return pulumi.get(self, "snapshot_cluster_identifier")
@property
@pulumi.getter(name="snapshotCopyGrantName")
def snapshot_copy_grant_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of the snapshot copy grant to use when snapshots of an AWS KMS-encrypted cluster are copied to the destination region.
"""
return pulumi.get(self, "snapshot_copy_grant_name")
@property
@pulumi.getter(name="snapshotCopyManual")
def snapshot_copy_manual(self) -> pulumi.Output[Optional[bool]]:
"""
Indicates whether to apply the snapshot retention period to newly copied manual snapshots instead of automated snapshots.
"""
return pulumi.get(self, "snapshot_copy_manual")
@property
@pulumi.getter(name="snapshotCopyRetentionPeriod")
def snapshot_copy_retention_period(self) -> pulumi.Output[Optional[int]]:
"""
The number of days to retain automated snapshots in the destination region after they are copied from the source region.
Default is 7.
Constraints: Must be at least 1 and no more than 35.
"""
return pulumi.get(self, "snapshot_copy_retention_period")
@property
@pulumi.getter(name="snapshotIdentifier")
def snapshot_identifier(self) -> pulumi.Output[Optional[str]]:
"""
The name of the snapshot from which to create the new cluster. This parameter isn't case sensitive.
"""
return pulumi.get(self, "snapshot_identifier")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence['outputs.ClusterTag']]]:
"""
The list of tags for the cluster parameter group.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="vpcSecurityGroupIds")
def vpc_security_group_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of Virtual Private Cloud (VPC) security groups to be associated with the cluster.
"""
return pulumi.get(self, "vpc_security_group_ids")
| 58.555411 | 452 | 0.701249 | 11,198 | 89,824 | 5.394356 | 0.043133 | 0.065739 | 0.070771 | 0.043704 | 0.930768 | 0.885607 | 0.834354 | 0.807105 | 0.783929 | 0.717975 | 0 | 0.002001 | 0.215533 | 89,824 | 1,533 | 453 | 58.593607 | 0.855259 | 0.373686 | 0 | 0.473684 | 1 | 0 | 0.153481 | 0.087482 | 0 | 0 | 0 | 0 | 0 | 1 | 0.162191 | false | 0.019334 | 0.007519 | 0.006445 | 0.277121 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b7151ddfe5d8d4991ae8b5298c9fc9e48e68bd11 | 3,148 | py | Python | bogons_test.py | moogzy/python-bogons | 5bca7ee46d5cc05803c2d0bc545b043accc308c8 | [
"Apache-2.0"
] | 2 | 2021-02-11T14:15:57.000Z | 2021-02-11T14:16:08.000Z | bogons_test.py | moogzy/python-bogons | 5bca7ee46d5cc05803c2d0bc545b043accc308c8 | [
"Apache-2.0"
] | 3 | 2021-02-07T03:19:36.000Z | 2022-02-28T15:37:43.000Z | bogons_test.py | moogzy/python-bogons | 5bca7ee46d5cc05803c2d0bc545b043accc308c8 | [
"Apache-2.0"
] | 3 | 2021-02-08T00:23:23.000Z | 2022-02-28T01:12:58.000Z | import bogons
import unittest
class valid_public_asnTest(unittest.TestCase):
    """Table-driven tests for the public helpers of the ``bogons`` module."""

    def test_valid_public_asn(self):
        """Only ASNs outside the reserved/private/documentation ranges are valid."""
        cases = [
            (-1, False),
            (0, False),
            (1, True),
            (23456, False),
            (64496, False),
            (64511, False),
            (64512, False),
            (65534, False),
            (65535, False),
            (65551, False),
            (131071, False),
            (4199999999, True),
            (4200000000, False),
            (4294967295, False),
            (18446744073709551615, False),
            ("word", False),
        ]
        for asn, expected in cases:
            self.assertEqual(bogons.valid_public_asn(asn), expected)

    def test_is_public(self):
        """Bogon/reserved/malformed addresses are rejected; public ones accepted."""
        cases = [
            ("0.0.0.1", False),
            ("1.1.1.1", True),
            ("10.1.1.1", False),
            ("11.1.1.1", True),
            ("11.1.1.1.1", False),
            ("172.16.1.1", False),
            ("100.64.0.1", False),
            ("100.127.0.0", False),
            ("169.254.10.1", False),
            ("192.0.0.1", False),
            ("192.0.1.1", True),
            ("192.0.2.1", False),
            ("192.168.1.1", False),
            ("193.168.1.1", True),
            ("198.18.100.2", False),
            ("198.51.100.2", False),
            ("203.0.113.2", False),
            ("224.168.1.1", False),
            ("🎈", False),
            ("2001:0:1:2::3", False),
            ("2001:1:2::3", False),
            ("2001:db8:1:2::3", False),
            ("2002:b8:1:2::3", False),
            ("2600::", True),
            ("3ffe::", False),
            ("3fff::", True),
            ("3fff:::", False),
            ("4600::", False),
            ("", False),
        ]
        for address, expected in cases:
            self.assertEqual(bogons.is_public_ip(address), expected)


if __name__ == '__main__':
    unittest.main()
| 55.22807 | 78 | 0.694727 | 455 | 3,148 | 4.578022 | 0.140659 | 0.324052 | 0.453673 | 0.43687 | 0.854537 | 0.853577 | 0.838694 | 0.424388 | 0.385502 | 0.253 | 0 | 0.097113 | 0.152795 | 3,148 | 56 | 79 | 56.214286 | 0.68354 | 0 | 0 | 0 | 0 | 0 | 0.087357 | 0 | 0 | 0 | 0 | 0 | 0.865385 | 1 | 0.038462 | false | 0 | 0.038462 | 0 | 0.096154 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b7250df546db51842933e48f742b9ad1361fe4df | 9,819 | py | Python | tests/components/wilight/test_light.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 6 | 2017-08-02T19:26:39.000Z | 2020-03-14T22:47:41.000Z | tests/components/wilight/test_light.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 58 | 2020-08-03T07:33:02.000Z | 2022-03-31T06:02:05.000Z | tests/components/wilight/test_light.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | [
"Apache-2.0"
] | 14 | 2018-08-19T16:28:26.000Z | 2021-09-02T18:26:53.000Z | """Tests for the WiLight integration."""
from unittest.mock import patch
import pytest
import pywilight
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
DOMAIN as LIGHT_DOMAIN,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.components.wilight import (
HOST,
UPNP_MAC_ADDRESS,
UPNP_MODEL_NAME_COLOR,
UPNP_MODEL_NAME_DIMMER,
UPNP_MODEL_NAME_LIGHT_FAN,
UPNP_MODEL_NAME_P_B,
UPNP_MODEL_NUMBER,
UPNP_SERIAL,
WILIGHT_ID,
setup_integration,
)
@pytest.fixture(name="dummy_get_components_from_model_light")
def mock_dummy_get_components_from_model_light():
    """Patch pywilight's model lookup to report a light-only component list."""
    light_only = ["light"]
    patcher = patch(
        "pywilight.get_components_from_model",
        return_value=light_only,
    )
    with patcher:
        yield light_only
@pytest.fixture(name="dummy_device_from_host_light_fan")
def mock_dummy_device_from_host_light_fan():
    """Mock a valid api device for the light/fan model."""
    api_device = pywilight.wilight_from_discovery(
        f"http://{HOST}:45995/wilight.xml",
        UPNP_MAC_ADDRESS,
        UPNP_MODEL_NAME_LIGHT_FAN,
        UPNP_SERIAL,
        UPNP_MODEL_NUMBER,
    )
    api_device.set_dummy(True)
    patcher = patch("pywilight.device_from_host", return_value=api_device)
    with patcher:
        yield api_device
@pytest.fixture(name="dummy_device_from_host_pb")
def mock_dummy_device_from_host_pb():
    """Mock a valid api device for the push-button model."""
    api_device = pywilight.wilight_from_discovery(
        f"http://{HOST}:45995/wilight.xml",
        UPNP_MAC_ADDRESS,
        UPNP_MODEL_NAME_P_B,
        UPNP_SERIAL,
        UPNP_MODEL_NUMBER,
    )
    api_device.set_dummy(True)
    patcher = patch("pywilight.device_from_host", return_value=api_device)
    with patcher:
        yield api_device
@pytest.fixture(name="dummy_device_from_host_dimmer")
def mock_dummy_device_from_host_dimmer():
    """Mock a valid api device for the dimmer model."""
    api_device = pywilight.wilight_from_discovery(
        f"http://{HOST}:45995/wilight.xml",
        UPNP_MAC_ADDRESS,
        UPNP_MODEL_NAME_DIMMER,
        UPNP_SERIAL,
        UPNP_MODEL_NUMBER,
    )
    api_device.set_dummy(True)
    patcher = patch("pywilight.device_from_host", return_value=api_device)
    with patcher:
        yield api_device
@pytest.fixture(name="dummy_device_from_host_color")
def mock_dummy_device_from_host_color():
    """Mock a valid api device for the color-light model."""
    api_device = pywilight.wilight_from_discovery(
        f"http://{HOST}:45995/wilight.xml",
        UPNP_MAC_ADDRESS,
        UPNP_MODEL_NAME_COLOR,
        UPNP_SERIAL,
        UPNP_MODEL_NUMBER,
    )
    api_device.set_dummy(True)
    patcher = patch("pywilight.device_from_host", return_value=api_device)
    with patcher:
        yield api_device
async def test_loading_light(
    hass: HomeAssistantType,
    dummy_device_from_host_light_fan,
    dummy_get_components_from_model_light,
) -> None:
    """Test the WiLight configuration entry loading."""
    # Using the light_fan device while removing "fan" from
    # get_components_from_model exercises light.py line 28.
    config_entry = await setup_integration(hass)
    assert config_entry
    assert config_entry.unique_id == WILIGHT_ID

    registry = await hass.helpers.entity_registry.async_get_registry()

    # First segment of the strip.
    entity_id = "light.wl000000000099_1"
    state = hass.states.get(entity_id)
    assert state
    assert state.state == STATE_OFF

    registry_entry = registry.async_get(entity_id)
    assert registry_entry
    assert registry_entry.unique_id == "WL000000000099_0"
async def test_on_off_light_state(
    hass: HomeAssistantType, dummy_device_from_host_pb
) -> None:
    """Test the change of state of the light switches."""
    await setup_integration(hass)
    entity_id = "light.wl000000000099_1"

    async def _call_and_get_state(service):
        # Invoke the light service synchronously and return the fresh state.
        await hass.services.async_call(
            LIGHT_DOMAIN,
            service,
            {ATTR_ENTITY_ID: entity_id},
            blocking=True,
        )
        await hass.async_block_till_done()
        return hass.states.get(entity_id)

    # Turn on.
    state = await _call_and_get_state(SERVICE_TURN_ON)
    assert state
    assert state.state == STATE_ON

    # Turn off.
    state = await _call_and_get_state(SERVICE_TURN_OFF)
    assert state
    assert state.state == STATE_OFF
async def test_dimmer_light_state(
    hass: HomeAssistantType, dummy_device_from_host_dimmer
) -> None:
    """Test the change of state of the light switches."""
    await setup_integration(hass)
    entity_id = "light.wl000000000099_1"

    async def _call_and_get_state(service, extra_data):
        # Invoke the light service with optional extra data, return new state.
        await hass.services.async_call(
            LIGHT_DOMAIN,
            service,
            {**extra_data, ATTR_ENTITY_ID: entity_id},
            blocking=True,
        )
        await hass.async_block_till_done()
        return hass.states.get(entity_id)

    # Turning on with brightness 42 reports ON at that level.
    state = await _call_and_get_state(SERVICE_TURN_ON, {ATTR_BRIGHTNESS: 42})
    assert state
    assert state.state == STATE_ON
    assert state.attributes.get(ATTR_BRIGHTNESS) == 42

    # Brightness 0 is reported as OFF.
    state = await _call_and_get_state(SERVICE_TURN_ON, {ATTR_BRIGHTNESS: 0})
    assert state
    assert state.state == STATE_OFF

    # Brightness 100 turns the light back on.
    state = await _call_and_get_state(SERVICE_TURN_ON, {ATTR_BRIGHTNESS: 100})
    assert state
    assert state.state == STATE_ON
    assert state.attributes.get(ATTR_BRIGHTNESS) == 100

    # Explicit turn off.
    state = await _call_and_get_state(SERVICE_TURN_OFF, {})
    assert state
    assert state.state == STATE_OFF

    # Turn on without a brightness payload.
    state = await _call_and_get_state(SERVICE_TURN_ON, {})
    assert state
    assert state.state == STATE_ON
async def test_color_light_state(
    hass: HomeAssistantType, dummy_device_from_host_color
) -> None:
    """Test the change of state of the light switches."""
    await setup_integration(hass)
    entity_id = "light.wl000000000099_1"

    async def _call_and_get_state(service, extra_data):
        # Invoke the light service with optional extra data, return new state.
        await hass.services.async_call(
            LIGHT_DOMAIN,
            service,
            {**extra_data, ATTR_ENTITY_ID: entity_id},
            blocking=True,
        )
        await hass.async_block_till_done()
        return hass.states.get(entity_id)

    def _rounded_hs(light_state):
        # Round the reported hue/saturation pair for exact comparison.
        reported = light_state.attributes.get(ATTR_HS_COLOR)
        return [round(reported[0]), round(reported[1])]

    # Brightness 42 with red hue/full saturation.
    state = await _call_and_get_state(
        SERVICE_TURN_ON, {ATTR_BRIGHTNESS: 42, ATTR_HS_COLOR: [0, 100]}
    )
    assert state
    assert state.state == STATE_ON
    assert state.attributes.get(ATTR_BRIGHTNESS) == 42
    assert _rounded_hs(state) == [0, 100]

    # Brightness 0 is reported as OFF.
    state = await _call_and_get_state(SERVICE_TURN_ON, {ATTR_BRIGHTNESS: 0})
    assert state
    assert state.state == STATE_OFF

    # Brightness 100 with a different hue/saturation.
    state = await _call_and_get_state(
        SERVICE_TURN_ON, {ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: [270, 50]}
    )
    assert state
    assert state.state == STATE_ON
    assert state.attributes.get(ATTR_BRIGHTNESS) == 100
    assert _rounded_hs(state) == [270, 50]

    # Explicit turn off.
    state = await _call_and_get_state(SERVICE_TURN_OFF, {})
    assert state
    assert state.state == STATE_OFF

    # Turn on without a payload.
    state = await _call_and_get_state(SERVICE_TURN_ON, {})
    assert state
    assert state.state == STATE_ON

    # Hue = 0, Saturation = 100 on an already-on light.
    state = await _call_and_get_state(SERVICE_TURN_ON, {ATTR_HS_COLOR: [0, 100]})
    assert state
    assert state.state == STATE_ON
    assert _rounded_hs(state) == [0, 100]

    # Brightness = 60 on an already-on light.
    state = await _call_and_get_state(SERVICE_TURN_ON, {ATTR_BRIGHTNESS: 60})
    assert state
    assert state.state == STATE_ON
    assert state.attributes.get(ATTR_BRIGHTNESS) == 60
| 26.114362 | 77 | 0.67237 | 1,229 | 9,819 | 5.040683 | 0.089504 | 0.067474 | 0.096852 | 0.059403 | 0.850363 | 0.836965 | 0.776594 | 0.770783 | 0.739952 | 0.739952 | 0 | 0.065009 | 0.233934 | 9,819 | 375 | 78 | 26.184 | 0.758575 | 0.036358 | 0 | 0.714777 | 0 | 0 | 0.118854 | 0.103115 | 0 | 0 | 0 | 0 | 0.14433 | 1 | 0.017182 | false | 0 | 0.024055 | 0 | 0.041237 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3f8470df09ef75160def2f830c1c18d325f2f92e | 153 | py | Python | src/main/python/floatingpoints/floating_points_model_helper.py | mkisser-tgm/floating_points | ef6c93c1d1fa5bbb38615c0d2fc7632b31f06e8e | [
"MIT"
] | null | null | null | src/main/python/floatingpoints/floating_points_model_helper.py | mkisser-tgm/floating_points | ef6c93c1d1fa5bbb38615c0d2fc7632b31f06e8e | [
"MIT"
] | null | null | null | src/main/python/floatingpoints/floating_points_model_helper.py | mkisser-tgm/floating_points | ef6c93c1d1fa5bbb38615c0d2fc7632b31f06e8e | [
"MIT"
] | null | null | null | from PyQt5.QtWidgets import QMessageBox
def openInfoPopup(window, title, text):
    """Show a modal information popup with an OK button over *window*.

    Returns the QMessageBox standard button the user pressed.
    """
    pressed = QMessageBox.information(window, title, text, QMessageBox.Ok)
    return pressed
| 30.6 | 71 | 0.797386 | 18 | 153 | 6.777778 | 0.722222 | 0.180328 | 0.245902 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007407 | 0.117647 | 153 | 4 | 72 | 38.25 | 0.896296 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
3fb8f3dc25d72802b9391fe97b5b206be644b936 | 1,859 | py | Python | alpyro_msgs/visualization_msgs/markerarray.py | rho2/alpyro_msgs | b5a680976c40c83df70d61bb2db1de32a1cde8d3 | [
"MIT"
] | 1 | 2020-12-13T13:07:10.000Z | 2020-12-13T13:07:10.000Z | alpyro_msgs/visualization_msgs/markerarray.py | rho2/alpyro_msgs | b5a680976c40c83df70d61bb2db1de32a1cde8d3 | [
"MIT"
] | null | null | null | alpyro_msgs/visualization_msgs/markerarray.py | rho2/alpyro_msgs | b5a680976c40c83df70d61bb2db1de32a1cde8d3 | [
"MIT"
] | null | null | null | from typing import List
from typing_extensions import Annotated
from typing import Final
from alpyro_msgs import RosMessage
from alpyro_msgs.visualization_msgs.marker import Marker
class MarkerArray(RosMessage):
    """ROS message binding for ``visualization_msgs/MarkerArray``."""

    # Fully-qualified ROS message type name.
    __msg_typ__ = "visualization_msgs/MarkerArray"
    # Base64-encoded full message definition text (field layout and constants).
    __msg_def__ = "dmlzdWFsaXphdGlvbl9tc2dzL01hcmtlcltdIG1hcmtlcnMKICB1aW50OCBBUlJPVz0wCiAgdWludDggQ1VCRT0xCiAgdWludDggU1BIRVJFPTIKICB1aW50OCBDWUxJTkRFUj0zCiAgdWludDggTElORV9TVFJJUD00CiAgdWludDggTElORV9MSVNUPTUKICB1aW50OCBDVUJFX0xJU1Q9NgogIHVpbnQ4IFNQSEVSRV9MSVNUPTcKICB1aW50OCBQT0lOVFM9OAogIHVpbnQ4IFRFWFRfVklFV19GQUNJTkc9OQogIHVpbnQ4IE1FU0hfUkVTT1VSQ0U9MTAKICB1aW50OCBUUklBTkdMRV9MSVNUPTExCiAgdWludDggQUREPTAKICB1aW50OCBNT0RJRlk9MAogIHVpbnQ4IERFTEVURT0yCiAgdWludDggREVMRVRFQUxMPTMKICBzdGRfbXNncy9IZWFkZXIgaGVhZGVyCiAgICB1aW50MzIgc2VxCiAgICB0aW1lIHN0YW1wCiAgICBzdHJpbmcgZnJhbWVfaWQKICBzdHJpbmcgbnMKICBpbnQzMiBpZAogIGludDMyIHR5cGUKICBpbnQzMiBhY3Rpb24KICBnZW9tZXRyeV9tc2dzL1Bvc2UgcG9zZQogICAgZ2VvbWV0cnlfbXNncy9Qb2ludCBwb3NpdGlvbgogICAgICBmbG9hdDY0IHgKICAgICAgZmxvYXQ2NCB5CiAgICAgIGZsb2F0NjQgegogICAgZ2VvbWV0cnlfbXNncy9RdWF0ZXJuaW9uIG9yaWVudGF0aW9uCiAgICAgIGZsb2F0NjQgeAogICAgICBmbG9hdDY0IHkKICAgICAgZmxvYXQ2NCB6CiAgICAgIGZsb2F0NjQgdwogIGdlb21ldHJ5X21zZ3MvVmVjdG9yMyBzY2FsZQogICAgZmxvYXQ2NCB4CiAgICBmbG9hdDY0IHkKICAgIGZsb2F0NjQgegogIHN0ZF9tc2dzL0NvbG9yUkdCQSBjb2xvcgogICAgZmxvYXQzMiByCiAgICBmbG9hdDMyIGcKICAgIGZsb2F0MzIgYgogICAgZmxvYXQzMiBhCiAgZHVyYXRpb24gbGlmZXRpbWUKICBib29sIGZyYW1lX2xvY2tlZAogIGdlb21ldHJ5X21zZ3MvUG9pbnRbXSBwb2ludHMKICAgIGZsb2F0NjQgeAogICAgZmxvYXQ2NCB5CiAgICBmbG9hdDY0IHoKICBzdGRfbXNncy9Db2xvclJHQkFbXSBjb2xvcnMKICAgIGZsb2F0MzIgcgogICAgZmxvYXQzMiBnCiAgICBmbG9hdDMyIGIKICAgIGZsb2F0MzIgYQogIHN0cmluZyB0ZXh0CiAgc3RyaW5nIG1lc2hfcmVzb3VyY2UKICBib29sIG1lc2hfdXNlX2VtYmVkZGVkX21hdGVyaWFscwoK"
    # MD5 checksum of the message definition, matching the ROS type hash.
    __md5_sum__ = "d155b9ce5188fbaf89745847fd5882d7"

    # Marker list field; Annotated metadata (0, 0) is presumably consumed by
    # the alpyro_msgs serializer — confirm against RosMessage's field handling.
    markers: Annotated[List[Marker], 0, 0]
| 132.785714 | 1,502 | 0.963421 | 46 | 1,859 | 38.5 | 0.5 | 0.01694 | 0.018069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.105902 | 0.024744 | 1,859 | 13 | 1,503 | 143 | 0.870932 | 0 | 0 | 0 | 0 | 0 | 0.83163 | 0.83163 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 1 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
3fc2c658de11a01d94e7e88ee233dd6cda7622f5 | 20,188 | py | Python | venv/lib/python3.9/site-packages/libcst/matchers/tests/test_visitors.py | qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | 630dcef73e6a258b6e9a52f934e2dd912ce741f8 | [
"Apache-2.0"
] | 3 | 2021-03-29T19:21:08.000Z | 2021-12-31T09:30:11.000Z | venv/lib/python3.9/site-packages/libcst/matchers/tests/test_visitors.py | qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | 630dcef73e6a258b6e9a52f934e2dd912ce741f8 | [
"Apache-2.0"
] | 1 | 2021-08-20T19:03:09.000Z | 2021-08-20T19:03:09.000Z | venv/lib/python3.9/site-packages/libcst/matchers/tests/test_visitors.py | qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | 630dcef73e6a258b6e9a52f934e2dd912ce741f8 | [
"Apache-2.0"
] | 3 | 2020-08-04T02:48:32.000Z | 2020-08-17T01:20:09.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import pickle
from typing import Union
import libcst as cst
import libcst.matchers as m
from libcst.matchers import (
MatchDecoratorMismatch,
MatcherDecoratableTransformer,
MatcherDecoratableVisitor,
leave,
visit,
)
from libcst.testing.utils import UnitTest
class MatchersVisitLeaveDecoratorTypingTest(UnitTest):
def test_valid_collector_simple(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString())
def _string_visit(self, node: cst.SimpleString) -> None:
pass
@leave(m.SimpleString())
def _string_leave(self, original_node: cst.SimpleString) -> None:
pass
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_simple(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString())
def _string_visit(self, node: cst.SimpleString) -> None:
pass
@leave(m.SimpleString())
def _string_leave(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> cst.SimpleString:
return updated_node
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_base_class(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString())
def _string_leave(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> cst.BaseExpression:
return updated_node
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_collector_visit_union(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString() | m.Name())
def _string_visit(self, node: Union[cst.SimpleString, cst.Name]) -> None:
pass
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_visit_union(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString() | m.Name())
def _string_visit(self, node: Union[cst.SimpleString, cst.Name]) -> None:
pass
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_collector_visit_superclass(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString() | m.Name())
def _string_visit(self, node: cst.BaseExpression) -> None:
pass
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_visit_superclass(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString() | m.Name())
def _string_visit(self, node: cst.BaseExpression) -> None:
pass
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_collector_leave_union(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@leave(m.SimpleString() | m.Name())
def _string_leave(self, node: Union[cst.SimpleString, cst.Name]) -> None:
pass
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_leave_union(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString() | m.Name())
def _string_leave(
self,
original_node: Union[cst.SimpleString, cst.Name],
updated_node: Union[cst.SimpleString, cst.Name],
) -> Union[cst.SimpleString, cst.Name]:
return updated_node
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_collector_leave_superclass(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@leave(m.SimpleString() | m.Name())
def _string_leave(self, node: cst.BaseExpression) -> None:
pass
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_leave_superclass(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString() | m.Name())
def _string_leave(
self,
original_node: cst.BaseExpression,
updated_node: cst.BaseExpression,
) -> cst.BaseExpression:
return updated_node
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_leave_return_maybe(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.AssignEqual())
def _assign_equal_leave(
self, original_node: cst.AssignEqual, updated_node: cst.AssignEqual
) -> Union[cst.AssignEqual, cst.MaybeSentinel]:
return updated_node
# Instantiating this class should not raise any errors
TestVisitor()
def test_valid_transformer_leave_return_remove(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.AssignTarget())
def _string_visit(
self, original_node: cst.AssignTarget, updated_node: cst.AssignTarget
) -> Union[cst.AssignTarget, cst.RemovalSentinel]:
return updated_node
# Instantiating this class should not raise any errors
TestVisitor()
def test_invalid_collector_visit_return(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString())
def _string_visit(self, node: cst.SimpleString) -> bool:
return False
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit should only decorate functions that do not return",
):
TestVisitor()
def test_invalid_transformer_visit_return(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString())
def _string_visit(self, node: cst.SimpleString) -> bool:
return False
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit should only decorate functions that do not return",
):
TestVisitor()
def test_invalid_transformer_visit_num_params(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString())
def _string_visit(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit should decorate functions which take 1 parameter",
):
TestVisitor()
def test_invalid_collector_visit_num_params(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString())
def _string_visit(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit should decorate functions which take 1 parameter",
):
TestVisitor()
def test_invalid_transformer_leave_num_params(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString())
def _string_leave(
self, original_node: cst.SimpleString
) -> cst.SimpleString:
return original_node
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave should decorate functions which take 2 parameters",
):
TestVisitor()
def test_invalid_collector_leave_num_params(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@leave(m.SimpleString())
def _string_leave(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave should decorate functions which take 1 parameter",
):
TestVisitor()
def test_invalid_collector_leave_return(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@leave(m.SimpleString())
def _string_leave(self, original_node: cst.SimpleString) -> bool:
return False
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave should only decorate functions that do not return",
):
TestVisitor()
def test_invalid_transformer_leave_return_invalid_superclass(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString())
def _string_visit(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> cst.BaseParenthesizableWhitespace:
return cst.SimpleWhitespace("")
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave decorated function cannot return the type BaseParenthesizableWhitespace",
):
TestVisitor()
def test_invalid_transformer_leave_return_wrong_type(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString())
def _string_visit(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> cst.Pass:
return cst.Pass()
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave decorated function cannot return the type Pass",
):
TestVisitor()
def test_invalid_transformer_leave_return_invalid_maybe(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString())
def _string_visit(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> Union[cst.SimpleString, cst.MaybeSentinel]:
return updated_node
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave decorated function cannot return the type MaybeSentinel",
):
TestVisitor()
def test_invalid_transformer_leave_return_invalid_remove(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString())
def _string_visit(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> Union[cst.SimpleString, cst.RemovalSentinel]:
return updated_node
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave decorated function cannot return the type RemovalSentinel",
):
TestVisitor()
def test_invalid_transformer_leave_return_invalid_union(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString() | m.Name())
def _string_leave(
self,
original_node: Union[cst.SimpleString, cst.Name],
updated_node: Union[cst.SimpleString, cst.Name],
) -> Union[cst.SimpleString, cst.Pass]:
return cst.SimpleString('""')
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave decorated function cannot return the type Pass",
):
TestVisitor()
def test_invalid_collector_visit_union(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString() | m.Name())
def _string_visit(self, node: cst.SimpleString) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit can be called with Name but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_invalid_transformer_visit_union(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString() | m.Name())
def _string_visit(self, node: cst.SimpleString) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit can be called with Name but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_invalid_collector_visit_superclass(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString() | m.Pass())
def _string_visit(self, node: cst.BaseExpression) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit can be called with Pass but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_invalid_transformer_visit_superclass(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString() | m.Pass())
def _string_visit(self, node: cst.BaseExpression) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit can be called with Pass but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_invalid_collector_leave_union(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@leave(m.SimpleString() | m.Name())
def _string_leave(self, node: cst.SimpleString) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave can be called with Name but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_invalid_transformer_leave_union(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString() | m.Name())
def _string_leave(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> cst.BaseExpression:
return updated_node
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave can be called with Name but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_invalid_collector_leave_superclass(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@leave(m.SimpleString() | m.Pass())
def _string_leave(self, node: cst.BaseExpression) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave can be called with Pass but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_invalid_transformer_leave_superclass(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString() | m.Pass())
def _string_leave(
self,
original_node: cst.BaseExpression,
updated_node: cst.BaseExpression,
) -> cst.BaseExpression:
return updated_node
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave can be called with Pass but the decorated function parameter annotations do not include this type",
):
TestVisitor()
def test_bad_visit_collecter_decorator(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@visit(m.SimpleString())
def visit_SimpleString(self, node: cst.SimpleString) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit should not decorate functions that are concrete visit or leave methods",
):
TestVisitor()
def test_bad_leave_collecter_decorator(self) -> None:
class TestVisitor(MatcherDecoratableVisitor):
@leave(m.SimpleString())
def leave_SimpleString(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave should not decorate functions that are concrete visit or leave methods",
):
TestVisitor()
def test_bad_visit_transform_decorator(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@visit(m.SimpleString())
def visit_SimpleString(self, node: cst.SimpleString) -> None:
pass
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@visit should not decorate functions that are concrete visit or leave methods",
):
TestVisitor()
def test_bad_leave_transform_decorator(self) -> None:
class TestVisitor(MatcherDecoratableTransformer):
@leave(m.SimpleString())
def leave_SimpleString(
self, original_node: cst.SimpleString, updated_node: cst.SimpleString
) -> cst.SimpleString:
return updated_node
# Instantiating this class should raise a runtime error
with self.assertRaisesRegex(
MatchDecoratorMismatch,
"@leave should not decorate functions that are concrete visit or leave methods",
):
TestVisitor()
def test_pickleable_exception(self) -> None:
    """MatchDecoratorMismatch must survive a pickle round-trip intact."""
    err = MatchDecoratorMismatch("func", "message")
    # Serialize and immediately deserialize in one expression.
    restored = pickle.loads(pickle.dumps(err))
    self.assertEqual(err.message, restored.message)
    self.assertEqual(err.func, restored.func)
| 39.506849 | 119 | 0.633792 | 1,908 | 20,188 | 6.553983 | 0.067086 | 0.061176 | 0.038465 | 0.071012 | 0.923551 | 0.919312 | 0.908437 | 0.892203 | 0.868613 | 0.845902 | 0 | 0.000281 | 0.295175 | 20,188 | 510 | 120 | 39.584314 | 0.878558 | 0.106648 | 0 | 0.786632 | 0 | 0 | 0.102929 | 0.001612 | 0 | 0 | 0 | 0 | 0.066838 | 1 | 0.197943 | false | 0.087404 | 0.015424 | 0.046272 | 0.357326 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
3fe2b21c2d3013e09c126a10a7b9f4a76f580432 | 120,398 | py | Python | QuantumLogo.py | ayushidubal/random-quantum-image | 261ec81cfd82e1b58b2b740de9e99a025f1bac17 | [
"MIT"
] | 8 | 2020-11-19T03:45:06.000Z | 2022-01-04T20:54:51.000Z | QuantumLogo.py | ayushidubal/random-quantum-image | 261ec81cfd82e1b58b2b740de9e99a025f1bac17 | [
"MIT"
] | null | null | null | QuantumLogo.py | ayushidubal/random-quantum-image | 261ec81cfd82e1b58b2b740de9e99a025f1bac17 | [
"MIT"
] | 1 | 2022-01-19T08:48:28.000Z | 2022-01-19T08:48:28.000Z | import pygame
# Dark reddish-brown RGB constant; unused in this visible setup —
# presumably referenced later in the script (TODO confirm).
my_col = (50,25,25)
pygame.init()
# Side length of the square display window, in pixels.
width = 200
board = pygame.display.set_mode((width, width))
# Start from an all-white canvas.
board.fill((255,255,255))
# Palette of pure red, green, and blue; likely indexed by the 0/1/2
# values in generated_cols below — verify against the drawing loop.
clrs = [(255, 0, 0), (0, 255, 0), (0, 0, 255)]
generated_cols = [2, 2, 0, 1, 2, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 1, 1, 2, 0, 1, 1, 1, 1, 0, 2, 0, 2, 1, 2, 2, 0, 0, 0, 2, 0, 2, 2, 1, 2, 0, 1, 1, 0, 2, 2, 2, 2, 0, 0, 1, 1, 2, 2, 1, 1, 0, 2, 1, 2, 2, 1, 1, 2, 2, 0, 2, 1, 2, 0, 1, 1, 2, 2, 1, 0, 2, 1, 2, 0, 2, 2, 2, 1, 0, 2, 1, 2, 2, 0, 1, 0, 2, 2, 0, 2, 2, 0, 0, 1, 1, 2, 1, 0, 2, 2, 2, 1, 1, 1, 1, 1, 0, 2, 0, 1, 0, 1, 0, 2, 1, 0, 0, 2, 1, 1, 2, 2, 0, 1, 2, 2, 1, 1, 2, 2, 1, 0, 2, 0, 0, 1, 0, 0, 0, 2, 2, 2, 0, 1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 0, 2, 2, 2, 1, 1, 1, 2, 0, 2, 2, 1, 2, 1, 2, 2, 0, 1, 0, 0, 2, 2, 0, 2, 0, 2, 1, 0, 2, 1, 1, 1, 1, 2, 2, 0, 1, 1, 1, 0, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 0, 0, 1, 2, 0, 1, 1, 2, 1, 0, 2, 2, 1, 0, 1, 2, 1, 0, 0, 2, 0, 1, 1, 0, 1, 0, 0, 2, 2, 2, 0, 0, 1, 0, 2, 1, 1, 0, 1, 1, 0, 1, 0, 2, 1, 2, 1, 1, 0, 0, 0, 0, 2, 2, 2, 0, 0, 1, 2, 1, 0, 0, 2, 1, 2, 2, 2, 2, 1, 0, 1, 2, 1, 0, 0, 0, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 0, 1, 1, 2, 2, 1, 1, 0, 0, 1, 0, 1, 1, 1, 2, 2, 1, 0, 2, 1, 2, 1, 2, 2, 2, 2, 0, 0, 1, 0, 2, 1, 1, 0, 2, 2, 2, 0, 0, 0, 1, 2, 2, 0, 1, 1, 2, 1, 1, 1, 2, 0, 2, 1, 0, 1, 2, 1, 0, 1, 0, 1, 0, 1, 1, 1, 2, 2, 0, 1, 1, 0, 1, 1, 0, 2, 1, 2, 0, 1, 2, 0, 1, 2, 1, 0, 2, 1, 2, 2, 2, 2, 1, 0, 2, 1, 2, 2, 0, 1, 0, 2, 1, 2, 1, 1, 2, 1, 2, 0, 1, 1, 1, 0, 2, 2, 2, 0, 2, 1, 2, 1, 1, 0, 1, 0, 0, 1, 0, 2, 0, 1, 2, 0, 0, 0, 0, 0, 1, 2, 0, 2, 2, 2, 1, 0, 2, 2, 2, 2, 1, 1, 2, 1, 2, 0, 2, 2, 0, 1, 2, 2, 1, 1, 1, 0, 0, 0, 0, 2, 1, 1, 2, 2, 0, 1, 2, 1, 0, 1, 1, 0, 2, 2, 1, 1, 0, 1, 1, 2, 1, 2, 0, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 0, 1, 2, 2, 0, 1, 0, 2, 2, 2, 1, 2, 1, 2, 2, 1, 0, 2, 1, 1, 1, 2, 2, 1, 2, 1, 2, 1, 2, 2, 0, 1, 2, 0, 1, 2, 0, 2, 2, 1, 1, 0, 1, 1, 1, 0, 2, 2, 2, 1, 0, 1, 1, 0, 1, 2, 0, 1, 1, 1, 1, 2, 2, 2, 1, 0, 2, 2, 2, 2, 0, 2, 2, 2, 1, 0, 0, 2, 1, 2, 1, 2, 0, 2, 0, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 0, 2, 2, 2, 2, 0, 0, 2, 0, 1, 2, 2, 1, 2, 2, 1, 2, 0, 0, 1, 2, 0, 1, 1, 1, 1, 2, 0, 1, 2, 2, 2, 2, 0, 2, 0, 0, 1, 1, 0, 2, 1, 2, 2, 1, 0, 1, 0, 1, 2, 2, 0, 1, 1, 0, 1, 2, 2, 2, 
2, 1, 1, 2, 1, 1, 2, 1, 2, 2, 2, 1, 1, 0, 1, 2, 2, 0, 2, 2, 2, 1, 0, 1, 2, 1, 1, 2, 2, 2, 2, 2, 0, 1, 2, 1, 0, 2, 0, 1, 0, 0, 2, 1, 1, 2, 2, 0, 2, 1, 1, 0, 1, 2, 2, 1, 0, 2, 1, 0, 0, 0, 1, 2, 2, 0, 2, 2, 1, 0, 2, 2, 1, 0, 2, 1, 1, 1, 1, 0, 2, 0, 0, 2, 1, 0, 2, 2, 2, 2, 0, 2, 0, 2, 2, 1, 2, 2, 1, 1, 1, 2, 1, 2, 0, 2, 0, 0, 2, 1, 1, 1, 2, 2, 0, 2, 1, 2, 1, 0, 1, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 2, 0, 2, 2, 2, 1, 0, 2, 1, 2, 1, 1, 1, 2, 0, 0, 1, 0, 0, 1, 2, 0, 0, 1, 0, 2, 0, 1, 1, 1, 1, 2, 1, 2, 1, 0, 2, 2, 2, 0, 2, 0, 1, 2, 2, 1, 1, 0, 1, 2, 0, 1, 2, 1, 1, 1, 2, 0, 1, 2, 2, 0, 0, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 0, 2, 1, 1, 0, 1, 2, 1, 2, 0, 1, 1, 0, 1, 2, 2, 1, 2, 2, 1, 1, 0, 1, 1, 1, 0, 2, 1, 0, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 1, 0, 0, 1, 1, 2, 1, 1, 1, 2, 2, 1, 0, 2, 0, 1, 0, 0, 0, 2, 1, 0, 2, 0, 2, 0, 0, 2, 2, 1, 2, 2, 1, 0, 1, 1, 2, 1, 0, 1, 1, 0, 2, 0, 1, 0, 1, 0, 0, 2, 0, 0, 1, 1, 2, 2, 2, 2, 0, 2, 2, 0, 1, 1, 2, 1, 1, 0, 2, 0, 2, 2, 1, 1, 0, 0, 2, 2, 0, 1, 1, 0, 1, 1, 1, 2, 1, 1, 0, 2, 1, 2, 2, 2, 2, 0, 2, 2, 0, 2, 0, 2, 0, 2, 1, 1, 2, 2, 0, 0, 2, 2, 1, 2, 1, 1, 0, 1, 2, 2, 2, 0, 1, 2, 1, 2, 1, 1, 0, 0, 2, 2, 0, 1, 2, 1, 1, 0, 0, 0, 1, 0, 2, 1, 2, 1, 2, 1, 0, 2, 1, 0, 1, 2, 1, 1, 2, 0, 2, 1, 1, 1, 0, 1, 2, 1, 2, 1, 0, 1, 2, 2, 2, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 0, 1, 0, 1, 0, 1, 0, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 2, 2, 0, 0, 2, 2, 2, 2, 1, 0, 1, 2, 2, 0, 2, 1, 0, 0, 2, 2, 2, 0, 0, 2, 1, 0, 1, 0, 1, 1, 1, 2, 1, 2, 1, 1, 0, 1, 2, 2, 2, 0, 2, 0, 1, 1, 0, 1, 2, 2, 1, 0, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 0, 2, 1, 0, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 1, 1, 0, 0, 0, 1, 1, 1, 2, 1, 1, 0, 0, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 2, 0, 2, 1, 1, 2, 1, 2, 1, 2, 0, 0, 0, 2, 2, 1, 1, 2, 2, 2, 1, 1, 0, 2, 1, 2, 2, 2, 0, 2, 2, 2, 0, 0, 2, 1, 0, 1, 1, 2, 2, 2, 1, 0, 0, 2, 1, 0, 1, 1, 2, 2, 2, 1, 0, 1, 2, 2, 1, 1, 2, 1, 2, 1, 1, 2, 2, 0, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 0, 1, 0, 1, 1, 2, 2, 0, 2, 0, 1, 2, 0, 0, 0, 1, 0, 0, 
2, 2, 0, 0, 2, 1, 0, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 2, 1, 0, 0, 1, 2, 0, 2, 2, 2, 1, 1, 1, 1, 1, 0, 0, 2, 2, 0, 2, 1, 1, 2, 2, 1, 1, 0, 0, 1, 0, 0, 1, 1, 2, 2, 1, 2, 0, 2, 0, 1, 2, 0, 2, 1, 0, 2, 2, 2, 1, 0, 0, 0, 2, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 0, 1, 0, 2, 2, 0, 2, 2, 1, 1, 2, 1, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 0, 0, 1, 0, 0, 2, 2, 0, 2, 1, 2, 2, 1, 0, 2, 0, 0, 2, 1, 1, 2, 1, 2, 2, 1, 2, 0, 2, 1, 2, 1, 0, 0, 1, 1, 2, 2, 1, 1, 0, 2, 1, 1, 0, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 0, 2, 1, 1, 0, 1, 2, 1, 2, 0, 2, 0, 1, 0, 2, 2, 1, 2, 1, 1, 0, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 0, 1, 0, 0, 2, 2, 2, 1, 1, 2, 1, 1, 0, 2, 0, 1, 1, 2, 1, 1, 0, 2, 0, 1, 0, 1, 1, 1, 1, 2, 0, 2, 1, 1, 2, 1, 0, 2, 1, 1, 2, 0, 2, 2, 1, 1, 2, 0, 2, 2, 1, 2, 1, 2, 1, 1, 1, 1, 0, 0, 2, 2, 2, 1, 1, 0, 0, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 1, 0, 0, 1, 2, 0, 1, 1, 0, 1, 0, 2, 1, 1, 1, 2, 2, 1, 2, 2, 1, 2, 2, 0, 2, 0, 1, 1, 2, 1, 0, 0, 0, 1, 2, 0, 1, 1, 2, 1, 2, 1, 1, 2, 0, 0, 1, 2, 2, 2, 1, 1, 1, 1, 0, 1, 0, 1, 1, 2, 2, 2, 1, 2, 0, 1, 2, 2, 1, 0, 0, 2, 0, 0, 1, 1, 0, 2, 2, 1, 2, 1, 0, 1, 1, 1, 1, 2, 1, 1, 0, 0, 1, 0, 1, 1, 2, 0, 1, 1, 2, 1, 1, 1, 1, 1, 1, 0, 2, 1, 2, 1, 0, 1, 2, 0, 1, 2, 0, 2, 1, 1, 2, 0, 1, 1, 2, 0, 0, 1, 0, 1, 2, 2, 1, 2, 1, 0, 2, 2, 2, 2, 1, 0, 1, 2, 0, 0, 2, 1, 2, 1, 1, 0, 0, 1, 0, 0, 2, 2, 1, 0, 1, 0, 1, 0, 2, 0, 2, 1, 2, 2, 2, 2, 2, 2, 0, 2, 1, 2, 0, 1, 1, 1, 1, 2, 2, 2, 2, 0, 1, 0, 2, 1, 0, 1, 0, 1, 1, 2, 2, 0, 2, 1, 2, 1, 0, 1, 2, 1, 0, 1, 2, 0, 0, 1, 0, 2, 0, 0, 1, 1, 2, 1, 0, 0, 0, 1, 2, 2, 2, 1, 0, 2, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 1, 1, 2, 2, 2, 1, 0, 2, 2, 2, 2, 0, 1, 0, 2, 1, 2, 0, 1, 1, 2, 2, 2, 1, 1, 1, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 0, 1, 2, 2, 0, 1, 0, 2, 2, 2, 2, 2, 0, 0, 0, 0, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 0, 1, 2, 0, 1, 2, 2, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 2, 1, 0, 0, 1, 2, 2, 0, 1, 0, 0, 1, 0, 0, 1, 2, 1, 0, 1, 2, 1, 2, 0, 1, 0, 1, 2, 1, 1, 2, 2, 2, 1, 0, 1, 0, 2, 2, 0, 1, 2, 1, 2, 
2, 0, 2, 1, 1, 2, 2, 2, 0, 0, 0, 0, 0, 1, 0, 2, 1, 0, 2, 1, 2, 2, 2, 1, 0, 1, 1, 0, 2, 2, 2, 2, 1, 1, 1, 2, 1, 2, 2, 0, 2, 2, 2, 0, 2, 1, 1, 2, 2, 2, 0, 2, 0, 1, 0, 0, 0, 0, 1, 1, 2, 0, 2, 0, 2, 1, 2, 1, 2, 0, 1, 2, 0, 2, 0, 0, 1, 1, 0, 0, 0, 2, 2, 2, 0, 1, 2, 2, 2, 0, 0, 1, 2, 2, 2, 2, 1, 2, 1, 1, 0, 2, 2, 1, 0, 2, 1, 1, 2, 2, 0, 1, 2, 0, 2, 0, 0, 2, 0, 0, 1, 1, 0, 0, 1, 1, 2, 0, 2, 2, 1, 1, 0, 2, 1, 2, 2, 1, 2, 0, 2, 1, 0, 1, 1, 2, 0, 0, 2, 0, 0, 1, 1, 2, 0, 1, 1, 1, 2, 2, 1, 2, 0, 2, 0, 0, 2, 1, 0, 2, 0, 0, 0, 2, 0, 2, 1, 2, 2, 2, 2, 0, 0, 2, 1, 1, 0, 0, 2, 2, 0, 2, 1, 0, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 2, 0, 2, 0, 0, 2, 0, 1, 0, 0, 2, 0, 0, 1, 0, 1, 1, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 0, 0, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 0, 1, 2, 2, 2, 1, 0, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 2, 1, 0, 1, 0, 0, 1, 0, 2, 1, 2, 0, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 2, 2, 2, 0, 1, 1, 1, 0, 1, 2, 2, 1, 0, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 0, 2, 1, 0, 2, 1, 1, 2, 0, 1, 0, 2, 1, 2, 2, 0, 2, 2, 0, 0, 0, 0, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 2, 0, 1, 1, 2, 2, 0, 2, 0, 1, 2, 2, 0, 1, 2, 2, 0, 2, 2, 0, 0, 2, 0, 1, 2, 1, 1, 0, 1, 2, 0, 1, 1, 2, 2, 1, 1, 1, 0, 2, 1, 1, 2, 2, 0, 0, 0, 1, 0, 0, 1, 2, 1, 0, 2, 0, 1, 2, 1, 2, 0, 0, 0, 1, 0, 1, 1, 1, 0, 2, 0, 0, 2, 1, 2, 1, 0, 0, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 0, 2, 2, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 1, 2, 0, 1, 1, 1, 2, 0, 0, 1, 0, 0, 2, 2, 2, 0, 0, 1, 1, 0, 2, 2, 2, 0, 2, 0, 0, 1, 0, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 0, 1, 0, 0, 1, 2, 1, 1, 1, 2, 0, 1, 2, 0, 1, 0, 1, 0, 0, 2, 1, 1, 0, 1, 0, 1, 2, 0, 0, 0, 0, 1, 1, 2, 1, 0, 1, 1, 2, 1, 1, 2, 1, 1, 2, 0, 2, 2, 2, 2, 1, 2, 0, 2, 2, 0, 1, 1, 0, 0, 1, 2, 0, 1, 2, 2, 1, 0, 2, 1, 2, 2, 0, 1, 2, 0, 1, 0, 2, 2, 2, 1, 0, 0, 2, 1, 2, 1, 2, 1, 0, 1, 1, 2, 2, 1, 2, 0, 2, 2, 2, 2, 1, 2, 2, 0, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 0, 1, 2, 0, 2, 1, 0, 2, 2, 0, 1, 2, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 2, 2, 2, 1, 2, 2, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 2, 0, 0, 0, 1, 2, 1, 1, 1, 2, 1, 2, 0, 0, 1, 1, 1, 2, 2, 2, 2, 0, 2, 1, 2, 2, 1, 
2, 2, 1, 0, 1, 0, 1, 1, 0, 2, 0, 1, 1, 2, 0, 2, 1, 2, 1, 0, 2, 0, 2, 2, 1, 2, 2, 0, 1, 0, 2, 1, 2, 2, 1, 2, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 2, 2, 1, 1, 0, 0, 1, 1, 2, 1, 2, 2, 1, 0, 0, 0, 0, 2, 0, 2, 0, 1, 2, 1, 1, 0, 1, 1, 2, 0, 2, 2, 1, 1, 1, 0, 1, 2, 1, 1, 1, 0, 0, 0, 2, 0, 0, 0, 0, 2, 0, 1, 2, 1, 0, 1, 0, 2, 1, 0, 0, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 0, 2, 2, 1, 0, 2, 0, 0, 1, 2, 0, 2, 2, 1, 0, 0, 2, 1, 0, 1, 2, 1, 2, 0, 1, 2, 1, 1, 2, 1, 1, 2, 2, 2, 0, 1, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 1, 0, 0, 2, 0, 1, 2, 0, 2, 2, 0, 2, 2, 2, 1, 2, 2, 0, 1, 1, 2, 0, 2, 0, 1, 1, 2, 1, 1, 1, 1, 0, 2, 1, 2, 2, 2, 1, 2, 2, 1, 1, 2, 1, 0, 2, 2, 2, 0, 1, 1, 1, 2, 0, 1, 2, 2, 1, 1, 1, 0, 2, 0, 1, 0, 0, 1, 1, 2, 0, 2, 1, 2, 0, 1, 1, 2, 2, 2, 1, 0, 2, 1, 0, 2, 0, 0, 2, 0, 2, 1, 0, 0, 2, 0, 1, 0, 1, 0, 2, 1, 1, 1, 1, 1, 2, 2, 0, 0, 2, 2, 1, 1, 1, 1, 0, 1, 2, 1, 1, 2, 2, 2, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 2, 0, 1, 1, 0, 2, 0, 2, 1, 2, 2, 0, 1, 1, 1, 1, 0, 1, 0, 1, 2, 2, 1, 1, 2, 2, 2, 0, 2, 1, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 0, 0, 1, 1, 1, 0, 2, 1, 1, 1, 0, 1, 2, 2, 1, 0, 2, 1, 1, 1, 0, 2, 1, 0, 2, 0, 2, 1, 2, 2, 2, 0, 1, 0, 2, 2, 2, 1, 1, 1, 0, 1, 0, 1, 2, 2, 1, 1, 2, 0, 2, 0, 2, 1, 2, 2, 2, 0, 2, 1, 1, 2, 0, 1, 0, 2, 1, 1, 0, 0, 2, 2, 0, 1, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 2, 0, 0, 2, 2, 0, 2, 1, 2, 2, 2, 0, 2, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 2, 2, 1, 1, 1, 1, 2, 0, 2, 0, 0, 1, 0, 2, 0, 0, 1, 2, 2, 1, 1, 1, 2, 1, 0, 1, 0, 0, 2, 1, 2, 2, 0, 1, 2, 1, 0, 0, 1, 2, 1, 2, 1, 0, 2, 1, 0, 1, 0, 0, 2, 1, 2, 1, 0, 2, 1, 0, 2, 2, 1, 0, 2, 2, 1, 0, 2, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 0, 2, 2, 2, 2, 0, 2, 1, 2, 1, 2, 0, 1, 0, 1, 2, 0, 2, 0, 1, 2, 2, 1, 2, 1, 1, 2, 2, 0, 0, 0, 1, 2, 1, 2, 1, 2, 1, 2, 2, 0, 2, 1, 1, 0, 0, 2, 2, 2, 1, 1, 2, 1, 0, 1, 2, 2, 1, 2, 0, 1, 0, 1, 1, 0, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 1, 0, 1, 2, 0, 2, 0, 1, 2, 2, 2, 2, 2, 2, 2, 0, 1, 1, 0, 2, 1, 1, 0, 2, 0, 0, 2, 1, 2, 0, 2, 1, 1, 0, 2, 0, 0, 1, 1, 0, 2, 0, 1, 1, 2, 0, 1, 1, 0, 2, 2, 1, 
2, 2, 2, 0, 2, 1, 2, 1, 1, 1, 1, 0, 2, 0, 0, 1, 2, 2, 0, 2, 2, 0, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 1, 0, 0, 1, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 0, 2, 2, 2, 2, 0, 2, 1, 2, 0, 0, 1, 0, 1, 0, 1, 2, 1, 0, 2, 2, 0, 2, 2, 2, 1, 2, 1, 1, 2, 1, 0, 0, 0, 2, 1, 1, 2, 0, 1, 1, 1, 1, 0, 2, 2, 1, 2, 0, 1, 2, 1, 1, 0, 1, 0, 2, 2, 1, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 0, 0, 0, 2, 1, 2, 0, 2, 1, 1, 2, 2, 0, 0, 0, 1, 0, 1, 2, 0, 1, 2, 2, 1, 1, 1, 0, 2, 0, 2, 0, 1, 1, 2, 2, 2, 0, 1, 2, 2, 0, 1, 2, 0, 2, 2, 2, 2, 1, 0, 0, 2, 1, 2, 1, 1, 2, 2, 1, 0, 1, 1, 0, 0, 2, 1, 0, 2, 1, 0, 0, 0, 1, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 1, 2, 1, 2, 1, 0, 1, 2, 0, 0, 0, 0, 2, 1, 2, 0, 1, 2, 1, 2, 1, 2, 0, 2, 1, 2, 2, 1, 1, 2, 1, 1, 2, 1, 0, 2, 0, 2, 1, 0, 0, 2, 2, 1, 1, 2, 1, 1, 0, 0, 1, 2, 2, 1, 1, 2, 1, 0, 1, 1, 1, 2, 2, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 2, 1, 2, 2, 1, 0, 1, 2, 1, 1, 1, 0, 2, 0, 0, 1, 0, 2, 1, 0, 2, 1, 2, 2, 0, 1, 2, 1, 1, 1, 1, 1, 0, 0, 1, 1, 2, 1, 2, 1, 2, 1, 0, 2, 2, 2, 0, 2, 1, 2, 2, 2, 0, 1, 2, 1, 2, 0, 1, 1, 2, 0, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 0, 2, 0, 2, 0, 0, 1, 2, 2, 1, 1, 2, 1, 1, 2, 1, 2, 0, 1, 1, 0, 0, 1, 2, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 0, 0, 2, 0, 1, 0, 2, 2, 2, 0, 1, 0, 2, 0, 1, 2, 2, 2, 1, 2, 2, 0, 2, 2, 2, 1, 2, 0, 2, 0, 1, 1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 1, 0, 2, 1, 0, 2, 0, 2, 0, 1, 2, 2, 1, 2, 0, 0, 0, 0, 1, 0, 2, 1, 2, 0, 1, 1, 0, 2, 1, 2, 1, 2, 2, 0, 2, 2, 2, 2, 1, 0, 2, 0, 1, 1, 2, 1, 1, 2, 1, 0, 0, 2, 1, 2, 0, 2, 2, 1, 0, 0, 2, 2, 1, 2, 0, 0, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 0, 2, 1, 2, 2, 2, 2, 0, 2, 2, 0, 1, 2, 0, 2, 0, 1, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 0, 0, 2, 2, 2, 1, 1, 1, 1, 2, 0, 1, 0, 0, 2, 1, 2, 0, 2, 2, 2, 2, 1, 1, 2, 0, 1, 2, 1, 2, 0, 0, 1, 1, 2, 2, 2, 1, 0, 1, 1, 2, 2, 1, 1, 2, 1, 2, 2, 1, 0, 2, 2, 2, 2, 1, 0, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 0, 1, 1, 1, 2, 0, 0, 0, 2, 1, 2, 2, 2, 0, 1, 1, 2, 0, 0, 1, 2, 2, 1, 0, 1, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 2, 1, 0, 2, 2, 2, 0, 1, 1, 0, 2, 2, 2, 1, 0, 
1, 0, 2, 2, 2, 1, 0, 2, 0, 0, 2, 1, 2, 2, 2, 1, 2, 2, 0, 1, 0, 1, 1, 2, 0, 2, 2, 1, 2, 2, 1, 1, 2, 1, 2, 0, 0, 1, 2, 2, 2, 1, 2, 1, 1, 1, 0, 2, 2, 2, 0, 1, 1, 2, 1, 2, 1, 2, 0, 1, 0, 2, 2, 1, 0, 2, 0, 1, 1, 2, 2, 1, 1, 2, 1, 1, 0, 2, 2, 0, 0, 2, 2, 2, 2, 2, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 2, 2, 1, 1, 0, 1, 2, 2, 2, 2, 2, 0, 0, 1, 2, 1, 2, 0, 1, 2, 1, 1, 1, 2, 0, 1, 1, 2, 2, 2, 0, 1, 2, 2, 1, 2, 0, 2, 2, 0, 1, 2, 2, 1, 1, 2, 2, 2, 0, 1, 2, 0, 2, 2, 0, 2, 2, 0, 1, 2, 1, 1, 2, 1, 2, 1, 0, 0, 1, 0, 0, 0, 2, 1, 0, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 1, 1, 1, 1, 2, 1, 2, 0, 2, 1, 1, 1, 1, 0, 0, 2, 2, 1, 2, 1, 0, 2, 0, 1, 2, 1, 2, 2, 2, 1, 0, 0, 0, 2, 1, 2, 1, 1, 2, 2, 2, 2, 0, 2, 2, 0, 2, 0, 0, 2, 1, 0, 1, 0, 0, 1, 1, 2, 0, 1, 2, 1, 2, 0, 0, 2, 1, 1, 0, 1, 0, 0, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 0, 0, 0, 0, 2, 1, 0, 2, 2, 2, 1, 0, 1, 0, 1, 2, 1, 2, 0, 1, 1, 2, 1, 2, 2, 2, 2, 0, 1, 2, 1, 1, 0, 2, 2, 2, 2, 0, 2, 0, 1, 0, 1, 2, 2, 0, 2, 2, 1, 0, 1, 1, 2, 1, 1, 0, 0, 0, 2, 2, 2, 1, 2, 2, 0, 2, 2, 1, 1, 1, 0, 2, 1, 2, 1, 1, 0, 2, 2, 0, 2, 1, 2, 0, 0, 2, 0, 0, 2, 1, 1, 1, 0, 0, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 0, 1, 2, 1, 2, 1, 2, 2, 0, 0, 1, 2, 2, 1, 1, 2, 1, 1, 0, 2, 1, 1, 2, 1, 1, 1, 2, 1, 0, 2, 1, 1, 1, 0, 0, 0, 2, 2, 0, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 0, 2, 0, 1, 2, 0, 2, 2, 0, 1, 2, 2, 1, 1, 2, 2, 1, 0, 2, 1, 1, 2, 0, 2, 0, 1, 0, 0, 2, 1, 1, 0, 2, 2, 2, 2, 0, 2, 1, 2, 0, 0, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 0, 2, 0, 2, 2, 2, 2, 2, 1, 1, 2, 1, 0, 1, 2, 0, 1, 2, 2, 0, 1, 1, 2, 1, 1, 2, 1, 2, 0, 0, 2, 1, 1, 1, 1, 2, 2, 0, 1, 0, 0, 1, 1, 1, 2, 1, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 0, 1, 0, 1, 1, 2, 0, 1, 0, 1, 1, 1, 0, 0, 1, 2, 1, 1, 1, 2, 2, 1, 1, 0, 1, 2, 2, 1, 0, 2, 1, 1, 0, 2, 1, 2, 0, 1, 0, 2, 1, 1, 2, 0, 1, 1, 2, 1, 0, 2, 1, 1, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 1, 1, 2, 2, 1, 0, 0, 1, 1, 2, 0, 2, 2, 1, 0, 0, 2, 2, 
2, 1, 0, 2, 1, 1, 0, 2, 2, 2, 0, 2, 0, 2, 1, 1, 2, 0, 2, 1, 0, 2, 2, 1, 2, 2, 2, 2, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 2, 2, 1, 1, 0, 0, 0, 1, 1, 0, 2, 1, 2, 2, 1, 0, 0, 1, 1, 0, 1, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1, 1, 2, 0, 2, 0, 1, 1, 0, 1, 2, 1, 0, 0, 0, 1, 1, 2, 1, 1, 1, 2, 1, 0, 1, 1, 2, 2, 1, 2, 2, 0, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 2, 2, 1, 1, 0, 2, 1, 1, 2, 0, 0, 1, 1, 1, 1, 1, 2, 0, 1, 1, 2, 1, 0, 1, 1, 2, 1, 2, 2, 1, 2, 1, 2, 2, 0, 2, 2, 1, 1, 0, 0, 0, 1, 2, 1, 0, 1, 0, 1, 1, 1, 0, 0, 2, 0, 1, 2, 0, 2, 1, 2, 2, 1, 1, 0, 2, 0, 2, 0, 0, 1, 0, 0, 0, 1, 2, 2, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 0, 1, 1, 2, 1, 1, 2, 0, 2, 2, 2, 1, 0, 1, 0, 0, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 0, 2, 0, 0, 2, 0, 1, 1, 2, 0, 2, 1, 1, 1, 2, 2, 2, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2, 2, 0, 2, 1, 1, 1, 2, 1, 0, 2, 1, 1, 0, 2, 2, 1, 1, 0, 1, 2, 2, 1, 0, 0, 2, 2, 2, 1, 1, 2, 0, 2, 2, 1, 0, 2, 1, 1, 0, 1, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 2, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 2, 0, 2, 2, 2, 0, 2, 2, 1, 0, 1, 0, 0, 1, 1, 2, 0, 2, 2, 1, 1, 0, 1, 1, 2, 0, 0, 2, 2, 0, 2, 0, 1, 2, 2, 1, 0, 2, 2, 0, 1, 2, 2, 1, 2, 1, 1, 1, 0, 1, 1, 2, 2, 2, 1, 1, 0, 1, 0, 0, 2, 1, 1, 1, 2, 1, 0, 1, 1, 2, 2, 1, 1, 2, 0, 2, 1, 2, 0, 1, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 0, 1, 2, 0, 1, 1, 2, 1, 2, 2, 1, 1, 2, 1, 2, 1, 1, 0, 2, 0, 1, 0, 1, 0, 1, 1, 1, 0, 2, 2, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 2, 2, 1, 2, 2, 1, 0, 2, 2, 0, 0, 1, 0, 2, 2, 1, 2, 1, 0, 2, 2, 1, 0, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 0, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 0, 0, 1, 0, 1, 2, 0, 1, 1, 1, 2, 2, 1, 2, 0, 2, 0, 1, 1, 1, 2, 1, 1, 2, 1, 0, 1, 0, 2, 1, 1, 2, 1, 0, 0, 2, 0, 2, 0, 0, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 0, 2, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 2, 0, 1, 2, 2, 0, 1, 0, 0, 1, 2, 2, 2, 0, 0, 1, 0, 0, 1, 2, 0, 2, 1, 0, 2, 2, 2, 0, 2, 0, 1, 1, 0, 1, 2, 1, 2, 0, 1, 2, 2, 2, 1, 1, 2, 2, 2, 0, 0, 2, 0, 0, 1, 2, 2, 2, 0, 1, 1, 1, 1, 2, 2, 0, 2, 2, 2, 0, 2, 1, 0, 1, 2, 1, 1, 2, 1, 2, 2, 1, 0, 0, 1, 2, 2, 2, 1, 0, 1, 2, 2, 2, 1, 1, 2, 0, 2, 0, 2, 1, 2, 1, 2, 2, 0, 0, 
1, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 2, 1, 0, 1, 1, 2, 0, 0, 0, 1, 2, 0, 1, 1, 0, 0, 1, 0, 0, 1, 1, 2, 0, 2, 1, 1, 2, 0, 2, 1, 1, 1, 2, 0, 0, 0, 1, 1, 2, 2, 2, 0, 2, 1, 2, 1, 2, 0, 2, 1, 1, 0, 2, 2, 2, 2, 0, 0, 2, 1, 1, 2, 1, 1, 0, 2, 2, 2, 1, 2, 2, 2, 0, 2, 2, 2, 2, 1, 1, 1, 0, 2, 2, 2, 2, 1, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 1, 2, 2, 1, 0, 0, 0, 2, 2, 1, 0, 1, 2, 1, 2, 2, 0, 0, 0, 0, 0, 0, 1, 2, 1, 1, 0, 2, 0, 2, 1, 1, 2, 1, 2, 1, 1, 2, 0, 0, 0, 2, 2, 1, 2, 2, 2, 2, 0, 1, 1, 2, 2, 1, 0, 1, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 0, 2, 1, 0, 1, 2, 2, 1, 0, 1, 2, 0, 2, 1, 0, 2, 1, 1, 2, 1, 2, 0, 1, 2, 1, 1, 2, 0, 0, 2, 0, 0, 0, 2, 2, 0, 1, 1, 0, 0, 1, 1, 1, 1, 2, 1, 2, 0, 2, 0, 2, 0, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 0, 2, 1, 1, 2, 0, 2, 0, 2, 0, 2, 1, 2, 2, 2, 2, 0, 1, 2, 2, 1, 2, 0, 2, 2, 1, 1, 2, 2, 2, 1, 1, 1, 2, 0, 1, 0, 1, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 0, 2, 1, 0, 0, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 0, 1, 0, 2, 1, 0, 1, 1, 2, 2, 1, 0, 0, 1, 1, 0, 2, 1, 1, 2, 2, 2, 1, 1, 0, 0, 1, 0, 0, 2, 0, 2, 2, 0, 0, 1, 1, 2, 0, 1, 1, 2, 0, 2, 0, 0, 2, 1, 2, 1, 2, 0, 2, 2, 2, 1, 1, 1, 2, 1, 1, 0, 2, 2, 2, 2, 0, 2, 1, 1, 1, 2, 0, 2, 1, 1, 2, 1, 1, 2, 1, 0, 1, 1, 0, 2, 2, 1, 0, 2, 2, 2, 0, 2, 2, 1, 2, 2, 2, 1, 1, 2, 0, 1, 1, 1, 2, 2, 1, 2, 2, 2, 1, 0, 1, 1, 1, 2, 0, 2, 2, 0, 2, 0, 1, 2, 2, 0, 0, 2, 2, 1, 0, 2, 2, 0, 2, 1, 0, 1, 1, 1, 2, 1, 0, 2, 1, 2, 2, 0, 2, 1, 2, 1, 1, 2, 0, 0, 2, 2, 2, 0, 2, 1, 2, 0, 1, 2, 1, 0, 2, 2, 1, 0, 0, 1, 1, 2, 0, 0, 2, 1, 2, 0, 2, 2, 0, 1, 0, 2, 1, 1, 2, 1, 2, 2, 2, 1, 0, 0, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 2, 1, 0, 2, 2, 2, 0, 1, 0, 1, 2, 0, 2, 2, 1, 1, 0, 2, 1, 1, 1, 0, 2, 1, 2, 0, 1, 2, 1, 0, 1, 1, 2, 1, 0, 2, 2, 1, 1, 2, 1, 2, 1, 2, 0, 1, 2, 1, 0, 1, 2, 1, 1, 1, 1, 2, 0, 0, 2, 2, 0, 1, 2, 2, 1, 1, 2, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 2, 1, 1, 2, 1, 0, 2, 0, 1, 2, 2, 1, 2, 0, 2, 0, 2, 2, 0, 1, 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 0, 2, 1, 1, 2, 1, 0, 2, 1, 1, 2, 1, 2, 1, 1, 2, 1, 2, 2, 2, 2, 0, 1, 1, 2, 0, 2, 1, 1, 1, 2, 1, 2, 0, 1, 2, 1, 0, 1, 1, 0, 1, 
0, 2, 2, 1, 2, 2, 2, 0, 2, 1, 0, 1, 2, 2, 0, 2, 1, 1, 1, 2, 2, 2, 1, 1, 0, 1, 2, 2, 1, 1, 1, 0, 2, 1, 0, 0, 1, 1, 1, 1, 1, 2, 0, 0, 2, 2, 2, 0, 2, 1, 1, 1, 1, 0, 2, 0, 2, 2, 1, 1, 2, 1, 1, 0, 1, 1, 1, 2, 1, 2, 0, 0, 0, 1, 1, 1, 1, 2, 0, 2, 0, 2, 1, 2, 2, 0, 0, 2, 2, 2, 0, 1, 2, 0, 2, 2, 2, 0, 1, 2, 1, 0, 2, 1, 0, 0, 2, 2, 2, 1, 1, 1, 2, 1, 2, 1, 1, 1, 2, 0, 1, 0, 2, 1, 1, 0, 0, 2, 1, 0, 1, 1, 0, 1, 1, 1, 2, 0, 1, 2, 2, 1, 2, 1, 2, 1, 0, 0, 2, 0, 1, 0, 1, 2, 2, 2, 2, 1, 0, 0, 2, 1, 1, 0, 0, 2, 1, 2, 2, 0, 1, 1, 0, 0, 2, 1, 2, 2, 2, 2, 0, 0, 2, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 0, 1, 2, 0, 0, 1, 2, 0, 2, 1, 1, 1, 0, 1, 2, 1, 2, 1, 0, 1, 0, 0, 1, 2, 2, 2, 2, 2, 0, 2, 0, 2, 2, 1, 2, 2, 2, 1, 0, 2, 1, 2, 2, 0, 0, 1, 0, 2, 0, 2, 1, 2, 1, 2, 1, 0, 1, 0, 2, 2, 2, 0, 1, 0, 2, 1, 2, 1, 1, 2, 2, 1, 1, 2, 1, 0, 2, 0, 2, 1, 2, 1, 1, 2, 1, 0, 1, 0, 1, 2, 2, 2, 1, 0, 2, 2, 0, 2, 2, 0, 2, 0, 2, 1, 2, 2, 1, 1, 1, 1, 2, 0, 2, 0, 1, 2, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 2, 1, 1, 2, 1, 2, 1, 1, 1, 1, 0, 2, 2, 1, 2, 0, 1, 2, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 0, 0, 1, 0, 1, 2, 0, 2, 0, 2, 2, 1, 1, 1, 2, 2, 0, 2, 1, 1, 1, 2, 1, 0, 1, 2, 1, 2, 1, 0, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 0, 1, 2, 2, 2, 2, 1, 0, 0, 0, 2, 2, 2, 1, 2, 2, 2, 2, 0, 2, 1, 0, 2, 2, 2, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 2, 1, 2, 0, 1, 2, 2, 0, 1, 2, 2, 1, 0, 1, 2, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 2, 2, 2, 1, 1, 0, 2, 2, 0, 0, 2, 1, 2, 2, 0, 1, 2, 0, 0, 1, 1, 0, 0, 1, 2, 1, 2, 2, 0, 2, 0, 1, 1, 1, 0, 0, 1, 1, 1, 2, 2, 1, 1, 1, 0, 2, 1, 2, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 2, 1, 2, 1, 0, 0, 1, 2, 0, 1, 1, 2, 2, 1, 1, 1, 2, 0, 0, 1, 0, 1, 1, 0, 0, 0, 2, 1, 2, 1, 0, 2, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 2, 2, 2, 0, 2, 2, 0, 1, 2, 1, 0, 2, 0, 2, 1, 2, 1, 1, 2, 0, 2, 1, 2, 2, 1, 1, 2, 2, 2, 2, 0, 2, 0, 2, 0, 1, 2, 2, 0, 1, 0, 1, 1, 0, 1, 2, 0, 0, 2, 0, 2, 0, 0, 1, 1, 1, 2, 1, 0, 2, 1, 1, 2, 2, 2, 0, 1, 1, 1, 1, 0, 0, 2, 1, 2, 2, 2, 1, 2, 2, 0, 0, 2, 0, 0, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 1, 0, 0, 2, 
0, 1, 1, 0, 1, 0, 2, 1, 1, 2, 0, 1, 1, 2, 2, 1, 1, 1, 1, 2, 1, 0, 0, 1, 2, 2, 0, 0, 2, 2, 2, 2, 1, 2, 1, 0, 2, 2, 2, 0, 2, 1, 0, 1, 2, 0, 1, 1, 0, 0, 0, 1, 1, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 0, 1, 2, 2, 1, 1, 0, 2, 0, 2, 1, 2, 0, 2, 2, 1, 0, 2, 2, 1, 1, 2, 1, 2, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 2, 0, 2, 0, 2, 1, 1, 1, 0, 1, 1, 2, 1, 0, 1, 0, 1, 2, 2, 0, 1, 0, 0, 0, 0, 2, 1, 1, 1, 2, 1, 2, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 2, 1, 2, 1, 2, 2, 0, 2, 2, 0, 1, 1, 1, 1, 0, 1, 0, 2, 0, 0, 1, 1, 0, 1, 2, 1, 0, 1, 0, 2, 2, 0, 0, 1, 1, 1, 1, 1, 2, 1, 2, 0, 2, 0, 2, 2, 2, 0, 1, 0, 1, 1, 0, 0, 2, 2, 0, 2, 2, 2, 2, 2, 0, 1, 2, 0, 2, 1, 1, 2, 2, 2, 0, 2, 2, 0, 2, 0, 1, 1, 2, 1, 0, 0, 1, 0, 2, 1, 2, 1, 0, 2, 0, 0, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 2, 2, 0, 1, 2, 2, 0, 1, 2, 2, 2, 2, 2, 0, 2, 0, 2, 2, 1, 2, 1, 1, 2, 1, 2, 1, 0, 1, 1, 0, 2, 1, 2, 2, 0, 1, 2, 1, 0, 2, 2, 2, 1, 0, 0, 0, 1, 1, 2, 1, 2, 1, 1, 2, 2, 0, 0, 0, 0, 1, 2, 1, 2, 1, 2, 2, 2, 0, 2, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 2, 2, 1, 1, 0, 1, 1, 2, 1, 0, 2, 1, 0, 0, 0, 2, 1, 0, 1, 1, 2, 2, 0, 0, 2, 2, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 2, 0, 1, 1, 2, 1, 1, 1, 1, 2, 0, 2, 0, 0, 2, 0, 1, 0, 1, 2, 2, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 0, 2, 2, 1, 2, 0, 1, 2, 2, 2, 0, 1, 2, 2, 2, 1, 2, 1, 0, 1, 0, 2, 1, 1, 2, 2, 2, 2, 2, 0, 0, 0, 2, 1, 0, 1, 0, 0, 2, 2, 1, 2, 1, 2, 0, 1, 2, 1, 2, 2, 1, 0, 0, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 0, 2, 2, 0, 1, 2, 2, 2, 0, 0, 2, 1, 1, 1, 2, 0, 2, 2, 2, 2, 1, 0, 2, 2, 2, 2, 0, 1, 0, 2, 2, 1, 1, 0, 2, 0, 2, 0, 0, 0, 2, 1, 1, 0, 2, 2, 2, 1, 1, 2, 1, 1, 0, 2, 0, 2, 2, 2, 0, 0, 2, 2, 0, 2, 0, 1, 0, 1, 0, 1, 2, 0, 1, 0, 0, 2, 1, 0, 0, 1, 2, 1, 2, 2, 0, 1, 2, 1, 2, 0, 2, 0, 1, 2, 2, 0, 0, 2, 2, 2, 0, 0, 0, 1, 1, 0, 1, 2, 2, 1, 1, 2, 2, 0, 1, 2, 2, 0, 2, 2, 2, 2, 2, 0, 1, 1, 2, 0, 2, 2, 0, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 0, 1, 0, 2, 2, 0, 2, 0, 2, 1, 0, 2, 1, 0, 0, 1, 1, 1, 0, 2, 1, 1, 1, 1, 0, 2, 0, 1, 2, 2, 1, 2, 1, 2, 1, 2, 0, 0, 1, 2, 0, 2, 1, 1, 0, 2, 2, 
0, 0, 1, 2, 0, 2, 1, 1, 0, 2, 1, 0, 2, 2, 0, 1, 0, 1, 2, 1, 1, 1, 1, 1, 2, 0, 0, 2, 2, 2, 1, 2, 2, 2, 0, 2, 1, 2, 0, 0, 0, 2, 1, 2, 2, 1, 1, 2, 2, 0, 0, 0, 2, 1, 0, 0, 0, 2, 2, 1, 2, 0, 1, 2, 1, 2, 0, 0, 2, 1, 1, 0, 1, 2, 1, 2, 2, 1, 0, 1, 1, 2, 1, 2, 0, 2, 0, 2, 1, 1, 1, 0, 1, 2, 1, 0, 2, 1, 2, 2, 1, 1, 0, 1, 2, 0, 2, 1, 1, 0, 1, 1, 1, 2, 2, 2, 0, 1, 2, 1, 1, 0, 0, 2, 2, 0, 0, 0, 2, 2, 1, 1, 2, 2, 1, 2, 0, 1, 2, 2, 1, 0, 2, 1, 0, 0, 0, 2, 2, 1, 2, 1, 1, 0, 1, 2, 0, 0, 0, 2, 0, 1, 1, 0, 2, 1, 0, 2, 2, 2, 1, 1, 1, 2, 0, 0, 1, 1, 1, 2, 1, 1, 2, 2, 0, 1, 1, 0, 2, 0, 1, 1, 0, 2, 0, 2, 1, 2, 0, 0, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 1, 0, 2, 1, 1, 2, 2, 2, 0, 2, 1, 0, 2, 1, 2, 0, 2, 2, 2, 2, 1, 2, 2, 2, 0, 1, 1, 0, 1, 0, 2, 2, 0, 0, 1, 0, 1, 2, 0, 2, 0, 2, 1, 2, 1, 1, 2, 0, 0, 2, 0, 1, 2, 2, 1, 0, 0, 2, 1, 2, 2, 2, 2, 0, 1, 0, 2, 1, 0, 1, 0, 1, 1, 0, 2, 1, 1, 1, 2, 1, 1, 1, 1, 0, 2, 1, 2, 0, 1, 2, 1, 0, 2, 0, 2, 1, 2, 0, 2, 2, 0, 2, 2, 0, 0, 0, 0, 0, 1, 2, 2, 2, 1, 2, 0, 0, 1, 1, 0, 2, 1, 2, 2, 1, 0, 0, 1, 2, 0, 1, 2, 2, 0, 2, 1, 1, 1, 0, 0, 0, 1, 0, 1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 0, 0, 1, 2, 0, 2, 2, 1, 1, 1, 1, 2, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 2, 1, 2, 2, 2, 1, 2, 1, 1, 2, 1, 0, 2, 2, 1, 2, 0, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 0, 2, 1, 2, 2, 2, 2, 0, 1, 2, 2, 1, 2, 0, 2, 1, 0, 0, 1, 1, 2, 2, 2, 1, 1, 2, 0, 1, 0, 1, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 0, 1, 2, 1, 0, 2, 1, 1, 1, 2, 2, 2, 2, 0, 1, 0, 0, 2, 2, 0, 2, 2, 2, 0, 0, 0, 2, 0, 2, 1, 2, 0, 0, 1, 1, 2, 1, 1, 0, 1, 1, 2, 0, 1, 0, 0, 0, 2, 0, 2, 2, 2, 0, 2, 1, 0, 0, 1, 0, 2, 0, 0, 0, 1, 1, 0, 2, 1, 2, 1, 1, 0, 0, 1, 2, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 0, 0, 2, 0, 2, 2, 1, 2, 2, 0, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 1, 0, 0, 2, 0, 1, 0, 2, 0, 2, 0, 0, 0, 0, 0, 1, 2, 0, 2, 1, 1, 0, 1, 1, 0, 2, 2, 0, 2, 2, 0, 1, 2, 1, 2, 1, 1, 1, 0, 1, 2, 2, 0, 0, 2, 1, 0, 1, 2, 0, 1, 1, 2, 0, 2, 2, 2, 2, 2, 2, 0, 0, 2, 0, 0, 2, 1, 0, 2, 2, 2, 2, 0, 2, 0, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 0, 1, 0, 2, 0, 2, 1, 2, 1, 0, 0, 2, 1, 1, 2, 0, 2, 0, 
2, 1, 2, 2, 2, 2, 0, 2, 1, 0, 1, 0, 2, 1, 2, 0, 1, 1, 2, 2, 2, 2, 2, 1, 2, 1, 0, 1, 2, 1, 2, 1, 0, 0, 2, 1, 2, 0, 1, 1, 1, 1, 0, 1, 0, 1, 2, 1, 2, 0, 0, 2, 0, 2, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 2, 1, 2, 2, 1, 1, 0, 2, 0, 1, 2, 1, 1, 2, 1, 0, 1, 2, 1, 2, 0, 0, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 0, 1, 1, 2, 2, 1, 1, 0, 0, 1, 2, 1, 0, 1, 1, 1, 1, 2, 2, 1, 2, 0, 0, 0, 1, 2, 2, 1, 2, 0, 1, 0, 1, 2, 2, 1, 0, 2, 1, 1, 0, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 2, 0, 0, 0, 2, 2, 1, 0, 0, 1, 1, 1, 2, 2, 1, 0, 1, 2, 0, 0, 0, 1, 1, 1, 2, 0, 1, 0, 1, 1, 0, 2, 0, 1, 2, 1, 0, 2, 0, 1, 1, 2, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 2, 2, 0, 1, 1, 2, 1, 2, 0, 1, 0, 2, 1, 0, 2, 2, 2, 1, 2, 1, 2, 0, 1, 1, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2, 1, 2, 2, 1, 0, 2, 0, 1, 2, 0, 2, 1, 1, 0, 2, 1, 0, 1, 1, 0, 1, 2, 2, 2, 2, 1, 0, 1, 1, 1, 0, 2, 1, 2, 2, 1, 2, 1, 2, 2, 2, 1, 0, 2, 1, 2, 2, 2, 0, 1, 0, 2, 0, 0, 1, 2, 1, 2, 2, 0, 1, 1, 0, 1, 2, 0, 1, 2, 2, 2, 1, 0, 1, 1, 0, 1, 0, 2, 2, 2, 2, 0, 2, 1, 2, 2, 1, 1, 1, 2, 0, 2, 1, 2, 1, 1, 2, 2, 2, 2, 1, 0, 2, 2, 1, 0, 2, 1, 0, 1, 0, 0, 2, 0, 2, 2, 1, 1, 1, 2, 1, 0, 1, 0, 1, 2, 2, 1, 2, 1, 1, 1, 2, 0, 1, 1, 1, 2, 2, 1, 2, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 0, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 0, 2, 2, 1, 2, 2, 1, 2, 1, 0, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 2, 0, 2, 2, 0, 0, 1, 0, 1, 1, 0, 1, 2, 0, 2, 1, 2, 1, 0, 2, 2, 1, 1, 2, 1, 2, 2, 0, 2, 1, 1, 2, 0, 2, 2, 1, 1, 2, 2, 2, 1, 2, 1, 2, 1, 1, 0, 2, 0, 1, 2, 1, 1, 0, 0, 1, 2, 1, 2, 1, 0, 1, 1, 1, 2, 0, 1, 0, 0, 0, 0, 2, 2, 1, 2, 0, 2, 2, 2, 0, 1, 1, 0, 1, 1, 2, 1, 2, 1, 2, 1, 1, 0, 2, 0, 0, 1, 2, 1, 1, 0, 1, 2, 0, 2, 0, 1, 2, 0, 0, 2, 2, 0, 2, 1, 2, 2, 1, 2, 2, 2, 1, 0, 2, 2, 1, 2, 2, 2, 1, 0, 0, 1, 2, 0, 0, 2, 2, 1, 1, 0, 2, 1, 2, 0, 2, 0, 0, 0, 2, 1, 2, 0, 1, 0, 1, 2, 2, 2, 2, 0, 0, 1, 1, 1, 1, 2, 2, 1, 2, 0, 1, 0, 0, 2, 0, 2, 0, 0, 1, 2, 2, 2, 1, 0, 1, 2, 0, 2, 2, 2, 1, 0, 0, 1, 0, 2, 2, 0, 1, 2, 1, 2, 2, 2, 0, 1, 1, 2, 2, 0, 1, 2, 0, 0, 1, 2, 2, 2, 1, 2, 1, 1, 2, 2, 0, 2, 0, 1, 1, 1, 2, 2, 
0, 0, 2, 1, 0, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 0, 1, 0, 2, 2, 2, 2, 0, 0, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 2, 0, 2, 1, 1, 1, 2, 2, 1, 1, 1, 0, 1, 2, 1, 0, 2, 1, 1, 0, 0, 0, 2, 2, 2, 2, 1, 2, 2, 1, 2, 0, 2, 1, 0, 0, 2, 0, 2, 1, 2, 2, 1, 0, 0, 0, 0, 2, 1, 1, 0, 2, 2, 1, 1, 2, 2, 0, 2, 1, 0, 2, 2, 1, 2, 2, 0, 0, 2, 2, 1, 1, 1, 0, 1, 2, 1, 0, 0, 2, 1, 1, 0, 2, 1, 2, 0, 2, 1, 1, 2, 1, 2, 2, 0, 1, 1, 1, 0, 1, 1, 2, 2, 1, 1, 1, 2, 0, 1, 1, 2, 1, 2, 2, 1, 1, 2, 1, 0, 2, 0, 1, 2, 1, 1, 1, 1, 0, 1, 0, 0, 2, 1, 1, 1, 2, 0, 2, 1, 1, 1, 0, 1, 2, 0, 1, 2, 0, 0, 1, 2, 2, 0, 2, 2, 1, 2, 1, 0, 2, 2, 2, 2, 1, 1, 1, 1, 0, 1, 2, 1, 0, 2, 1, 2, 0, 1, 0, 2, 1, 0, 0, 1, 2, 2, 2, 1, 1, 0, 1, 0, 2, 2, 0, 0, 2, 2, 1, 2, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 2, 0, 1, 0, 0, 0, 2, 2, 1, 2, 1, 1, 2, 1, 0, 2, 2, 1, 2, 2, 2, 1, 2, 1, 0, 2, 1, 2, 2, 2, 1, 1, 2, 2, 2, 2, 0, 1, 1, 0, 2, 2, 2, 2, 0, 0, 1, 1, 1, 0, 2, 0, 2, 1, 1, 2, 1, 1, 0, 1, 1, 1, 0, 0, 2, 1, 1, 1, 2, 1, 0, 0, 0, 1, 2, 2, 1, 2, 1, 1, 0, 0, 2, 2, 0, 2, 1, 0, 0, 0, 2, 2, 1, 1, 2, 1, 1, 2, 2, 2, 1, 1, 0, 1, 1, 1, 1, 2, 0, 0, 2, 1, 1, 2, 1, 1, 2, 0, 0, 1, 0, 2, 2, 2, 0, 2, 1, 1, 2, 2, 1, 2, 1, 2, 1, 1, 2, 0, 2, 2, 2, 0, 1, 0, 0, 2, 1, 1, 1, 0, 0, 1, 2, 1, 2, 1, 0, 1, 1, 0, 2, 1, 2, 2, 0, 2, 0, 2, 0, 0, 2, 2, 2, 2, 0, 0, 2, 0, 1, 1, 0, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 1, 2, 0, 2, 2, 2, 2, 2, 0, 0, 2, 0, 1, 1, 2, 2, 0, 2, 2, 2, 2, 2, 1, 2, 1, 0, 1, 1, 1, 2, 2, 0, 2, 2, 2, 1, 1, 2, 1, 0, 2, 1, 1, 1, 0, 1, 2, 1, 0, 2, 2, 0, 1, 1, 2, 2, 2, 1, 1, 2, 2, 0, 0, 1, 1, 1, 1, 1, 0, 2, 2, 2, 0, 1, 0, 0, 1, 2, 0, 2, 2, 2, 2, 1, 0, 1, 0, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 0, 0, 1, 0, 0, 0, 0, 2, 2, 1, 1, 1, 0, 2, 1, 1, 0, 2, 1, 0, 2, 2, 2, 0, 0, 1, 1, 2, 1, 1, 1, 0, 1, 2, 2, 2, 2, 2, 1, 1, 2, 0, 1, 2, 0, 2, 2, 2, 0, 1, 1, 2, 2, 0, 2, 1, 1, 2, 2, 0, 1, 2, 2, 1, 2, 2, 2, 1, 1, 2, 0, 2, 2, 1, 0, 2, 1, 1, 1, 2, 2, 0, 0, 1, 1, 2, 1, 0, 1, 1, 0, 2, 1, 1, 2, 2, 0, 2, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 2, 2, 1, 2, 2, 1, 2, 0, 
1, 2, 2, 1, 1, 2, 0, 0, 2, 1, 1, 1, 1, 1, 2, 0, 0, 2, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 2, 1, 2, 2, 1, 0, 1, 2, 0, 2, 2, 1, 0, 2, 1, 1, 2, 1, 1, 2, 2, 0, 2, 1, 0, 2, 2, 2, 1, 1, 1, 1, 2, 0, 1, 2, 1, 1, 2, 2, 1, 0, 0, 2, 2, 2, 1, 0, 2, 2, 1, 1, 1, 2, 2, 2, 2, 1, 1, 0, 1, 0, 1, 0, 1, 2, 1, 2, 0, 2, 0, 1, 2, 0, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 0, 0, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 0, 0, 1, 0, 2, 0, 0, 2, 0, 1, 2, 0, 2, 2, 2, 0, 2, 2, 0, 2, 1, 1, 1, 2, 2, 2, 1, 0, 2, 2, 0, 1, 1, 2, 2, 2, 2, 2, 2, 0, 2, 2, 0, 0, 0, 0, 1, 2, 0, 2, 2, 2, 0, 1, 2, 0, 1, 1, 2, 1, 0, 1, 1, 2, 1, 1, 2, 2, 2, 2, 2, 0, 1, 0, 2, 1, 2, 0, 1, 1, 1, 1, 0, 0, 2, 0, 0, 0, 0, 2, 1, 0, 2, 1, 0, 1, 0, 2, 1, 0, 2, 2, 2, 0, 0, 0, 2, 0, 1, 2, 1, 1, 0, 0, 1, 0, 2, 1, 1, 1, 1, 0, 0, 1, 0, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 0, 2, 1, 1, 1, 2, 0, 1, 1, 2, 2, 0, 2, 0, 1, 0, 0, 2, 2, 1, 2, 0, 0, 2, 1, 0, 1, 2, 2, 2, 1, 2, 1, 1, 2, 2, 0, 0, 1, 1, 2, 2, 0, 2, 0, 2, 2, 1, 1, 0, 2, 2, 2, 1, 0, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 0, 1, 2, 0, 2, 1, 1, 2, 1, 0, 1, 0, 2, 2, 0, 1, 2, 1, 1, 2, 1, 0, 1, 2, 2, 1, 1, 0, 0, 2, 2, 2, 2, 1, 1, 0, 2, 2, 2, 1, 0, 2, 0, 1, 1, 1, 0, 2, 2, 2, 0, 2, 2, 2, 0, 2, 0, 0, 1, 0, 2, 0, 1, 1, 2, 1, 1, 2, 0, 2, 2, 1, 0, 1, 2, 2, 1, 2, 1, 0, 0, 2, 1, 2, 2, 1, 1, 1, 2, 0, 0, 2, 1, 0, 2, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2, 0, 1, 1, 2, 1, 0, 2, 2, 2, 2, 2, 2, 0, 1, 0, 2, 1, 1, 1, 1, 2, 1, 1, 2, 2, 1, 2, 1, 2, 1, 0, 2, 0, 2, 2, 2, 1, 0, 2, 1, 2, 0, 0, 2, 2, 1, 0, 1, 1, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 1, 0, 2, 2, 2, 0, 1, 1, 0, 1, 1, 0, 1, 2, 1, 0, 2, 0, 0, 1, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 0, 2, 0, 1, 0, 0, 2, 0, 0, 2, 2, 2, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 1, 2, 1, 2, 1, 0, 0, 1, 0, 1, 0, 2, 1, 0, 0, 1, 0, 2, 1, 0, 0, 1, 1, 2, 2, 2, 0, 2, 1, 1, 2, 0, 2, 0, 1, 1, 2, 1, 0, 0, 0, 0, 0, 2, 0, 1, 0, 2, 2, 0, 0, 0, 1, 0, 1, 0, 2, 1, 1, 1, 1, 0, 0, 0, 2, 0, 1, 2, 2, 2, 1, 1, 0, 1, 2, 1, 2, 2, 1, 0, 2, 1, 0, 0, 1, 2, 1, 0, 2, 2, 2, 1, 2, 1, 0, 1, 0, 0, 2, 0, 0, 0, 2, 2, 2, 2, 2, 1, 2, 0, 
1, 2, 1, 1, 2, 2, 0, 1, 0, 0, 2, 1, 0, 1, 0, 0, 0, 1, 0, 2, 2, 2, 2, 2, 2, 2, 1, 0, 2, 2, 2, 0, 1, 1, 1, 2, 0, 2, 1, 0, 0, 2, 1, 2, 1, 0, 0, 2, 1, 1, 0, 2, 1, 0, 2, 2, 0, 0, 2, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 2, 2, 1, 1, 0, 2, 2, 1, 1, 1, 2, 1, 2, 1, 0, 0, 2, 2, 2, 2, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 2, 0, 2, 2, 1, 1, 0, 1, 2, 2, 2, 2, 2, 0, 1, 1, 1, 2, 1, 1, 0, 2, 0, 2, 1, 1, 0, 1, 1, 2, 1, 0, 2, 0, 2, 1, 1, 1, 0, 0, 2, 0, 1, 0, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 1, 2, 0, 2, 0, 1, 2, 1, 2, 2, 1, 0, 0, 1, 2, 1, 2, 1, 1, 2, 0, 2, 2, 1, 1, 1, 2, 1, 2, 0, 1, 1, 1, 2, 0, 2, 0, 0, 1, 2, 0, 1, 2, 2, 2, 0, 2, 2, 0, 2, 1, 0, 2, 1, 1, 1, 2, 1, 1, 2, 2, 2, 1, 0, 1, 0, 1, 2, 1, 2, 2, 2, 0, 2, 0, 2, 0, 1, 0, 2, 0, 2, 1, 1, 1, 0, 1, 2, 1, 1, 2, 2, 2, 0, 1, 1, 2, 2, 2, 1, 1, 2, 2, 2, 1, 0, 1, 2, 2, 1, 2, 2, 1, 1, 0, 2, 2, 1, 1, 0, 1, 0, 1, 2, 2, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 2, 0, 1, 1, 0, 1, 2, 2, 2, 2, 1, 2, 1, 2, 1, 0, 0, 0, 1, 1, 1, 0, 2, 0, 2, 1, 1, 0, 0, 1, 1, 1, 2, 2, 2, 2, 0, 2, 1, 1, 2, 2, 2, 0, 1, 2, 1, 1, 2, 2, 1, 2, 1, 0, 2, 1, 2, 2, 1, 2, 1, 2, 1, 1, 2, 1, 2, 1, 0, 1, 2, 2, 2, 0, 1, 0, 1, 1, 2, 0, 0, 1, 0, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 1, 0, 2, 1, 2, 0, 2, 2, 1, 0, 1, 1, 2, 0, 0, 1, 2, 2, 0, 1, 1, 1, 0, 2, 2, 2, 2, 1, 1, 0, 2, 0, 2, 0, 1, 1, 0, 0, 2, 2, 1, 0, 2, 1, 1, 0, 2, 1, 1, 2, 1, 2, 2, 0, 1, 2, 1, 0, 0, 1, 1, 1, 2, 0, 2, 1, 1, 0, 2, 2, 1, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 0, 2, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 2, 1, 2, 1, 2, 1, 0, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 0, 0, 2, 1, 2, 0, 0, 2, 0, 0, 2, 1, 1, 2, 2, 0, 2, 2, 0, 2, 1, 1, 2, 1, 1, 2, 2, 0, 1, 1, 1, 1, 2, 1, 1, 0, 2, 2, 0, 0, 2, 2, 1, 2, 1, 1, 2, 0, 2, 1, 2, 0, 0, 0, 1, 2, 0, 1, 2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 0, 1, 0, 0, 1, 1, 0, 2, 0, 2, 1, 1, 0, 0, 2, 2, 0, 1, 2, 0, 2, 0, 0, 1, 0, 0, 1, 2, 0, 2, 1, 1, 1, 1, 0, 1, 1, 0, 2, 1, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 1, 2, 2, 0, 2, 1, 1, 2, 1, 0, 2, 2, 1, 1, 2, 1, 2, 1, 1, 1, 0, 2, 1, 0, 2, 0, 0, 2, 1, 2, 2, 2, 0, 1, 1, 0, 0, 2, 2, 1, 2, 2, 
0, 1, 2, 2, 2, 2, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 2, 2, 1, 2, 1, 2, 1, 2, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 1, 0, 1, 0, 1, 1, 1, 2, 2, 1, 2, 0, 1, 2, 0, 1, 0, 1, 2, 1, 1, 1, 1, 0, 2, 2, 2, 1, 2, 0, 1, 0, 2, 2, 2, 1, 2, 2, 2, 2, 0, 1, 0, 2, 2, 0, 1, 1, 1, 2, 0, 2, 2, 2, 1, 0, 1, 2, 1, 0, 2, 1, 2, 0, 2, 0, 1, 2, 1, 2, 0, 0, 2, 2, 2, 2, 1, 0, 1, 0, 0, 1, 1, 2, 1, 2, 2, 2, 0, 2, 0, 2, 1, 0, 1, 1, 2, 2, 1, 0, 0, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 0, 0, 2, 2, 1, 2, 0, 2, 0, 2, 2, 2, 2, 1, 1, 1, 0, 2, 2, 1, 2, 0, 1, 1, 2, 2, 0, 2, 0, 2, 1, 2, 2, 1, 0, 2, 1, 1, 1, 1, 2, 2, 1, 1, 1, 0, 2, 1, 1, 2, 0, 0, 1, 1, 1, 2, 1, 2, 1, 1, 0, 2, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 0, 1, 2, 1, 0, 1, 0, 2, 0, 0, 2, 0, 1, 1, 1, 2, 2, 2, 2, 2, 0, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 0, 0, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 0, 2, 1, 1, 0, 0, 0, 0, 1, 2, 0, 1, 1, 0, 0, 0, 1, 1, 1, 2, 0, 0, 2, 2, 2, 2, 2, 1, 1, 2, 1, 0, 1, 2, 0, 1, 2, 0, 2, 2, 1, 1, 0, 1, 2, 0, 2, 2, 1, 1, 2, 2, 2, 0, 2, 1, 1, 1, 2, 1, 1, 0, 1, 1, 1, 0, 0, 1, 2, 2, 1, 0, 2, 1, 1, 1, 0, 0, 2, 1, 1, 0, 0, 0, 1, 0, 0, 2, 1, 1, 2, 1, 2, 0, 1, 1, 1, 2, 1, 0, 2, 0, 2, 1, 1, 0, 0, 2, 1, 1, 2, 0, 2, 1, 2, 0, 2, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 0, 1, 2, 1, 1, 2, 0, 2, 1, 1, 2, 2, 2, 0, 1, 0, 1, 1, 1, 2, 0, 1, 1, 1, 0, 0, 1, 2, 2, 2, 0, 1, 1, 1, 0, 2, 1, 0, 2, 0, 0, 2, 0, 2, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 0, 2, 0, 1, 0, 2, 1, 2, 0, 2, 0, 2, 0, 1, 2, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 2, 0, 2, 2, 0, 1, 2, 0, 1, 2, 1, 1, 1, 1, 1, 2, 0, 1, 2, 2, 1, 1, 2, 1, 2, 0, 1, 2, 1, 0, 1, 1, 2, 2, 1, 1, 0, 2, 2, 2, 1, 2, 2, 1, 1, 0, 1, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 2, 1, 2, 1, 0, 0, 1, 0, 0, 0, 2, 0, 1, 2, 2, 1, 1, 2, 0, 1, 2, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 2, 2, 0, 1, 2, 1, 2, 2, 1, 1, 0, 1, 2, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 2, 1, 2, 2, 2, 1, 0, 1, 0, 2, 2, 2, 2, 0, 0, 0, 1, 0, 0, 2, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 1, 0, 1, 2, 0, 2, 1, 2, 0, 2, 2, 2, 
1, 2, 0, 0, 1, 0, 2, 0, 2, 1, 0, 0, 0, 2, 0, 2, 1, 2, 1, 0, 2, 0, 2, 2, 2, 1, 1, 1, 2, 2, 1, 0, 2, 2, 1, 1, 1, 1, 1, 1, 0, 2, 2, 0, 2, 1, 2, 0, 2, 1, 1, 2, 2, 1, 0, 1, 1, 1, 2, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 2, 0, 1, 2, 2, 0, 1, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 0, 0, 1, 2, 1, 2, 2, 2, 1, 0, 0, 1, 2, 2, 1, 0, 2, 2, 2, 1, 1, 2, 2, 0, 1, 1, 2, 0, 1, 2, 2, 1, 2, 2, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 2, 1, 1, 1, 0, 0, 2, 2, 0, 0, 2, 1, 0, 1, 1, 1, 1, 2, 0, 1, 2, 0, 1, 0, 2, 2, 2, 2, 2, 1, 2, 0, 1, 2, 2, 0, 1, 1, 0, 2, 1, 2, 2, 0, 0, 2, 2, 2, 0, 0, 2, 2, 0, 0, 2, 1, 1, 0, 2, 2, 1, 2, 0, 1, 2, 0, 0, 0, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 0, 2, 2, 0, 0, 1, 0, 2, 1, 2, 2, 0, 2, 2, 2, 1, 0, 0, 0, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 0, 2, 0, 0, 2, 2, 0, 1, 0, 1, 0, 0, 0, 0, 2, 1, 0, 0, 2, 1, 2, 1, 2, 1, 0, 2, 0, 0, 1, 0, 1, 0, 2, 1, 0, 1, 0, 1, 2, 0, 1, 1, 0, 0, 1, 0, 1, 2, 1, 2, 2, 0, 0, 0, 0, 2, 2, 0, 2, 0, 1, 2, 2, 1, 2, 1, 0, 2, 0, 2, 1, 2, 1, 1, 0, 2, 1, 2, 1, 0, 2, 1, 2, 1, 0, 1, 1, 2, 2, 0, 2, 1, 2, 1, 1, 2, 2, 1, 0, 0, 2, 0, 1, 0, 0, 0, 1, 0, 2, 0, 1, 2, 2, 1, 0, 0, 1, 0, 0, 0, 2, 1, 0, 2, 2, 1, 0, 2, 0, 0, 2, 2, 0, 0, 2, 0, 0, 1, 2, 0, 2, 1, 2, 0, 2, 0, 2, 0, 2, 1, 1, 2, 0, 0, 1, 2, 2, 1, 1, 1, 1, 0, 1, 0, 1, 2, 1, 1, 0, 1, 2, 1, 2, 2, 2, 1, 2, 0, 2, 1, 2, 2, 1, 0, 0, 0, 2, 0, 2, 0, 1, 2, 0, 0, 1, 1, 1, 2, 1, 1, 1, 0, 2, 2, 1, 2, 1, 1, 2, 2, 1, 0, 2, 2, 2, 1, 2, 0, 2, 0, 1, 2, 0, 2, 2, 1, 1, 2, 0, 2, 2, 2, 2, 1, 1, 1, 1, 0, 2, 1, 2, 0, 2, 0, 1, 1, 0, 2, 1, 2, 1, 1, 2, 0, 2, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 2, 0, 1, 2, 1, 0, 2, 1, 0, 1, 0, 2, 2, 1, 2, 1, 0, 2, 1, 1, 1, 0, 2, 2, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 0, 2, 0, 1, 0, 0, 2, 1, 0, 0, 1, 1, 0, 2, 2, 2, 2, 1, 2, 2, 0, 0, 1, 0, 0, 2, 2, 0, 2, 2, 0, 0, 1, 2, 1, 2, 0, 0, 0, 1, 1, 0, 2, 0, 0, 0, 2, 2, 0, 1, 2, 0, 2, 2, 2, 1, 1, 2, 0, 0, 2, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 2, 2, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 0, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 1, 2, 0, 1, 0, 2, 
1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 0, 1, 0, 2, 1, 0, 2, 2, 1, 2, 1, 2, 0, 2, 0, 0, 1, 1, 2, 2, 2, 2, 0, 1, 2, 1, 0, 0, 1, 1, 1, 2, 1, 0, 2, 1, 0, 2, 0, 2, 0, 1, 1, 2, 1, 2, 0, 0, 0, 2, 1, 1, 1, 1, 2, 0, 1, 1, 1, 2, 0, 2, 2, 2, 1, 1, 2, 1, 0, 2, 2, 1, 1, 2, 1, 2, 2, 1, 0, 0, 0, 2, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 0, 2, 0, 2, 0, 1, 1, 2, 2, 1, 2, 0, 1, 2, 1, 0, 0, 2, 1, 1, 2, 1, 2, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 2, 2, 1, 2, 0, 1, 1, 2, 2, 2, 2, 2, 2, 0, 0, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 2, 1, 0, 1, 2, 2, 0, 2, 2, 1, 0, 0, 0, 1, 1, 1, 1, 1, 2, 0, 0, 0, 1, 1, 2, 2, 0, 0, 1, 2, 1, 1, 2, 1, 1, 0, 2, 2, 1, 0, 2, 2, 1, 1, 0, 1, 1, 0, 1, 2, 0, 1, 0, 1, 0, 2, 1, 1, 2, 0, 1, 0, 1, 0, 2, 2, 2, 1, 1, 0, 2, 2, 1, 1, 2, 1, 2, 0, 2, 0, 0, 2, 2, 2, 0, 1, 2, 2, 2, 2, 0, 2, 0, 1, 2, 1, 1, 0, 0, 1, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 0, 1, 2, 0, 1, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 0, 0, 2, 2, 1, 0, 2, 2, 2, 1, 2, 1, 0, 0, 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 0, 1, 1, 1, 1, 2, 0, 0, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 1, 1, 0, 0, 2, 2, 2, 1, 1, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 0, 1, 0, 2, 0, 2, 1, 1, 2, 2, 0, 0, 1, 0, 0, 1, 0, 2, 2, 2, 1, 2, 2, 2, 0, 2, 2, 2, 2, 2, 1, 1, 2, 1, 0, 1, 2, 0, 1, 2, 2, 1, 2, 0, 2, 1, 1, 0, 0, 2, 0, 1, 1, 1, 0, 2, 1, 1, 2, 0, 2, 2, 2, 2, 2, 0, 1, 1, 2, 1, 2, 2, 0, 1, 2, 0, 0, 1, 0, 2, 1, 2, 2, 2, 2, 2, 2, 0, 1, 2, 0, 0, 0, 1, 2, 0, 0, 2, 1, 1, 1, 1, 0, 1, 2, 1, 2, 1, 0, 0, 1, 2, 0, 2, 1, 1, 2, 1, 1, 0, 2, 1, 0, 1, 0, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 0, 1, 1, 2, 2, 1, 0, 2, 1, 1, 1, 1, 0, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 0, 0, 2, 2, 1, 0, 1, 0, 2, 2, 2, 0, 0, 1, 2, 2, 2, 0, 1, 1, 2, 0, 0, 0, 0, 2, 2, 1, 2, 1, 2, 2, 2, 0, 0, 1, 0, 2, 0, 1, 0, 2, 0, 1, 2, 2, 0, 0, 2, 2, 1, 1, 0, 1, 2, 1, 2, 1, 2, 1, 2, 0, 2, 2, 1, 0, 2, 0, 0, 2, 1, 1, 0, 2, 1, 2, 2, 1, 1, 1, 0, 2, 1, 1, 0, 0, 1, 0, 0, 0, 2, 2, 1, 1, 0, 1, 2, 2, 2, 1, 1, 0, 1, 2, 1, 0, 0, 2, 2, 2, 2, 1, 0, 2, 1, 1, 2, 1, 1, 1, 0, 0, 0, 0, 2, 0, 2, 2, 0, 1, 0, 2, 0, 0, 2, 1, 2, 1, 1, 0, 1, 1, 0, 
2, 1, 2, 1, 2, 2, 0, 2, 1, 1, 2, 0, 1, 0, 0, 1, 1, 0, 2, 2, 0, 2, 1, 0, 0, 0, 1, 1, 0, 0, 1, 2, 1, 1, 2, 1, 1, 1, 0, 1, 1, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 1, 0, 1, 1, 2, 1, 0, 1, 1, 1, 2, 2, 0, 0, 0, 2, 1, 2, 2, 2, 1, 2, 1, 2, 0, 0, 0, 2, 0, 0, 1, 0, 2, 2, 0, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 0, 1, 1, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 0, 1, 0, 0, 1, 2, 1, 1, 0, 2, 1, 2, 2, 2, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 2, 1, 1, 2, 1, 2, 1, 0, 2, 2, 1, 0, 2, 2, 1, 2, 1, 2, 0, 1, 2, 1, 2, 1, 1, 1, 0, 1, 1, 2, 1, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 0, 1, 2, 1, 2, 1, 2, 0, 0, 2, 1, 0, 2, 1, 2, 1, 1, 0, 2, 0, 0, 2, 0, 1, 1, 1, 2, 1, 2, 1, 2, 0, 0, 2, 1, 2, 1, 2, 1, 2, 2, 2, 0, 0, 2, 1, 2, 1, 1, 0, 2, 1, 1, 2, 0, 1, 1, 2, 2, 2, 0, 2, 0, 0, 0, 2, 2, 2, 1, 0, 1, 2, 1, 1, 2, 1, 2, 0, 0, 1, 1, 1, 2, 1, 1, 2, 1, 2, 2, 2, 0, 2, 1, 1, 2, 1, 1, 2, 2, 0, 0, 0, 1, 2, 1, 1, 2, 0, 0, 0, 2, 2, 1, 0, 1, 0, 0, 2, 2, 1, 2, 1, 2, 0, 2, 1, 2, 2, 2, 0, 2, 1, 2, 0, 1, 1, 1, 0, 1, 2, 2, 0, 0, 1, 1, 0, 1, 1, 1, 0, 2, 2, 2, 2, 1, 1, 1, 2, 0, 0, 2, 0, 1, 2, 1, 0, 1, 2, 2, 1, 0, 2, 0, 1, 2, 1, 0, 2, 0, 2, 2, 2, 0, 1, 2, 2, 2, 0, 0, 2, 0, 2, 1, 0, 1, 1, 1, 1, 1, 0, 0, 2, 2, 0, 2, 1, 1, 2, 0, 0, 2, 2, 2, 0, 2, 1, 1, 1, 2, 1, 2, 1, 2, 0, 2, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 2, 1, 2, 2, 1, 1, 1, 0, 1, 1, 0, 1, 2, 1, 1, 0, 1, 1, 1, 1, 2, 1, 1, 2, 0, 2, 2, 1, 1, 1, 0, 0, 0, 0, 0, 2, 0, 2, 1, 2, 0, 2, 2, 1, 1, 1, 1, 2, 1, 0, 2, 0, 2, 1, 0, 1, 2, 2, 2, 2, 0, 2, 0, 1, 1, 2, 2, 1, 1, 1, 2, 1, 2, 0, 2, 1, 2, 1, 1, 2, 0, 0, 0, 2, 2, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 1, 0, 2, 1, 2, 0, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 0, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 0, 0, 2, 0, 1, 1, 2, 0, 0, 1, 1, 2, 1, 2, 1, 1, 0, 2, 1, 2, 1, 0, 2, 1, 2, 1, 2, 0, 0, 2, 2, 1, 2, 0, 1, 2, 2, 2, 2, 1, 1, 2, 1, 1, 2, 1, 1, 1, 0, 1, 2, 0, 1, 1, 1, 2, 2, 0, 2, 0, 1, 2, 2, 0, 2, 2, 0, 2, 1, 1, 2, 2, 0, 2, 0, 0, 2, 1, 1, 2, 1, 0, 1, 1, 0, 1, 2, 1, 2, 1, 1, 0, 1, 0, 2, 2, 2, 2, 
0, 0, 1, 1, 0, 2, 0, 1, 0, 1, 1, 2, 0, 1, 2, 2, 2, 0, 1, 2, 0, 1, 2, 1, 2, 0, 2, 2, 1, 2, 0, 2, 2, 1, 1, 2, 1, 1, 2, 0, 2, 1, 1, 2, 1, 1, 2, 1, 1, 2, 0, 1, 0, 1, 2, 2, 0, 2, 1, 0, 2, 1, 0, 0, 1, 1, 1, 1, 2, 1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 0, 2, 2, 0, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 0, 1, 1, 2, 0, 2, 1, 0, 2, 1, 2, 0, 2, 0, 2, 0, 2, 1, 0, 2, 1, 2, 0, 0, 0, 2, 1, 0, 1, 2, 2, 1, 1, 1, 0, 0, 0, 2, 1, 1, 2, 2, 0, 1, 2, 0, 2, 2, 2, 2, 1, 1, 0, 2, 2, 2, 1, 2, 1, 1, 2, 0, 0, 1, 2, 2, 2, 0, 1, 0, 1, 2, 1, 0, 1, 0, 1, 0, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 1, 0, 1, 2, 2, 2, 2, 1, 2, 0, 1, 2, 1, 2, 0, 1, 2, 1, 1, 2, 0, 0, 0, 2, 0, 0, 2, 2, 1, 1, 2, 2, 0, 2, 2, 2, 2, 2, 1, 0, 0, 0, 1, 1, 1, 2, 1, 0, 1, 1, 2, 2, 0, 1, 0, 1, 1, 2, 1, 2, 2, 2, 0, 1, 0, 0, 1, 2, 1, 0, 1, 1, 1, 1, 0, 2, 1, 1, 2, 1, 1, 0, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 2, 1, 0, 0, 0, 2, 0, 0, 0, 0, 0, 1, 2, 2, 1, 1, 2, 0, 2, 1, 0, 1, 1, 2, 2, 2, 1, 2, 0, 2, 1, 1, 0, 1, 2, 0, 1, 2, 2, 1, 2, 1, 0, 0, 2, 0, 0, 1, 1, 0, 2, 2, 2, 0, 2, 2, 0, 1, 0, 1, 2, 1, 2, 2, 0, 0, 1, 1, 1, 2, 2, 0, 1, 2, 2, 0, 0, 2, 0, 1, 0, 1, 2, 1, 0, 1, 0, 1, 0, 0, 2, 1, 1, 0, 1, 1, 0, 1, 0, 2, 2, 1, 0, 2, 0, 2, 0, 1, 1, 1, 0, 2, 2, 1, 1, 2, 2, 2, 1, 0, 1, 2, 1, 1, 2, 1, 1, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 1, 0, 1, 2, 0, 2, 0, 0, 1, 0, 0, 1, 0, 1, 2, 2, 2, 1, 2, 0, 0, 1, 1, 2, 2, 1, 2, 0, 2, 1, 1, 1, 1, 2, 0, 2, 0, 0, 2, 0, 1, 1, 2, 0, 2, 0, 2, 2, 1, 2, 2, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 1, 0, 2, 2, 0, 2, 0, 2, 2, 1, 0, 2, 2, 2, 2, 1, 0, 2, 2, 0, 1, 2, 1, 0, 0, 2, 1, 1, 1, 1, 1, 0, 2, 0, 2, 2, 1, 0, 0, 1, 2, 0, 2, 1, 1, 2, 2, 0, 2, 1, 1, 1, 0, 0, 2, 2, 1, 1, 0, 2, 2, 1, 0, 2, 0, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 2, 2, 1, 1, 0, 1, 0, 1, 1, 2, 2, 2, 0, 0, 2, 1, 2, 1, 1, 1, 0, 1, 1, 2, 2, 0, 0, 1, 1, 1, 2, 1, 1, 0, 1, 2, 1, 0, 1, 2, 2, 0, 0, 1, 0, 2, 2, 2, 1, 1, 1, 1, 2, 1, 0, 2, 0, 1, 1, 2, 0, 2, 0, 1, 1, 0, 1, 2, 0, 2, 2, 1, 2, 1, 1, 2, 1, 2, 1, 0, 0, 1, 1, 2, 0, 1, 2, 1, 2, 0, 1, 0, 2, 0, 1, 1, 2, 0, 2, 
2, 2, 2, 1, 2, 2, 1, 0, 0, 0, 0, 1, 1, 1, 2, 1, 2, 1, 1, 0, 1, 0, 2, 1, 1, 1, 0, 2, 1, 2, 2, 2, 0, 1, 1, 2, 2, 2, 0, 1, 0, 0, 2, 1, 2, 1, 0, 0, 0, 0, 2, 0, 0, 0, 2, 1, 1, 0, 2, 2, 1, 1, 2, 2, 1, 1, 0, 0, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 1, 1, 0, 0, 0, 0, 2, 0, 0, 0, 1, 1, 2, 1, 0, 2, 2, 2, 0, 1, 1, 2, 1, 1, 2, 0, 1, 2, 0, 0, 1, 2, 1, 2, 1, 1, 0, 1, 2, 2, 1, 0, 0, 0, 2, 1, 2, 2, 2, 1, 1, 0, 1, 2, 2, 1, 1, 0, 2, 2, 0, 1, 0, 0, 2, 1, 0, 2, 1, 2, 2, 2, 1, 2, 0, 0, 2, 1, 0, 0, 1, 0, 2, 1, 2, 2, 0, 2, 2, 1, 0, 0, 0, 2, 2, 1, 1, 2, 2, 2, 1, 2, 1, 0, 1, 1, 0, 2, 2, 1, 2, 2, 0, 2, 1, 0, 1, 1, 0, 1, 2, 0, 2, 2, 2, 1, 0, 2, 0, 1, 1, 1, 1, 0, 2, 0, 2, 1, 2, 1, 0, 0, 2, 2, 1, 1, 1, 1, 0, 1, 0, 2, 1, 2, 2, 1, 1, 0, 2, 0, 2, 2, 1, 1, 1, 2, 0, 1, 0, 1, 1, 2, 1, 1, 1, 2, 0, 2, 2, 2, 1, 1, 2, 0, 0, 1, 1, 0, 1, 1, 0, 2, 1, 2, 1, 1, 1, 0, 2, 0, 1, 1, 1, 1, 2, 2, 1, 1, 0, 0, 1, 1, 1, 0, 2, 1, 1, 2, 2, 2, 2, 0, 0, 2, 0, 1, 0, 2, 2, 1, 1, 0, 2, 0, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 0, 2, 1, 2, 1, 1, 2, 1, 0, 2, 0, 1, 2, 2, 2, 2, 0, 1, 0, 1, 2, 0, 2, 1, 2, 0, 2, 2, 1, 0, 0, 2, 0, 2, 2, 2, 2, 1, 1, 1, 0, 2, 1, 1, 2, 2, 2, 2, 1, 0, 2, 1, 1, 2, 2, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 0, 2, 1, 2, 2, 1, 2, 1, 0, 0, 0, 1, 1, 1, 2, 0, 1, 0, 2, 1, 1, 2, 2, 1, 1, 2, 1, 0, 2, 2, 1, 0, 0, 1, 2, 2, 1, 0, 1, 2, 0, 2, 2, 1, 2, 2, 0, 1, 1, 2, 2, 1, 1, 1, 2, 0, 1, 2, 0, 2, 1, 2, 2, 1, 2, 0, 2, 0, 1, 1, 1, 0, 1, 2, 1, 2, 1, 0, 2, 1, 0, 1, 0, 1, 2, 1, 0, 0, 1, 1, 2, 1, 2, 1, 0, 0, 0, 2, 1, 2, 0, 1, 0, 2, 0, 0, 1, 2, 2, 2, 1, 0, 2, 2, 1, 2, 0, 1, 2, 0, 1, 0, 0, 1, 2, 2, 2, 1, 0, 2, 1, 2, 2, 2, 1, 1, 1, 2, 0, 2, 1, 2, 1, 1, 1, 0, 1, 2, 0, 0, 2, 1, 2, 0, 0, 1, 1, 0, 2, 2, 2, 1, 1, 0, 2, 1, 2, 1, 1, 0, 1, 2, 1, 1, 2, 2, 1, 0, 0, 0, 1, 0, 1, 2, 1, 1, 1, 2, 1, 1, 0, 1, 1, 1, 2, 1, 2, 1, 2, 0, 1, 2, 0, 2, 0, 1, 1, 2, 0, 2, 2, 1, 2, 2, 0, 0, 2, 2, 2, 0, 2, 0, 1, 2, 1, 0, 1, 2, 2, 0, 0, 2, 2, 1, 1, 1, 2, 0, 1, 2, 2, 1, 1, 0, 2, 0, 2, 
0, 2, 2, 2, 0, 0, 0, 2, 0, 2, 2, 1, 0, 1, 1, 1, 2, 1, 1, 0, 1, 0, 0, 2, 2, 1, 2, 1, 0, 0, 2, 1, 2, 0, 1, 0, 2, 1, 2, 0, 1, 1, 1, 2, 0, 0, 0, 0, 2, 2, 0, 1, 1, 1, 1, 0, 0, 2, 1, 1, 1, 0, 0, 2, 2, 0, 2, 0, 2, 1, 0, 1, 2, 2, 2, 1, 1, 2, 1, 0, 1, 1, 2, 2, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 2, 0, 0, 2, 2, 2, 2, 0, 1, 2, 0, 0, 0, 0, 1, 0, 2, 0, 1, 2, 2, 1, 2, 2, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 0, 1, 2, 0, 2, 2, 2, 2, 1, 0, 1, 2, 2, 0, 1, 2, 2, 2, 0, 0, 1, 2, 2, 2, 1, 2, 2, 2, 2, 0, 1, 0, 1, 0, 1, 1, 2, 2, 2, 2, 0, 1, 1, 0, 2, 0, 1, 2, 0, 2, 1, 0, 1, 1, 2, 0, 2, 1, 1, 2, 0, 1, 2, 1, 0, 2, 1, 1, 2, 1, 1, 0, 1, 1, 2, 0, 2, 1, 2, 1, 2, 0, 0, 2, 1, 1, 1, 2, 1, 0, 1, 1, 1, 2, 2, 2, 1, 0, 0, 1, 1, 2, 0, 1, 0, 2, 1, 2, 2, 2, 0, 2, 1, 2, 2, 0, 1, 1, 2, 0, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 1, 0, 0, 1, 0, 1, 0, 1, 2, 2, 0, 2, 1, 1, 2, 0, 2, 1, 1, 2, 0, 2, 1, 2, 1, 1, 2, 1, 2, 2, 0, 1, 2, 1, 2, 1, 1, 1, 1, 0, 1, 2, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 2, 1, 2, 1, 2, 0, 2, 0, 0, 0, 0, 1, 1, 2, 1, 1, 1, 2, 1, 2, 0, 2, 1, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 0, 1, 0, 2, 0, 1, 2, 1, 2, 2, 2, 0, 2, 2, 2, 2, 1, 2, 0, 0, 1, 1, 2, 0, 2, 0, 0, 2, 2, 1, 0, 1, 1, 0, 2, 1, 2, 1, 0, 2, 1, 2, 1, 1, 1, 0, 2, 1, 1, 1, 0, 1, 0, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 0, 1, 0, 1, 1, 0, 2, 1, 2, 2, 2, 1, 1, 2, 2, 0, 2, 2, 1, 2, 1, 2, 2, 2, 0, 0, 0, 1, 2, 0, 1, 0, 1, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 2, 0, 1, 2, 1, 1, 0, 2, 2, 0, 0, 0, 1, 0, 2, 1, 1, 1, 1, 2, 2, 1, 2, 2, 2, 0, 1, 1, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 0, 1, 1, 1, 2, 2, 1, 2, 0, 2, 2, 1, 0, 0, 1, 2, 1, 2, 2, 0, 1, 2, 1, 1, 2, 0, 2, 2, 0, 2, 2, 1, 2, 2, 1, 1, 0, 2, 1, 1, 2, 0, 1, 1, 2, 1, 0, 0, 0, 0, 1, 0, 2, 1, 0, 2, 1, 0, 1, 0, 2, 2, 0, 2, 2, 2, 2, 0, 2, 1, 2, 2, 1, 2, 2, 0, 2, 1, 1, 1, 2, 2, 2, 1, 0, 2, 1, 0, 2, 0, 2, 1, 1, 2, 0, 1, 0, 0, 2, 2, 1, 2, 0, 1, 1, 1, 2, 1, 0, 0, 0, 2, 1, 1, 1, 0, 1, 2, 1, 0, 1, 2, 2, 0, 1, 2, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 2, 2, 1, 0, 0, 2, 0, 0, 0, 
1, 2, 1, 0, 1, 2, 2, 2, 2, 1, 2, 0, 1, 2, 1, 0, 1, 1, 2, 0, 0, 0, 2, 1, 2, 2, 0, 2, 1, 1, 0, 2, 0, 2, 1, 0, 0, 0, 1, 0, 0, 2, 1, 0, 1, 0, 0, 2, 2, 2, 2, 0, 1, 1, 1, 2, 2, 0, 2, 2, 0, 2, 2, 0, 0, 2, 0, 2, 1, 2, 1, 0, 1, 2, 2, 2, 0, 0, 1, 2, 0, 1, 2, 2, 2, 2, 0, 2, 2, 1, 1, 2, 1, 1, 2, 0, 1, 1, 1, 0, 2, 1, 2, 2, 2, 1, 1, 0, 0, 0, 1, 2, 1, 2, 0, 1, 0, 2, 2, 1, 2, 2, 1, 2, 2, 0, 1, 2, 0, 2, 2, 2, 2, 1, 1, 0, 2, 1, 1, 2, 0, 1, 1, 0, 0, 0, 2, 2, 2, 0, 1, 1, 2, 1, 1, 0, 2, 0, 1, 0, 0, 0, 2, 2, 0, 0, 1, 1, 2, 2, 1, 1, 0, 2, 2, 1, 0, 1, 0, 2, 1, 1, 1, 0, 1, 2, 2, 0, 0, 1, 2, 1, 0, 1, 2, 2, 2, 1, 2, 1, 0, 1, 1, 1, 1, 2, 2, 2, 2, 0, 2, 1, 0, 2, 0, 2, 0, 2, 2, 1, 1, 1, 2, 2, 1, 0, 0, 1, 1, 1, 2, 1, 2, 0, 2, 0, 1, 2, 0, 2, 0, 0, 2, 2, 0, 2, 2, 0, 0, 0, 1, 2, 0, 2, 1, 2, 1, 0, 1, 0, 0, 2, 0, 1, 2, 0, 0, 2, 2, 0, 1, 1, 1, 1, 0, 0, 0, 2, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 2, 0, 0, 0, 2, 1, 2, 2, 2, 2, 0, 0, 2, 2, 2, 2, 2, 2, 1, 1, 1, 0, 1, 1, 2, 1, 1, 2, 2, 2, 0, 2, 0, 0, 1, 2, 0, 2, 1, 1, 1, 1, 0, 1, 1, 0, 2, 1, 2, 2, 1, 0, 2, 1, 1, 1, 1, 2, 0, 1, 2, 2, 1, 0, 1, 2, 1, 1, 2, 1, 1, 0, 1, 2, 1, 2, 1, 0, 2, 1, 2, 1, 2, 0, 1, 0, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 1, 2, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 2, 0, 1, 1, 0, 0, 0, 2, 0, 0, 0, 2, 1, 1, 1, 1, 2, 1, 0, 2, 2, 2, 0, 0, 1, 1, 1, 1, 2, 1, 2, 2, 0, 0, 0, 1, 1, 2, 1, 2, 0, 0, 2, 1, 2, 1, 1, 1, 1, 1, 0, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 0, 2, 1, 2, 1, 2, 2, 0, 0, 2, 1, 0, 0, 1, 2, 2, 1, 2, 2, 1, 2, 1, 0, 1, 1, 2, 1, 2, 0, 2, 1, 2, 2, 1, 0, 2, 0, 1, 1, 2, 2, 0, 0, 0, 2, 0, 0, 2, 2, 0, 0, 1, 0, 0, 2, 2, 0, 2, 0, 2, 0, 2, 2, 1, 2, 2, 1, 0, 1, 1, 0, 0, 2, 1, 1, 1, 1, 2, 2, 0, 2, 0, 1, 0, 0, 1, 1, 2, 1, 0, 1, 1, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 0, 1, 1, 0, 2, 1, 1, 0, 2, 2, 2, 1, 2, 1, 0, 2, 2, 2, 2, 1, 0, 2, 0, 2, 1, 0, 1, 1, 0, 2, 1, 0, 2, 0, 1, 0, 2, 2, 1, 2, 0, 1, 1, 2, 2, 0, 0, 1, 2, 0, 2, 0, 2, 1, 1, 2, 0, 1, 1, 2, 1, 2, 2, 0, 1, 2, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 2, 2, 2, 1, 2, 1, 1, 
0, 2, 1, 2, 1, 1, 0, 1, 2, 0, 0, 2, 2, 1, 2, 1, 0, 2, 0, 0, 2, 0, 0, 1, 0, 1, 1, 2, 2, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 2, 1, 2, 2, 2, 2, 1, 1, 1, 1, 0, 2, 0, 2, 0, 0, 2, 1, 0, 0, 1, 1, 1, 2, 1, 2, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 2, 2, 2, 1, 1, 2, 0, 2, 2, 0, 2, 2, 2, 1, 1, 2, 0, 1, 2, 1, 0, 1, 2, 0, 1, 2, 1, 1, 2, 2, 1, 0, 1, 0, 1, 2, 2, 1, 0, 2, 2, 2, 1, 1, 2, 1, 2, 2, 2, 0, 1, 1, 2, 1, 0, 0, 1, 0, 1, 0, 1, 0, 2, 2, 2, 2, 0, 1, 2, 1, 1, 2, 0, 2, 0, 0, 0, 2, 2, 2, 1, 2, 1, 2, 2, 0, 2, 2, 1, 2, 0, 1, 1, 0, 2, 1, 0, 1, 1, 2, 1, 0, 1, 2, 1, 0, 2, 2, 2, 0, 2, 1, 2, 2, 2, 2, 1, 1, 1, 0, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 0, 1, 0, 2, 2, 2, 1, 1, 1, 1, 0, 2, 2, 0, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 0, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 1, 0, 2, 0, 2, 0, 0, 2, 0, 2, 2, 2, 1, 1, 1, 2, 1, 1, 2, 1, 0, 0, 0, 1, 2, 1, 0, 1, 2, 1, 0, 1, 2, 0, 1, 0, 0, 2, 1, 0, 2, 0, 2, 1, 0, 0, 1, 1, 2, 1, 1, 1, 0, 2, 0, 1, 0, 0, 1, 1, 2, 2, 1, 0, 0, 1, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 1, 1, 2, 1, 0, 2, 0, 1, 1, 0, 1, 2, 0, 0, 0, 2, 0, 1, 1, 0, 2, 1, 0, 0, 0, 0, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 0, 0, 2, 1, 1, 2, 1, 0, 2, 2, 1, 0, 0, 1, 0, 0, 2, 1, 2, 2, 2, 0, 2, 2, 0, 1, 2, 2, 2, 2, 2, 2, 0, 0, 2, 2, 0, 2, 2, 2, 1, 2, 0, 2, 1, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 1, 2, 1, 1, 2, 2, 1, 0, 1, 2, 0, 0, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 0, 2, 1, 2, 2, 1, 2, 0, 0, 0, 2, 0, 1, 0, 1, 1, 2, 2, 0, 2, 1, 0, 1, 2, 1, 2, 1, 0, 2, 2, 1, 2, 1, 0, 1, 2, 1, 0, 2, 0, 1, 0, 0, 2, 2, 2, 2, 2, 2, 1, 0, 1, 1, 2, 1, 0, 2, 2, 0, 1, 2, 0, 0, 2, 2, 1, 2, 0, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 0, 0, 1, 2, 2, 2, 0, 0, 1, 1, 1, 0, 2, 2, 1, 2, 0, 0, 1, 0, 1, 0, 2, 0, 0, 2, 2, 0, 2, 0, 2, 1, 1, 0, 1, 2, 0, 0, 2, 1, 2, 0, 0, 0, 2, 0, 2, 1, 0, 1, 1, 2, 1, 2, 2, 0, 2, 2, 2, 2, 2, 0, 0, 0, 0, 2, 2, 1, 1, 0, 2, 1, 1, 0, 2, 1, 2, 0, 2, 1, 1, 2, 0, 1, 2, 1, 2, 2, 1, 2, 2, 0, 2, 1, 0, 1, 0, 1, 2, 1, 2, 1, 1, 0, 2, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 0, 0, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 0, 0, 2, 2, 0, 0, 2, 2, 0, 1, 1, 1, 
0, 1, 1, 2, 1, 1, 2, 2, 1, 1, 1, 1, 2, 0, 1, 1, 0, 1, 0, 2, 1, 0, 0, 2, 2, 2, 1, 1, 1, 0, 2, 2, 0, 2, 1, 2, 0, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2, 1, 0, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 0, 2, 1, 1, 1, 0, 1, 2, 2, 1, 0, 2, 2, 2, 2, 2, 1, 1, 0, 2, 0, 2, 2, 2, 1, 1, 0, 1, 1, 0, 0, 2, 1, 2, 2, 2, 0, 1, 1, 2, 1, 2, 1, 2, 0, 2, 0, 1, 1, 2, 2, 0, 1, 1, 2, 2, 2, 2, 1, 0, 1, 1, 1, 1, 2, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 2, 0, 0, 0, 2, 2, 1, 1, 1, 2, 1, 2, 0, 1, 0, 0, 0, 2, 0, 0, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 0, 2, 2, 2, 0, 1, 0, 0, 0, 1, 2, 1, 2, 2, 1, 0, 2, 2, 2, 1, 1, 1, 2, 1, 1, 0, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 0, 2, 2, 2, 1, 2, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 2, 1, 2, 1, 2, 2, 0, 2, 2, 0, 0, 0, 1, 2, 1, 1, 0, 2, 0, 0, 1, 2, 2, 2, 0, 1, 0, 2, 2, 2, 0, 2, 1, 0, 1, 0, 2, 1, 2, 0, 2, 1, 2, 0, 1, 2, 2, 2, 0, 1, 2, 0, 1, 2, 2, 2, 0, 1, 2, 0, 1, 2, 0, 1, 0, 2, 1, 2, 1, 1, 1, 1, 0, 2, 2, 0, 1, 0, 0, 2, 1, 0, 1, 0, 2, 2, 1, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 0, 1, 1, 0, 0, 2, 2, 1, 2, 1, 2, 2, 0, 2, 2, 0, 1, 1, 2, 1, 2, 1, 2, 2, 0, 0, 1, 1, 1, 1, 2, 2, 0, 2, 0, 0, 2, 0, 2, 1, 1, 1, 0, 2, 0, 1, 0, 2, 1, 1, 1, 1, 2, 1, 0, 1, 2, 1, 2, 0, 1, 1, 2, 0, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 1, 0, 1, 2, 0, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2, 2, 1, 2, 0, 2, 1, 1, 0, 2, 1, 2, 2, 1, 2, 2, 1, 2, 0, 1, 1, 2, 2, 0, 1, 0, 2, 1, 2, 0, 1, 2, 1, 2, 2, 0, 0, 0, 1, 0, 0, 0, 0, 0, 2, 2, 2, 1, 0, 1, 2, 2, 2, 1, 1, 0, 0, 0, 2, 1, 1, 1, 1, 1, 0, 1, 2, 1, 1, 1, 1, 2, 0, 1, 1, 1, 2, 0, 0, 0, 0, 1, 0, 1, 1, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 0, 2, 2, 0, 1, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 0, 2, 2, 2, 0, 2, 0, 1, 1, 0, 0, 2, 1, 0, 0, 1, 2, 1, 2, 2, 2, 1, 2, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 2, 0, 0, 1, 1, 0, 2, 0, 0, 2, 2, 2, 2, 2, 1, 0, 2, 0, 1, 1, 0, 2, 0, 2, 2, 1, 1, 1, 1, 0, 2, 0, 1, 2, 1, 1, 2, 1, 2, 0, 1, 2, 2, 2, 2, 2, 0, 2, 0, 2, 2, 0, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 1, 1, 1, 1, 2, 2, 0, 2, 0, 1, 2, 1, 1, 1, 2, 2, 1, 0, 2, 2, 1, 1, 1, 2, 1, 0, 2, 2, 2, 2, 0, 2, 2, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 
1, 2, 1, 1, 2, 0, 2, 2, 2, 1, 1, 1, 0, 0, 2, 0, 2, 1, 1, 1, 0, 0, 0, 1, 1, 0, 2, 0, 2, 2, 0, 0, 2, 0, 2, 1, 2, 0, 2, 2, 1, 0, 1, 2, 1, 2, 2, 2, 0, 2, 0, 1, 1, 2, 1, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 2, 0, 2, 2, 2, 2, 1, 0, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 0, 0, 0, 1, 2, 1, 0, 2, 2, 1, 2, 2, 1, 2, 1, 0, 2, 1, 1, 1, 0, 2, 0, 1, 1, 1, 0, 2, 0, 1, 0, 2, 2, 0, 1, 0, 0, 1, 2, 1, 1, 0, 1, 1, 2, 0, 2, 2, 1, 2, 2, 0, 1, 1, 2, 0, 1, 2, 0, 2, 2, 1, 2, 2, 2, 2, 2, 0, 0, 2, 1, 0, 2, 1, 2, 0, 1, 2, 1, 1, 2, 2, 1, 0, 1, 2, 2, 0, 2, 1, 2, 1, 2, 0, 2, 1, 2, 1, 2, 1, 2, 0, 2, 0, 1, 2, 0, 2, 1, 0, 2, 2, 0, 1, 2, 0, 2, 1, 2, 1, 1, 0, 1, 2, 1, 1, 0, 2, 2, 2, 1, 1, 0, 1, 1, 2, 1, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 0, 0, 2, 1, 2, 0, 2, 1, 0, 2, 2, 2, 1, 2, 2, 0, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 0, 0, 0, 1, 2, 1, 1, 2, 1, 2, 0, 1, 0, 2, 1, 0, 2, 2, 2, 0, 1, 0, 2, 2, 1, 2, 2, 2, 0, 1, 1, 0, 2, 1, 2, 1, 1, 2, 1, 2, 2, 2, 2, 1, 1, 0, 0, 0, 1, 2, 0, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 0, 1, 0, 2, 1, 0, 2, 1, 0, 0, 1, 0, 2, 2, 0, 2, 0, 0, 2, 0, 2, 1, 2, 2, 1, 1, 2, 1, 1, 0, 1, 0, 1, 0, 2, 2, 2, 1, 0, 2, 2, 2, 1, 2, 1, 2, 0, 1, 1, 0, 2, 1, 1, 2, 1, 1, 2, 2, 1, 1, 0, 0, 2, 0, 0, 1, 2, 1, 0, 0, 2, 2, 1, 1, 1, 2, 2, 2, 0, 2, 0, 1, 0, 2, 0, 2, 2, 0, 0, 0, 2, 1, 0, 0, 2, 0, 1, 0, 1, 0, 2, 2, 1, 0, 1, 1, 2, 2, 2, 1, 0, 2, 1, 1, 2, 0, 0, 0, 0, 2, 0, 1, 1, 2, 1, 0, 2, 0, 1, 2, 1, 0, 1, 0, 1, 1, 2, 2, 1, 1, 1, 1, 0, 2, 1, 1, 0, 0, 2, 2, 0, 1, 2, 0, 0, 2, 1, 2, 2, 2, 0, 2, 1, 1, 2, 0, 1, 2, 1, 2, 1, 2, 2, 1, 1, 1, 0, 1, 1, 2, 1, 2, 2, 2, 2, 2, 0, 0, 2, 2, 2, 1, 2, 1, 1, 0, 2, 2, 2, 2, 1, 2, 1, 1, 2, 1, 0, 0, 2, 0, 0, 1, 0, 2, 1, 2, 2, 2, 0, 2, 1, 0, 2, 1, 2, 1, 2, 0, 0, 1, 0, 0, 2, 1, 0, 2, 1, 0, 2, 1, 2, 1, 1, 0, 1, 2, 0, 0, 1, 2, 1, 2, 2, 2, 0, 0, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1, 1, 0, 1, 1, 2, 0, 1, 0, 0, 1, 1, 0, 1, 0, 2, 0, 1, 2, 0, 0, 2, 2, 0, 0, 1, 1, 1, 0, 0, 2, 1, 2, 1, 1, 0, 0, 1, 2, 1, 2, 1, 2, 0, 2, 0, 2, 2, 2, 1, 1, 0, 1, 1, 1, 0, 2, 1, 1, 2, 2, 2, 0, 1, 0, 0, 2, 1, 1, 1, 1, 0, 2, 2, 0, 0, 2, 
0, 1, 2, 2, 1, 1, 1, 2, 1, 2, 1, 1, 0, 1, 1, 2, 1, 1, 1, 0, 1, 2, 0, 2, 2, 0, 2, 1, 2, 2, 0, 1, 0, 2, 1, 1, 1, 1, 1, 0, 2, 0, 1, 2, 2, 2, 1, 2, 2, 2, 1, 0, 1, 2, 1, 0, 2, 0, 2, 2, 1, 2, 1, 2, 1, 0, 0, 2, 1, 1, 0, 1, 2, 1, 1, 1, 0, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 0, 0, 0, 0, 1, 0, 0, 2, 2, 1, 2, 0, 2, 1, 1, 1, 2, 0, 1, 1, 0, 0, 0, 1, 1, 0, 2, 0, 2, 2, 2, 0, 2, 2, 1, 0, 2, 0, 1, 2, 1, 2, 1, 2, 2, 0, 2, 2, 2, 2, 1, 2, 2, 0, 2, 2, 2, 2, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 2, 0, 0, 2, 1, 0, 1, 0, 1, 2, 1, 1, 2, 1, 0, 2, 0, 0, 2, 2, 1, 0, 2, 0, 0, 2, 0, 1, 2, 0, 2, 1, 0, 0, 2, 2, 0, 2, 1, 0, 2, 2, 1, 0, 0, 2, 0, 1, 1, 1, 2, 1, 1, 2, 0, 2, 2, 2, 2, 0, 1, 1, 0, 0, 2, 0, 1, 2, 1, 1, 0, 2, 0, 0, 0, 1, 1, 1, 2, 2, 1, 2, 1, 0, 2, 2, 2, 0, 0, 1, 0, 1, 1, 2, 2, 2, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 0, 0, 1, 0, 0, 0, 2, 0, 1, 2, 0, 1, 1, 0, 0, 2, 0, 0, 2, 0, 0, 1, 2, 2, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 2, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 0, 2, 0, 1, 1, 0, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 1, 1, 0, 1, 2, 1, 1, 1, 0, 2, 1, 2, 2, 1, 1, 0, 2, 2, 0, 2, 2, 2, 2, 2, 0, 1, 1, 1, 1, 2, 1, 2, 0, 1, 0, 1, 1, 2, 2, 0, 1, 2, 1, 1, 1, 2, 0, 0, 2, 2, 1, 2, 1, 2, 2, 1, 1, 2, 1, 2, 2, 0, 1, 1, 1, 2, 0, 1, 2, 0, 1, 1, 1, 1, 0, 1, 2, 1, 2, 2, 0, 2, 1, 0, 2, 0, 1, 2, 2, 2, 0, 1, 2, 1, 1, 1, 1, 2, 1, 0, 0, 1, 1, 2, 2, 2, 0, 2, 0, 0, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 0, 1, 2, 0, 1, 1, 0, 1, 1, 1, 0, 2, 2, 2, 0, 1, 2, 1, 2, 2, 0, 1, 0, 1, 2, 1, 1, 1, 0, 2, 1, 1, 2, 1, 1, 2, 0, 1, 1, 2, 1, 1, 1, 2, 0, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 1, 0, 1, 2, 1, 2, 2, 1, 1, 2, 0, 2, 1, 0, 1, 0, 1, 2, 1, 2, 2, 0, 0, 0, 1, 1, 1, 2, 1, 2, 2, 2, 0, 1, 0, 2, 2, 2, 1, 1, 1, 1, 1, 1, 0, 2, 2, 0, 1, 2, 0, 2, 1, 2, 2, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 1, 0, 2, 1, 2, 2, 0, 2, 2, 1, 2, 2, 2, 2, 2, 0, 1, 1, 1, 1, 2, 1, 1, 0, 0, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 0, 2, 1, 1, 2, 1, 2, 2, 2, 1, 1, 0, 0, 2, 1, 1, 0, 0, 1, 2, 0, 1, 2, 1, 1, 2, 2, 1, 1, 0, 1, 2, 0, 0, 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 0, 0, 1, 2, 1, 0, 
2, 0, 2, 1, 0, 2, 2, 2, 2, 2, 0, 0, 2, 1, 0, 0, 1, 1, 1, 1, 2, 1, 2, 0, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 2, 0, 2, 1, 2, 1, 2, 1, 1, 1, 0, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2, 1, 1, 2, 1, 2, 0, 1, 1, 1, 0, 0, 2, 2, 1, 1, 1, 1, 0, 1, 0, 2, 2, 0, 1, 1, 2, 1, 1, 0, 0, 2, 0, 0, 0, 2, 2, 2, 2, 1, 1, 0, 2, 0, 2, 2, 1, 1, 0, 1, 0, 2, 2, 2, 1, 0, 1, 1, 1, 2, 1, 0, 0, 2, 1, 1, 2, 1, 1, 0, 2, 1, 0, 2, 1, 1, 0, 2, 0, 1, 2, 2, 1, 0, 2, 1, 2, 0, 2, 2, 1, 2, 1, 0, 1, 1, 2, 2, 2, 1, 0, 1, 2, 1, 1, 0, 2, 2, 0, 1, 2, 1, 2, 1, 2, 1, 1, 2, 1, 0, 1, 0, 2, 2, 2, 0, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 0, 0, 2, 2, 2, 0, 2, 1, 0, 2, 1, 0, 2, 0, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 1, 0, 1, 1, 0, 0, 1, 2, 1, 1, 2, 0, 0, 1, 1, 2, 1, 1, 1, 2, 1, 1, 0, 1, 1, 0, 0, 2, 2, 1, 0, 2, 1, 2, 1, 0, 1, 2, 1, 0, 2, 0, 0, 2, 1, 2, 1, 2, 2, 1, 0, 1, 0, 0, 2, 1, 2, 0, 0, 2, 1, 2, 0, 0, 1, 2, 1, 2, 1, 1, 2, 1, 2, 1, 0, 0, 1, 1, 2, 1, 2, 2, 2, 0, 1, 1, 0, 1, 2, 1, 0, 1, 1, 2, 2, 1, 2, 0, 0, 2, 1, 1, 2, 1, 2, 1, 0, 0, 1, 1, 2, 1, 2, 0, 1, 1, 0, 2, 1, 0, 2, 2, 0, 2, 1, 0, 2, 2, 2, 1, 0, 2, 2, 2, 2, 2, 0, 1, 2, 2, 1, 1, 2, 0, 1, 0, 1, 2, 2, 0, 1, 2, 1, 2, 2, 1, 2, 0, 2, 1, 2, 2, 0, 1, 1, 2, 2, 2, 2, 2, 1, 1, 0, 2, 2, 0, 2, 1, 0, 2, 2, 1, 2, 2, 0, 0, 0, 1, 2, 2, 2, 0, 0, 2, 2, 0, 1, 0, 2, 2, 2, 2, 2, 0, 2, 1, 0, 1, 1, 1, 0, 2, 1, 0, 1, 1, 2, 0, 1, 1, 2, 1, 2, 0, 1, 1, 1, 1, 2, 0, 2, 2, 0, 2, 0, 2, 0, 1, 1, 1, 0, 1, 1, 2, 1, 0, 2, 2, 1, 2, 1, 2, 2, 2, 2, 1, 1, 1, 2, 1, 2, 1, 0, 0, 1, 0, 2, 2, 2, 2, 1, 1, 2, 0, 1, 2, 1, 2, 0, 1, 0, 2, 2, 1, 1, 0, 2, 2, 1, 2, 1, 2, 1, 1, 1, 0, 2, 1, 2, 2, 1, 2, 0, 2, 2, 0, 2, 0, 2, 2, 1, 2, 1, 1, 1, 0, 2, 0, 1, 1, 2, 1, 2, 2, 2, 0, 1, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 0, 2, 1, 0, 1, 0, 0, 2, 2, 0, 0, 1, 2, 1, 1, 2, 2, 2, 0, 2, 2, 1, 0, 1, 0, 0, 0, 1, 2, 1, 1, 1, 1, 2, 0, 1, 2, 2, 2, 2, 0, 2, 0, 2, 1, 0, 1, 2, 2, 2, 2, 0, 1, 0, 2, 1, 2, 0, 2, 1, 1, 0, 2, 0, 0, 1, 2, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 2, 2, 0, 0, 0, 2, 0, 0, 1, 2, 0, 1, 0, 0, 1, 2, 2, 0, 
1, 1, 2, 0, 2, 2, 0, 1, 2, 2, 2, 2, 2, 0, 0, 2, 2, 2, 0, 0, 0, 1, 0, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 0, 2, 2, 2, 1, 1, 2, 0, 2, 1, 1, 0, 0, 1, 0, 2, 2, 1, 0, 0, 1, 0, 2, 2, 1, 1, 1, 0, 2, 2, 2, 0, 1, 0, 1, 1, 2, 2, 0, 2, 0, 2, 2, 1, 0, 2, 0, 0, 0, 2, 2, 1, 2, 1, 1, 2, 2, 0, 2, 2, 2, 0, 0, 2, 1, 0, 0, 1, 1, 0, 2, 0, 1, 0, 0, 2, 1, 1, 2, 2, 2, 1, 1, 1, 2, 0, 1, 1, 1, 1, 2, 0, 1, 0, 1, 1, 2, 1, 1, 1, 2, 0, 2, 2, 2, 1, 1, 0, 2, 1, 2, 1, 0, 1, 2, 2, 1, 0, 2, 1, 2, 2, 0, 1, 1, 2, 2, 1, 0, 1, 1, 2, 2, 2, 2, 0, 1, 1, 1, 2, 2, 0, 0, 1, 1, 1, 0, 1, 2, 0, 2, 2, 2, 0, 0, 2, 2, 2, 1, 0, 2, 2, 1, 0, 0, 2, 1, 2, 0, 1, 1, 2, 2, 1, 0, 1, 2, 1, 2, 2, 0, 1, 2, 1, 0, 0, 2, 0, 1, 1, 1, 2, 1, 0, 1, 0, 1, 2, 2, 0, 1, 1, 1, 2, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 2, 2, 2, 0, 0, 2, 0, 0, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0, 1, 0, 2, 1, 1, 2, 1, 2, 1, 0, 2, 1, 2, 2, 1, 0, 2, 1, 2, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 1, 2, 2, 2, 2, 0, 1, 2, 2, 0, 2, 1, 1, 1, 1, 2, 1, 2, 2, 0, 2, 0, 2, 2, 2, 0, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 2, 2, 1, 0, 0, 1, 2, 0, 0, 2, 1, 0, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 0, 2, 1, 0, 2, 1, 2, 2, 2, 1, 0, 2, 0, 0, 0, 2, 1, 1, 0, 1, 2, 0, 0, 2, 1, 2, 1, 2, 0, 0, 2, 2, 1, 2, 1, 1, 2, 1, 0, 2, 2, 1, 0, 1, 2, 2, 1, 0, 1, 1, 2, 0, 1, 1, 1, 0, 2, 2, 1, 2, 2, 2, 0, 1, 0, 1, 0, 0, 1, 1, 1, 2, 1, 2, 1, 2, 1, 0, 2, 0, 1, 0, 2, 1, 0, 0, 0, 1, 1, 2, 0, 1, 2, 0, 2, 1, 0, 0, 2, 2, 1, 0, 1, 0, 2, 1, 0, 1, 0, 2, 0, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 0, 1, 2, 1, 2, 2, 2, 2, 2, 0, 0, 0, 0, 2, 1, 1, 2, 0, 2, 2, 0, 1, 0, 1, 1, 1, 2, 1, 2, 1, 0, 2, 2, 2, 1, 0, 1, 2, 2, 0, 2, 0, 1, 2, 0, 1, 1, 0, 2, 0, 1, 0, 2, 1, 1, 2, 2, 1, 0, 0, 2, 1, 0, 2, 1, 1, 1, 1, 0, 2, 1, 0, 0, 2, 1, 0, 2, 1, 0, 1, 1, 0, 0, 1, 2, 1, 2, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 2, 1, 0, 0, 2, 1, 1, 2, 2, 2, 2, 1, 1, 1, 0, 1, 2, 1, 1, 1, 2, 1, 0, 1, 2, 2, 1, 2, 1, 0, 1, 1, 2, 1, 0, 2, 1, 1, 1, 1, 1, 2, 2, 0, 0, 1, 0, 2, 0, 1, 2, 0, 1, 0, 0, 1, 1, 2, 2, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 0, 0, 2, 2, 0, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 0, 1, 2, 2, 0, 2, 0, 
2, 1, 0, 1, 0, 0, 1, 2, 2, 1, 0, 2, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 2, 1, 2, 2, 1, 1, 1, 0, 0, 1, 2, 1, 1, 1, 2, 1, 0, 1, 0, 2, 0, 2, 0, 1, 2, 0, 1, 2, 0, 1, 1, 2, 1, 1, 0, 2, 2, 0, 2, 1, 1, 0, 2, 1, 2, 2, 0, 2, 2, 2, 1, 0, 1, 1, 1, 1, 0, 1, 2, 1, 2, 0, 2, 0, 0, 1, 2, 2, 1, 0, 1, 0, 0, 1, 1, 2, 0, 2, 0, 2, 0, 0, 1, 1, 1, 2, 2, 1, 1, 2, 0, 2, 1, 0, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 2, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 0, 1, 2, 2, 1, 2, 1, 2, 1, 1, 2, 2, 1, 2, 0, 1, 1, 0, 1, 1, 2, 2, 1, 0, 1, 2, 2, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 2, 0, 2, 2, 0, 0, 1, 2, 2, 1, 0, 0, 1, 0, 0, 1, 2, 2, 1, 0, 0, 1, 1, 2, 1, 0, 1, 2, 2, 2, 2, 2, 1, 0, 2, 1, 1, 1, 1, 0, 1, 0, 1, 2, 1, 2, 1, 1, 1, 0, 2, 1, 1, 2, 2, 1, 2, 1, 2, 1, 0, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 1, 0, 2, 2, 2, 1, 2, 2, 0, 0, 2, 0, 0, 0, 1, 2, 2, 2, 2, 0, 1, 2, 2, 0, 1, 2, 1, 2, 2, 0, 2, 2, 1, 1, 0, 2, 0, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 0, 2, 1, 2, 1, 1, 0, 1, 2, 1, 1, 2, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 1, 2, 0, 0, 2, 0, 0, 1, 0, 0, 0, 2, 1, 1, 0, 2, 2, 1, 0, 0, 2, 2, 1, 2, 2, 0, 1, 0, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 0, 0, 1, 2, 2, 0, 2, 0, 0, 0, 2, 0, 0, 1, 2, 2, 0, 0, 0, 0, 2, 1, 0, 0, 1, 1, 1, 0, 1, 2, 1, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 0, 1, 1, 1, 2, 2, 0, 0, 1, 0, 1, 2, 1, 2, 2, 2, 2, 1, 0, 1, 2, 1, 1, 1, 0, 2, 1, 2, 1, 2, 1, 2, 2, 1, 0, 1, 1, 1, 0, 2, 2, 2, 1, 1, 0, 0, 2, 1, 2, 1, 0, 1, 1, 1, 2, 1, 2, 1, 0, 2, 0, 1, 1, 0, 0, 2, 2, 2, 0, 1, 2, 2, 2, 2, 1, 2, 2, 0, 1, 0, 0, 2, 2, 2, 2, 1, 1, 0, 2, 1, 1, 2, 2, 1, 1, 2, 1, 1, 2, 0, 1, 2, 0, 1, 1, 2, 1, 1, 2, 1, 2, 1, 0, 0, 2, 1, 0, 1, 2, 0, 0, 2, 2, 0, 2, 1, 0, 0, 0, 1, 1, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 0, 1, 2, 1, 1, 1, 0, 0, 1, 2, 2, 1, 1, 1, 2, 1, 0, 0, 1, 2, 2, 0, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 0, 2, 2, 2, 0, 0, 1, 2, 0, 1, 1, 0, 2, 0, 1, 1, 0, 2, 2, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 0, 0, 0, 2, 1, 2, 1, 2, 2, 1, 1, 0, 2, 2, 2, 2, 0, 0, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 0, 0, 1, 1, 1, 0, 0, 1, 
2, 0, 0, 2, 1, 2, 0, 1, 1, 0, 2, 2, 2, 0, 1, 1, 2, 1, 0, 2, 2, 2, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 2, 2, 0, 0, 0, 2, 0, 1, 2, 0, 0, 1, 1, 1, 1, 1, 2, 1, 2, 2, 0, 1, 1, 0, 2, 2, 2, 1, 0, 0, 0, 1, 0, 2, 1, 1, 1, 2, 2, 1, 0, 1, 1, 1, 0, 0, 2, 1, 2, 1, 2, 2, 0, 2, 1, 1, 2, 1, 0, 1, 1, 0, 1, 0, 0, 1, 1, 2, 2, 0, 0, 2, 1, 2, 2, 0, 1, 1, 2, 1, 2, 2, 0, 0, 1, 2, 1, 1, 2, 1, 2, 2, 0, 0, 0, 1, 2, 1, 0, 2, 1, 2, 1, 0, 1, 1, 2, 1, 1, 1, 2, 0, 1, 2, 0, 1, 2, 1, 2, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 2, 2, 2, 1, 2, 0, 2, 0, 1, 2, 0, 0, 0, 1, 2, 0, 1, 1, 1, 1, 0, 1, 2, 0, 2, 1, 1, 1, 1, 2, 1, 0, 2, 1, 2, 1, 2, 1, 2, 1, 2, 0, 0, 0, 2, 1, 1, 2, 1, 2, 2, 0, 0, 0, 1, 2, 0, 1, 1, 2, 1, 0, 2, 2, 2, 2, 1, 0, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 1, 0, 1, 1, 2, 1, 0, 1, 1, 0, 2, 1, 0, 2, 2, 1, 2, 0, 1, 1, 1, 2, 1, 1, 0, 2, 2, 1, 2, 0, 0, 2, 1, 2, 2, 1, 2, 1, 1, 2, 0, 0, 1, 2, 1, 0, 2, 0, 2, 1, 0, 2, 1, 2, 2, 0, 0, 2, 2, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 2, 0, 2, 1, 1, 2, 2, 2, 1, 2, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 2, 2, 0, 2, 2, 0, 0, 0, 2, 1, 2, 2, 2, 0, 0, 1, 2, 1, 1, 0, 2, 1, 2, 2, 0, 0, 1, 2, 1, 1, 0, 1, 1, 2, 1, 0, 2, 0, 2, 2, 1, 2, 1, 2, 0, 2, 1, 2, 1, 1, 1, 2, 2, 0, 1, 2, 2, 2, 0, 0, 2, 1, 2, 2, 1, 1, 1, 0, 2, 1, 0, 2, 0, 1, 1, 2, 0, 1, 1, 1, 1, 0, 0, 1, 1, 2, 1, 0, 2, 0, 2, 2, 2, 1, 0, 2, 0, 1, 0, 1, 2, 1, 2, 2, 1, 1, 2, 1, 0, 1, 1, 1, 2, 1, 0, 0, 1, 2, 0, 2, 2, 2, 1, 1, 2, 2, 2, 2, 1, 2, 0, 0, 1, 1, 2, 2, 1, 2, 2, 0, 1, 2, 0, 1, 2, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 0, 1, 2, 2, 2, 1, 2, 1, 2, 2, 0, 2, 1, 1, 2, 1, 2, 0, 0, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 1, 0, 0, 2, 1, 0, 2, 1, 1, 0, 2, 2, 0, 2, 0, 2, 0, 2, 1, 1, 1, 0, 0, 1, 2, 2, 0, 2, 2, 0, 1, 2, 2, 0, 1, 1, 2, 2, 2, 2, 1, 0, 2, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 0, 2, 1, 2, 2, 2, 2, 2, 2, 2, 0, 1, 2, 1, 0, 2, 2, 1, 0, 0, 1, 1, 0, 1, 1, 1, 2, 1, 2, 2, 0, 1, 0, 1, 2, 2, 2, 0, 0, 1, 1, 1, 1, 2, 1, 2, 0, 0, 1, 2, 0, 1, 2, 1, 2, 0, 1, 2, 2, 1, 0, 2, 2, 0, 1, 2, 2, 2, 0, 2, 1, 2, 2, 0, 2, 1, 2, 2, 0, 0, 2, 1, 1, 0, 1, 2, 2, 
2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 0, 0, 2, 1, 2, 0, 1, 0, 1, 2, 2, 1, 2, 0, 1, 1, 1, 1, 0, 2, 2, 0, 1, 0, 2, 1, 0, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 1, 0, 1, 1, 2, 2, 0, 1, 0, 1, 2, 1, 2, 2, 0, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 1, 0, 1, 2, 1, 1, 0, 2, 2, 2, 1, 2, 1, 2, 0, 2, 1, 2, 2, 0, 0, 0, 1, 1, 2, 1, 2, 0, 1, 1, 2, 2, 0, 1, 2, 1, 1, 1, 2, 2, 0, 2, 1, 2, 0, 0, 1, 2, 1, 0, 2, 2, 0, 1, 1, 1, 0, 1, 2, 2, 0, 0, 2, 1, 1, 2, 1, 1, 2, 2, 1, 0, 0, 0, 2, 0, 0, 1, 2, 0, 0, 1, 1, 1, 2, 0, 0, 2, 2, 2, 2, 1, 2, 1, 0, 0, 0, 1, 0, 0, 1, 2, 2, 2, 2, 1, 2, 2, 0, 1, 2, 0, 0, 0, 2, 2, 2, 1, 1, 2, 1, 2, 2, 0, 1, 2, 2, 2, 0, 1, 0, 2, 2, 1, 2, 0, 1, 1, 1, 0, 2, 2, 2, 1, 2, 0, 0, 2, 0, 1, 2, 2, 1, 2, 2, 1, 0, 0, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 0, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 1, 2, 2, 2, 0, 0, 0, 2, 2, 0, 1, 0, 0, 1, 2, 1, 2, 0, 2, 1, 1, 1, 2, 0, 2, 1, 1, 1, 2, 1, 1, 1, 2, 0, 1, 2, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 2, 1, 1, 0, 1, 1, 2, 1, 1, 1, 1, 0, 0, 2, 1, 2, 1, 2, 1, 1, 0, 1, 1, 0, 2, 2, 2, 0, 0, 0, 2, 0, 0, 1, 2, 0, 2, 2, 1, 2, 0, 2, 2, 0, 0, 0, 0, 1, 2, 1, 0, 2, 2, 2, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 2, 0, 1, 1, 2, 2, 2, 1, 0, 0, 2, 1, 1, 1, 1, 0, 2, 0, 1, 0, 1, 1, 2, 1, 1, 1, 1, 1, 2, 0, 0, 2, 0, 0, 2, 2, 0, 1, 1, 1, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 0, 2, 1, 1, 2, 1, 1, 0, 2, 2, 1, 2, 2, 1, 2, 1, 0, 2, 2, 2, 2, 1, 2, 1, 1, 1, 0, 1, 2, 2, 0, 1, 2, 0, 1, 0, 2, 0, 1, 2, 2, 2, 1, 0, 2, 2, 2, 1, 1, 2, 2, 0, 1, 1, 0, 2, 2, 2, 1, 0, 1, 1, 0, 2, 1, 1, 2, 1, 1, 1, 2, 0, 1, 2, 2, 0, 1, 2, 2, 1, 2, 1, 1, 2, 0, 2, 2, 2, 2, 1, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 0, 2, 2, 0, 2, 2, 2, 1, 2, 2, 2, 1, 2, 0, 0, 2, 2, 0, 1, 1, 0, 2, 2, 1, 0, 0, 1, 1, 1, 0, 2, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 0, 2, 1, 0, 2, 2, 1, 0, 1, 1, 2, 2, 1, 1, 2, 1, 1, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 2, 0, 1, 2, 1, 1, 0, 2, 0, 0, 0, 2, 0, 1, 1, 1, 1, 2, 0, 1, 2, 0, 0, 2, 2, 1, 0, 1, 1, 0, 0, 1, 2, 2, 2, 1, 0, 2, 2, 0, 0, 0, 2, 0, 1, 1, 0, 2, 2, 0, 0, 0, 1, 1, 1, 2, 1, 0, 1, 1, 1, 0, 2, 1, 1, 
0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 2, 0, 1, 2, 0, 1, 1, 0, 2, 0, 2, 2, 2, 1, 2, 0, 1, 1, 0, 1, 2, 0, 2, 1, 1, 2, 1, 0, 2, 0, 1, 0, 0, 0, 0, 0, 1, 2, 1, 0, 1, 1, 1, 1, 2, 2, 2, 1, 0, 2, 1, 2, 0, 0, 2, 2, 1, 0, 0, 0, 1, 1, 2, 1, 1, 0, 2, 1, 1, 1, 1, 0, 2, 2, 0, 0, 1, 2, 1, 0, 0, 1, 1, 0, 1, 1, 2, 2, 1, 1, 0, 2, 0, 2, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 2, 2, 1, 2, 1, 2, 0, 0, 1, 1, 1, 1, 1, 2, 2, 1, 0, 0, 2, 2, 0, 2, 2, 2, 1, 2, 0, 2, 1, 1, 0, 0, 0, 2, 2, 0, 2, 2, 2, 0, 0, 1, 1, 0, 1, 2, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 2, 0, 1, 1, 2, 2, 0, 2, 2, 2, 1, 2, 1, 2, 2, 0, 2, 2, 1, 2, 0, 2, 1, 2, 0, 2, 0, 1, 2, 0, 1, 1, 1, 0, 2, 1, 1, 2, 1, 1, 0, 0, 2, 1, 1, 1, 1, 1, 0, 0, 0, 1, 2, 0, 0, 0, 2, 1, 2, 1, 0, 1, 0, 2, 2, 0, 2, 2, 1, 2, 2, 1, 0, 1, 0, 2, 0, 2, 1, 0, 1, 1, 1, 2, 2, 2, 1, 2, 0, 2, 1, 1, 1, 2, 1, 1, 0, 2, 1, 2, 1, 0, 1, 1, 2, 1, 0, 0, 1, 2, 2, 0, 2, 2, 1, 2, 1, 0, 2, 1, 0, 1, 0, 1, 1, 2, 2, 0, 2, 0, 2, 0, 2, 0, 1, 2, 1, 2, 2, 2, 2, 0, 0, 0, 2, 2, 0, 2, 0, 0, 0, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 0, 1, 0, 2, 2, 0, 2, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 0, 2, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 2, 1, 0, 0, 1, 0, 1, 1, 2, 2, 1, 0, 1, 0, 1, 2, 0, 1, 1, 2, 2, 0, 2, 0, 2, 0, 2, 1, 0, 2, 0, 2, 0, 0, 2, 2, 0, 1, 2, 0, 2, 2, 0, 0, 0, 1, 1, 1, 1, 1, 0, 2, 0, 2, 1, 1, 0, 1, 2, 0, 2, 2, 1, 1, 2, 0, 2, 2, 1, 2, 0, 0, 1, 2, 1, 1, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 0, 0, 1, 0, 0, 0, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 0, 0, 2, 2, 0, 2, 1, 1, 0, 2, 2, 2, 2, 1, 2, 1, 2, 0, 2, 0, 0, 0, 0, 0, 1, 0, 1, 1, 2, 1, 2, 2, 2, 2, 2, 0, 0, 1, 0, 0, 2, 2, 2, 0, 0, 2, 1, 0, 1, 2, 2, 1, 2, 2, 1, 0, 0, 1, 1, 2, 0, 1, 2, 1, 2, 1, 1, 2, 0, 2, 0, 1, 1, 0, 2, 1, 0, 1, 1, 2, 2, 2, 1, 0, 1, 0, 2, 0, 1, 0, 2, 2, 2, 0, 1, 2, 1, 2, 1, 2, 1, 0, 0, 2, 0, 0, 1, 1, 2, 1, 1, 2, 2, 1, 1, 0, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 1, 2, 0, 1, 0, 1, 1, 2, 1, 2, 2, 2, 0, 1, 0, 1, 0, 0, 0, 2, 2, 2, 1, 0, 1, 0, 1, 1, 0, 2, 0, 1, 2, 1, 1, 1, 1, 0, 2, 0, 2, 0, 2, 1, 2, 0, 2, 
2, 0, 2, 0, 2, 1, 0, 1, 1, 1, 2, 0, 1, 1, 1, 2, 0, 1, 2, 2, 1, 1, 1, 1, 0, 1, 1, 2, 2, 1, 2, 0, 0, 0, 0, 2, 2, 0, 1, 0, 2, 2, 0, 1, 1, 0, 2, 1, 2, 2, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 1, 2, 2, 2, 2, 1, 0, 0, 2, 2, 1, 1, 2, 1, 2, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 1, 1, 2, 1, 0, 1, 1, 0, 0, 2, 1, 2, 2, 2, 2, 1, 2, 0, 2, 1, 0, 0, 2, 1, 0, 2, 1, 1, 0, 1, 0, 1, 2, 1, 1, 2, 1, 1, 2, 2, 2, 1, 2, 0, 0, 2, 0, 2, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 1, 0, 0, 2, 0, 0, 2, 2, 1, 2, 0, 1, 2, 2, 2, 0, 2, 1, 1, 0, 1, 0, 0, 2, 1, 1, 2, 1, 2, 1, 2, 0, 1, 2, 1, 0, 2, 1, 1, 2, 0, 2, 2, 1, 0, 2, 2, 2, 1, 2, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 0, 1, 0, 2, 0, 2, 2, 0, 0, 2, 2, 0, 0, 0, 2, 2, 1, 2, 0, 1, 1, 1, 2, 0, 0, 1, 2, 0, 2, 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 0, 1, 2, 2, 2, 2, 1, 1, 2, 2, 2, 0, 1, 2, 2, 2, 1, 2, 1, 0, 1, 2, 2, 2, 0, 2, 1, 0, 2, 0, 1, 2, 0, 1, 1, 2, 1, 2, 1, 1, 1, 2, 0, 1, 2, 0, 0, 1, 0, 1, 1, 0, 2, 2, 1, 1, 1, 1, 0, 0, 0, 2, 0, 2, 2, 2, 2, 0, 0, 2, 0, 2, 2, 2, 2, 1, 2, 1, 1, 0, 2, 1, 0, 1, 0, 2, 2, 1, 1, 0, 1, 0, 2, 1, 1, 2, 1, 0, 1, 0, 1, 2, 1, 2, 2, 0, 2, 0, 1, 1, 2, 0, 0, 0, 2, 0, 0, 2, 2, 1, 0, 2, 0, 1, 1, 1, 1, 0, 1, 2, 2, 1, 1, 1, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 0, 2, 1, 0, 1, 2, 1, 2, 1, 0, 0, 1, 1, 1, 2, 0, 1, 0, 2, 2, 0, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 1, 0, 2, 2, 0, 2, 2, 1, 0, 2, 1, 1, 0, 2, 2, 2, 1, 0, 0, 0, 2, 2, 0, 1, 1, 0, 0, 2, 2, 1, 2, 2, 1, 0, 2, 1, 0, 2, 1, 0, 0, 2, 0, 0, 1, 1, 2, 2, 0, 1, 0, 0, 2, 1, 1, 2, 0, 1, 2, 1, 1, 2, 2, 0, 2, 1, 1, 2, 0, 0, 1, 0, 2, 2, 1, 1, 1, 2, 1, 0, 1, 1, 0, 2, 1, 0, 2, 2, 2, 1, 0, 1, 1, 1, 0, 2, 1, 2, 0, 1, 0, 1, 2, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 1, 0, 0, 1, 2, 2, 1, 0, 2, 0, 1, 0, 2, 0, 0, 2, 1, 1, 0, 1, 2, 2, 2, 1, 0, 1, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 0, 2, 1, 2, 1, 2, 1, 2, 2, 0, 2, 1, 1, 0, 0, 0, 2, 2, 2, 0, 2, 2, 0, 2, 0, 0, 1, 1, 0, 2, 1, 2, 2, 0, 2, 2, 0, 1, 1, 
0, 2, 0, 1, 1, 0, 2, 1, 0, 1, 1, 2, 2, 0, 1, 2, 0, 1, 2, 2, 0, 2, 2, 1, 0, 0, 1, 2, 0, 1, 2, 0, 0, 2, 1, 1, 1, 2, 0, 1, 2, 0, 2, 2, 2, 2, 2, 1, 0, 0, 0, 1, 1, 1, 1, 2, 0, 0, 1, 0, 1, 1, 1, 2, 2, 1, 1, 2, 0, 2, 0, 2, 1, 0, 1, 1, 1, 2, 2, 0, 0, 2, 0, 1, 2, 1, 0, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 0, 2, 1, 2, 1, 2, 2, 0, 1, 1, 0, 0, 1, 1, 1, 2, 1, 2, 1, 2, 0, 0, 2, 1, 1, 0, 1, 0, 1, 0, 0, 1, 2, 1, 1, 0, 0, 0, 2, 2, 2, 0, 0, 2, 2, 1, 1, 1, 0, 2, 1, 0, 0, 1, 0, 2, 2, 0, 1, 1, 2, 1, 0, 1, 2, 1, 2, 1, 1, 2, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 1, 1, 1, 1, 0, 2, 1, 0, 1, 2, 0, 2, 2, 1, 2, 1, 1, 0, 2, 0, 0, 2, 0, 1, 0, 2, 1, 2, 2, 2, 1, 2, 2, 1, 0, 0, 2, 1, 2, 2, 1, 1, 0, 2, 0, 1, 2, 0, 1, 1, 1, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 2, 2, 0, 0, 2, 1, 0, 2, 1, 2, 0, 1, 0, 1, 2, 0, 2, 0, 2, 0, 1, 2, 1, 0, 2, 2, 0, 2, 1, 1, 2, 1, 1, 1, 0, 2, 2, 2, 2, 1, 1, 2, 0, 2, 2, 2, 1, 1, 1, 0, 1, 1, 0, 2, 1, 1, 1, 2, 1, 1, 0, 0, 0, 1, 2, 1, 1, 0, 1, 1, 2, 2, 2, 0, 2, 2, 1, 1, 1, 1, 0, 1, 2, 1, 1, 0, 0, 2, 1, 2, 2, 2, 2, 0, 0, 0, 0, 2, 0, 0, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 0, 2, 0, 0, 2, 2, 2, 2, 0, 1, 1, 1, 0, 2, 1, 0, 1, 0, 2, 1, 1, 2, 1, 1, 0, 0, 2, 1, 0, 2, 1, 2, 0, 1, 1, 0, 1, 2, 1, 2, 2, 0, 1, 0, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 0, 2, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 2, 2, 0, 0, 2, 0, 1, 2, 0, 0, 2, 0, 1, 0, 2, 2, 0, 0, 2, 2, 2, 2, 0, 2, 1, 0, 2, 0, 2, 1, 1, 1, 1, 2, 2, 1, 0, 0, 1, 1, 0, 1, 2, 1, 2, 0, 0, 0, 1, 2, 2, 1, 1, 2, 1, 1, 0, 2, 2, 0, 1, 2, 2, 1, 2, 1, 0, 2, 2, 0, 1, 2, 1, 0, 2, 1, 1, 0, 0, 1, 2, 0, 2, 0, 0, 2, 1, 1, 1, 2, 1, 1, 0, 2, 0, 2, 1, 0, 1, 2, 2, 2, 1, 1, 0, 1, 0, 0, 2, 0, 1, 1, 2, 0, 2, 2, 0, 0, 2, 2, 0, 0, 1, 2, 0, 2, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 2, 2, 2, 1, 1, 2, 2, 1, 0, 0, 1, 1, 0, 2, 2, 2, 2, 1, 1, 1, 0, 0, 2, 0, 1, 1, 2, 2, 0, 2, 0, 0, 2, 0, 2, 2, 2, 1, 0, 1, 1, 2, 2, 1, 1, 1, 1, 2, 0, 1, 2, 1, 1, 1, 0, 2, 0, 1, 1, 2, 2, 2, 1, 1, 2, 0, 1, 1, 2, 0, 1, 2, 0, 2, 2, 2, 2, 2, 0, 1, 1, 2, 2, 1, 1, 2, 2, 0, 1, 0, 0, 2, 1, 2, 1, 0, 2, 0, 2, 0, 1, 2, 0, 0, 2, 0, 1, 
2, 2, 1, 0, 2, 1, 1, 2, 2, 1, 1, 1, 1, 2, 1, 2, 0, 0, 0, 1, 2, 1, 1, 0, 1, 0, 0, 1, 2, 2, 1, 2, 2, 2, 2, 0, 2, 1, 1, 0, 0, 1, 1, 2, 2, 2, 2, 1, 2, 0, 1, 0, 2, 1, 0, 2, 2, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 2, 2, 0, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 0, 2, 2, 1, 1, 2, 0, 2, 2, 1, 2, 1, 2, 0, 1, 0, 1, 0, 2, 1, 1, 0, 1, 1, 0, 1, 2, 0, 2, 2, 1, 2, 0, 0, 2, 2, 1, 2, 0, 1, 2, 1, 1, 1, 2, 0, 1, 1, 0, 0, 1, 1, 1, 2, 0, 2, 2, 2, 2, 1, 1, 2, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 2, 0, 2, 2, 1, 0, 1, 0, 2, 2, 0, 0, 2, 1, 1, 2, 2, 1, 0, 2, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 0, 2, 1, 1, 2, 1, 2, 0, 2, 1, 2, 2, 2, 1, 1, 0, 1, 0, 1, 0, 2, 2, 0, 2, 0, 0, 2, 2, 1, 0, 2, 0, 0, 0, 1, 1, 1, 2, 1, 1, 1, 1, 1, 0, 1, 1, 2, 1, 2, 2, 1, 2, 1, 2, 0, 2, 2, 1, 1, 0, 2, 2, 1, 1, 0, 2, 2, 2, 1, 2, 2, 1, 1, 1, 0, 2, 2, 2, 2, 2, 1, 0, 2, 0, 0, 1, 0, 1, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 0, 1, 1, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2, 1, 0, 0, 1, 2, 2, 1, 2, 2, 1, 0, 2, 1, 1, 2, 1, 2, 1, 1, 0, 0, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 0, 2, 1, 2, 0, 1, 1, 0, 1, 2, 1, 2, 1, 1, 0, 1, 1, 2, 2, 2, 1, 1, 2, 1, 1, 2, 2, 2, 0, 1, 0, 2, 2, 1, 1, 0, 1, 2, 2, 1, 1, 1, 1, 2, 2, 1, 1, 0, 0, 1, 1, 0, 2, 1, 2, 2, 2, 0, 0, 2, 2, 2, 2, 0, 2, 2, 2, 1, 2, 0, 1, 1, 2, 2, 1, 0, 0, 1, 0, 1, 1, 2, 2, 1, 0, 2, 1, 2, 0, 0, 2, 0, 1, 1, 0, 0, 1, 2, 0, 2, 1, 0, 2, 1, 0, 2, 1, 2, 1, 2, 0, 0, 1, 0, 0, 2, 2, 1, 2, 1, 2, 0, 1, 0, 1, 1, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 2, 0, 2, 0, 1, 1, 2, 2, 2, 0, 2, 0, 0, 2, 0, 2, 2, 1, 1, 1, 1, 2, 0, 1, 0, 0, 1, 2, 2, 0, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 0, 0, 2, 1, 0, 2, 2, 1, 2, 0, 1, 0, 2, 1, 2, 0, 2, 0, 1, 1, 2, 1, 2, 2, 0, 1, 0, 1, 2, 1, 1, 2, 0, 2, 1, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 0, 1, 2, 2, 0, 2, 0, 0, 0, 2, 2, 2, 0, 2, 2, 2, 1, 0, 0, 2, 1, 1, 1, 2, 2, 2, 1, 0, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 2, 1, 2, 0, 1, 2, 2, 0, 0, 2, 0, 0, 1, 0, 1, 2, 0, 1, 0, 2, 1, 0, 1, 1, 2, 1, 1, 1, 0, 2, 1, 2, 2, 0, 0, 1, 2, 1, 0, 2, 1, 0, 2, 2, 2, 2, 0, 0, 1, 1, 2, 0, 
2, 1, 2, 1, 0, 2, 2, 2, 1, 0, 0, 2, 0, 1, 2, 1, 1, 1, 2, 2, 0, 1, 2, 0, 1, 0, 2, 2, 1, 2, 0, 2, 2, 0, 1, 2, 2, 0, 2, 1, 1, 2, 0, 2, 2, 2, 0, 1, 0, 2, 0, 2, 1, 1, 2, 2, 1, 2, 2, 2, 0, 0, 1, 2, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 0, 1, 1, 1, 1, 2, 0, 2, 1, 0, 2, 0, 0, 0, 0, 2, 2, 1, 2, 1, 1, 2, 0, 2, 1, 2, 2, 2, 0, 1, 2, 1, 1, 0, 0, 0, 1, 2, 2, 1, 0, 2, 1, 0, 2, 1, 1, 0, 0, 2, 2, 0, 1, 2, 2, 0, 2, 2, 2, 2, 0, 0, 0, 1, 2, 1, 2, 1, 1, 1, 1, 0, 1, 2, 2, 1, 2, 2, 1, 1, 0, 2, 2, 0, 1, 0, 2, 2, 1, 2, 1, 1, 1, 1, 0, 1, 1, 2, 0, 2, 0, 0, 1, 1, 2, 1, 2, 0, 2, 2, 2, 0, 1, 0, 2, 0, 2, 1, 2, 2, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 2, 1, 1, 0, 0, 1, 0, 2, 0, 0, 0, 1, 2, 1, 2, 1, 2, 0, 1, 1, 0, 2, 1, 2, 0, 1, 0, 0, 1, 1, 2, 1, 2, 0, 2, 2, 2, 0, 1, 1, 0, 2, 1, 2, 1, 2, 0, 0, 2, 0, 1, 2, 2, 2, 2, 0, 0, 2, 1, 1, 1, 1, 1, 1, 1, 2, 0, 2, 1, 2, 1, 2, 2, 1, 2, 1, 2, 0, 1, 2, 1, 0, 1, 1, 2, 0, 1, 2, 1, 2, 1, 0, 1, 0, 0, 0, 2, 1, 0, 0, 2, 0, 0, 0, 0, 2, 0, 1, 0, 0, 2, 1, 2, 2, 1, 1, 1, 1, 0, 2, 1, 1, 1, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 0, 0, 1, 0, 2, 2, 1, 1, 0, 0, 1, 2, 1, 1, 2, 2, 1, 2, 0, 2, 0, 1, 1, 1, 1, 0, 0, 2, 2, 2, 0, 0, 1, 1, 2, 1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 1, 2, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 2, 2, 1, 1, 2, 1, 2, 2, 0, 0, 1, 2, 2, 2, 0, 1, 0, 2, 2, 2, 0, 1, 0, 1, 1, 0, 1, 1, 2, 0, 2, 1, 0, 0, 2, 0, 1, 2, 2, 1, 0, 2, 2, 2, 1, 2, 2, 1, 2, 0, 2, 1, 0, 1, 2, 2, 1, 2, 2, 1, 0, 2, 0, 2, 1, 1, 0, 2, 1, 0, 0, 0, 0, 1, 1, 0, 1, 2, 1, 0, 2, 0, 2, 2, 2, 2, 0, 0, 2, 2, 0, 1, 2, 1, 0, 2, 1, 0, 1, 0, 0, 1, 2, 1, 0, 2, 0, 1, 2, 0, 2, 0, 1, 1, 2, 0, 1, 0, 2, 1, 2, 0, 2, 0, 2, 0, 0, 2, 0, 0, 2, 2, 2, 2, 2, 0, 1, 2, 2, 1, 2, 2, 0, 2, 2, 0, 0, 2, 1, 1, 2, 1, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 1, 1, 2, 0, 2, 0, 0, 1, 1, 0, 2, 1, 0, 0, 2, 1, 1, 0, 2, 1, 0, 0, 0, 1, 0, 0, 2, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 2, 0, 2, 2, 2, 1, 2, 1, 2, 2, 1, 1, 1, 0, 0, 2, 1, 1, 0, 2, 2, 1, 1, 0, 1, 1, 1, 2, 0, 2, 0, 0, 2, 2, 1, 1, 0, 1, 2, 2, 1, 0, 2, 1, 1, 2, 1, 2, 0, 2, 1, 0, 1, 2, 1, 1, 1, 2, 0, 2, 0, 
0, 2, 2, 2, 1, 0, 2, 0, 2, 0, 1, 0, 1, 1, 1, 1, 2, 1, 1, 0, 2, 0, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 0, 2, 2, 2, 0, 1, 2, 0, 2, 1, 2, 2, 0, 2, 2, 1, 2, 2, 2, 2, 2, 0, 2, 0, 1, 1, 0, 1, 2, 1, 0, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 0, 0, 1, 0, 1, 1, 1, 2, 0, 1, 1, 2, 0, 1, 2, 1, 0, 2, 2, 0, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 0, 2, 0, 2, 2, 1, 2, 2, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 0, 0, 0, 2, 1, 2, 1, 0, 1, 1, 0, 1, 2, 1, 2, 2, 1, 1, 2, 2, 2, 0, 2, 2, 1, 2, 1, 2, 1, 2, 1, 1, 0, 2, 2, 2, 0, 0, 0, 1, 0, 0, 2, 2, 1, 2, 1, 2, 1, 1, 0, 1, 1, 2, 2, 1, 2, 0, 2, 0, 2, 1, 2, 2, 1, 0, 2, 2, 2, 0, 1, 2, 0, 2, 2, 0, 0, 2, 0, 1, 1, 0, 0, 1, 2, 2, 2, 2, 1, 1, 2, 2, 2, 0, 2, 1, 2, 0, 2, 1, 1, 2, 1, 2, 2, 0, 2, 1, 1, 1, 1, 2, 2, 1, 2, 2, 0, 2, 1, 1, 1, 0, 0, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 0, 2, 1, 0, 2, 1, 2, 0, 2, 2, 2, 1, 0, 0, 1, 2, 1, 1, 1, 1, 2, 1, 0, 1, 1, 2, 2, 1, 2, 2, 1, 0, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 0, 1, 1, 1, 1, 0, 0, 2, 2, 1, 1, 0, 1, 1, 2, 1, 0, 1, 2, 2, 1, 1, 2, 2, 0, 1, 0, 2, 0, 0, 2, 2, 0, 2, 0, 0, 2, 2, 2, 0, 1, 1, 2, 1, 2, 2, 2, 1, 1, 1, 0, 1, 2, 2, 0, 2, 2, 2, 0, 0, 1, 0, 2, 0, 1, 1, 0, 2, 2, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 1, 2, 0, 0, 0, 2, 0, 2, 1, 2, 1, 1, 1, 1, 2, 2, 0, 0, 0, 1, 0, 2, 2, 2, 2, 0, 2, 0, 2, 0, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 1, 1, 1, 2, 0, 0, 1, 1, 2, 0, 2, 2, 2, 0, 1, 2, 1, 0, 0, 1, 0, 0, 0, 1, 1, 2, 0, 0, 0, 0, 0, 1, 1, 0, 2, 2, 1, 2, 1, 1, 1, 0, 2, 1, 0, 0, 1, 2, 1, 1, 2, 2, 2, 0, 1, 1, 0, 1, 2, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 1, 2, 1, 1, 0, 0, 1, 1, 1, 2, 1, 2, 2, 2, 0, 1, 1, 2, 2, 2, 1, 2, 2, 0, 1, 2, 0, 2, 2, 0, 1, 0, 1, 2, 2, 0, 2, 2, 0, 2, 1, 0, 1, 2, 1, 1, 2, 0, 2, 1, 1, 1, 2, 0, 2, 2, 1, 0, 2, 1, 1, 0, 1, 2, 0, 0, 2, 1, 1, 0, 2, 2, 2, 0, 0, 2, 1, 2, 2, 1, 1, 0, 1, 1, 1, 2, 1, 1, 1, 2, 2, 0, 1, 2, 0, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 0, 2, 2, 1, 2, 2, 0, 2, 1, 2, 1, 0, 2, 1, 0, 0, 2, 1, 0, 2, 0, 0, 2, 2, 2, 0, 1, 1, 0, 2, 2, 2, 1, 2, 1, 1, 0, 1, 2, 2, 1, 2, 2, 0, 1, 1, 2, 1, 2, 2, 1, 1, 1, 1, 1, 0, 2, 
2, 0, 1, 0, 1, 2, 2, 1, 1, 2, 1, 0, 2, 1, 2, 2, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 2, 1, 0, 1, 0, 1, 0, 2, 1, 2, 0, 1, 0, 2, 1, 1, 2, 0, 2, 2, 0, 0, 1, 0, 1, 2, 2, 1, 2, 0, 2, 1, 0, 0, 2, 1, 1, 2, 0, 0, 1, 0, 1, 2, 1, 1, 1, 1, 2, 0, 0, 0, 2, 0, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 0, 2, 1, 1, 0, 2, 2, 2, 2, 0, 2, 2, 0, 1, 0, 1, 1, 1, 2, 2, 1, 0, 0, 1, 2, 1, 0, 0, 2, 1, 0, 1, 1, 2, 0, 1, 2, 2, 1, 0, 2, 0, 0, 1, 1, 2, 2, 2, 1, 0, 0, 2, 2, 2, 1, 0, 1, 2, 2, 0, 2, 2, 2, 2, 1, 1, 2, 1, 1, 2, 2, 1, 1, 2, 0, 0, 1, 0, 0, 2, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 0, 1, 2, 0, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 0, 1, 2, 1, 2, 2, 0, 0, 1, 2, 1, 1, 0, 2, 1, 1, 1, 2, 0, 1, 2, 1, 2, 2, 0, 1, 2, 0, 1, 1, 2, 2, 1, 2, 1, 0, 0, 0, 2, 0, 2, 1, 1, 1, 2, 1, 1, 2, 1, 0, 1, 0, 1, 2, 2, 2, 2, 1, 0, 1, 1, 1, 0, 2, 2, 1, 2, 1, 1, 1, 0, 1, 1, 2, 1, 0, 1, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 0, 1, 0, 1, 1, 1, 2, 1, 2, 2, 2, 2, 0, 2, 0, 1, 1, 0, 2, 2, 1, 1, 1, 1, 1, 2, 1, 0, 1, 2, 0, 2, 0, 1, 2, 1, 2, 0, 0, 2, 0, 2, 0, 0, 0, 1, 2, 2, 1, 0, 1, 0, 1, 2, 2, 0, 2, 0, 1, 0, 1, 1, 1, 1, 0, 2, 1, 0, 0, 2, 2, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 2, 1, 1, 1, 0, 0, 2, 2, 0, 2, 0, 1, 1, 1, 1, 2, 1, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 2, 0, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 0, 1, 0, 0, 0, 1, 1, 0, 1, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 0, 2, 0, 2, 1, 0, 0, 2, 0, 0, 0, 2, 1, 1, 2, 1, 2, 2, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 2, 2, 2, 0, 1, 0, 1, 2, 0, 0, 2, 0, 2, 0, 1, 1, 0, 2, 2, 1, 0, 2, 2, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 2, 0, 2, 0, 0, 2, 2, 0, 1, 1, 1, 2, 0, 1, 1, 0, 0, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 0, 2, 2, 1, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 0, 1, 1, 2, 2, 0, 2, 2, 1, 0, 2, 1, 1, 2, 2, 1, 0, 1, 2, 0, 1, 1, 2, 0, 1, 1, 1, 2, 1, 2, 1, 0, 0, 2, 2, 1, 2, 0, 2, 0, 1, 2, 1, 2, 0, 2, 2, 1, 0, 1, 2, 2, 2, 0, 2, 0, 2, 1, 0, 2, 2, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 2, 2, 1, 1, 0, 2, 2, 2, 1, 2, 2, 0, 0, 1, 1, 2, 1, 0, 0, 2, 2, 0, 1, 2, 1, 2, 2, 1, 2, 2, 1, 1, 
2, 2, 0, 0, 0, 2, 2, 2, 1, 2, 2, 2, 0, 0, 2, 1, 2, 2, 1, 1, 1, 0, 2, 1, 2, 1, 0, 2, 1, 2, 1, 0, 1, 2, 1, 2, 1, 0, 1, 0, 0, 0, 2, 1, 2, 0, 2, 1, 2, 2, 2, 0, 0, 1, 0, 2, 1, 2, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 1, 2, 2, 0, 1, 1, 1, 1, 2, 1, 1, 0, 2, 2, 2, 1, 1, 2, 2, 0, 2, 2, 2, 1, 0, 2, 0, 2, 1, 2, 0, 2, 2, 1, 1, 2, 1, 1, 2, 0, 1, 1, 2, 1, 2, 1, 1, 0, 1, 1, 1, 2, 2, 2, 1, 0, 2, 2, 1, 0, 2, 2, 1, 1, 1, 0, 1, 2, 1, 1, 2, 2, 0, 0, 2, 0, 2, 1, 1, 1, 2, 1, 1, 1, 2, 2, 0, 0, 2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 0, 1, 1, 2, 0, 1, 1, 2, 0, 0, 1, 1, 1, 2, 0, 0, 1, 2, 1, 1, 1, 1, 1, 2, 0, 2, 1, 2, 2, 2, 1, 1, 0, 1, 1, 0, 1, 0, 2, 1, 0, 1, 2, 1, 2, 1, 0, 2, 2, 2, 2, 0, 2, 0, 0, 1, 0, 1, 1, 1, 1, 2, 2, 0, 0, 2, 1, 2, 0, 2, 0, 2, 2, 1, 1, 2, 1, 2, 1, 1, 2, 1, 2, 2, 1, 0, 1, 1, 0, 2, 0, 1, 2, 1, 0, 0, 2, 1, 2, 0, 1, 1, 0, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 0, 2, 0, 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 0, 2, 2, 0, 2, 2, 0, 1, 1, 2, 1, 0, 2, 1, 0, 0, 0, 2, 2, 0, 1, 2, 1, 0, 0, 1, 2, 0, 2, 1, 1, 0, 2, 2, 0, 1, 1, 1, 0, 1, 2, 0, 2, 0, 2, 1, 0, 0, 2, 0, 0, 0, 1, 2, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 0, 2, 1, 2, 2, 1, 2, 0, 2, 2, 0, 2, 1, 2, 2, 2, 0, 2, 2, 2, 2, 0, 2, 1, 2, 2, 0, 2, 1, 1, 0, 2, 2, 2, 1, 2, 2, 1, 1, 2, 0, 2, 1, 1, 1, 2, 2, 0, 0, 1, 1, 0, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 0, 1, 1, 2, 0, 0, 1, 0, 0, 2, 1, 1, 2, 2, 2, 1, 2, 0, 2, 2, 2, 2, 2, 1, 2, 2, 2, 0, 0, 0, 1, 1, 2, 1, 2, 1, 0, 2, 2, 0, 1, 0, 2, 0, 2, 2, 2, 1, 2, 2, 0, 1, 1, 1, 0, 2, 2, 0, 1, 2, 1, 0, 1, 1, 1, 1, 1, 0, 0, 2, 0, 2, 1, 1, 1, 1, 2, 1, 0, 0, 2, 2, 1, 2, 0, 2, 1, 1, 0, 2, 1, 2, 0, 1, 2, 2, 0, 0, 2, 1, 1, 2, 0, 2, 0, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2, 0, 1, 2, 1, 2, 1, 0, 1, 2, 2, 0, 2, 0, 1, 0, 1, 1, 1, 1, 1, 2, 0, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 0, 2, 1, 0, 1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 0, 1, 2, 1, 2, 1, 2, 2, 0, 1, 1, 2, 0, 1, 0, 0, 1, 1, 1, 1, 0, 2, 1, 2, 2, 1, 2, 1, 2, 2, 2, 1, 1, 0, 2, 1, 1, 0, 0, 0, 2, 1, 1, 1, 0, 
1, 1, 0, 1, 0, 0, 2, 0, 2, 2, 1, 1, 1, 2, 2, 0, 1, 1, 2, 0, 2, 2, 2, 2, 2, 1, 1, 0, 0, 2, 2, 2, 0, 1, 0, 1, 2, 0, 2, 1, 1, 0, 2, 1, 2, 1, 1, 2, 2, 0, 1, 1, 2, 0, 0, 1, 1, 1, 2, 1, 0, 2, 1, 0, 2, 0, 1, 2, 2, 2, 0, 1, 2, 1, 1, 2, 1, 0, 2, 1, 1, 1, 2, 2, 0, 0, 1, 1, 1, 1, 1, 2, 2, 0, 2, 2, 2, 0, 2, 1, 2, 0, 0, 0, 1, 0, 2, 0, 0, 1, 1, 0, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 0, 2, 0, 1, 2, 2, 0, 1, 0, 2, 2, 0, 1, 0, 2, 1, 2, 2, 0, 2, 1, 1, 0, 2, 1, 0, 0, 2, 1, 1, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 1, 1, 0, 0, 1, 2, 1, 2, 0, 1, 0, 1, 2, 0, 2, 0, 1, 1, 0, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 0, 1, 1, 0, 1, 2, 0, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 0, 2, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 2, 2, 2, 1, 1, 0, 1, 0, 1, 2, 2, 0, 2, 2, 0, 1, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 0, 2, 2, 0, 2, 1, 0, 2, 1, 0, 1, 0, 2, 2, 2, 2, 1, 0, 2, 1, 2, 1, 1, 2, 0, 1, 2, 2, 1, 0, 1, 2, 2, 0, 2, 2, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 2, 2, 1, 0, 0, 0, 0, 2, 1, 1, 2, 2, 1, 2, 2, 0, 1, 2, 1, 1, 1, 2, 1, 1, 0, 0, 1, 1, 0, 1, 2, 0, 2, 1, 2, 2, 2, 2, 1, 0, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 1, 1, 2, 0, 2, 2, 1, 2, 2, 1, 1, 0, 2, 1, 0, 0, 1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 0, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 0, 1, 1, 2, 0, 0, 1, 2, 0, 2, 2, 0, 1, 2, 2, 0, 2, 2, 2, 2, 1, 2, 2, 1, 2, 0, 2, 2, 2, 2, 2, 2, 1, 0, 2, 1, 1, 2, 2, 2, 0, 2, 1, 2, 1, 2, 1, 1, 2, 1, 0, 0, 2, 0, 1, 1, 0, 1, 2, 0, 2, 1, 2, 1, 1, 2, 1, 2, 0, 1, 2, 1, 1, 1, 2, 1, 2, 2, 1, 0, 1, 1, 1, 2, 2, 2, 2, 2, 2, 0, 0, 1, 1, 1, 1, 1, 0, 2, 1, 2, 1, 0, 1, 1, 0, 0, 1, 2, 0, 1, 0, 2, 2, 2, 1, 1, 2, 0, 2, 2, 0, 1, 0, 0, 2, 0, 0, 0, 1, 2, 0, 0, 0, 2, 0, 0, 2, 2, 0, 1, 1, 1, 2, 1, 1, 2, 2, 1, 1, 1, 0, 1, 2, 2, 0, 0, 1, 1, 1, 2, 1, 1, 2, 1, 2, 0, 2, 2, 0, 2, 1, 0, 2, 2, 2, 1, 1, 1, 0, 1, 1, 2, 0, 1, 0, 2, 1, 1, 0, 2, 1, 0, 0, 0, 2, 1, 1, 1, 1, 2, 2, 2, 0, 1, 0, 0, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 0, 1, 1, 2, 1, 1, 2, 1, 2, 2, 1, 2, 1, 0, 1, 2, 1, 2, 2, 0, 2, 0, 2, 2, 0, 1, 2, 0, 1, 0, 2, 2, 1, 1, 
0, 0, 2, 2, 2, 0, 2, 2, 2, 1, 0, 1, 0, 0, 2, 0, 1, 0, 2, 0, 2, 0, 2, 2, 1, 0, 1, 2, 2, 2, 2, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 2, 2, 0, 2, 2, 2, 1, 2, 2, 2, 2, 0, 0, 2, 1, 2, 1, 0, 0, 0, 0, 0, 0, 2, 2, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 1, 1, 0, 1, 1, 1, 1, 2, 1, 0, 1, 0, 1, 0, 2, 0, 2, 1, 2, 1, 1, 0, 2, 2, 0, 0, 0, 0, 2, 2, 1, 0, 1, 1, 1, 1, 0, 1, 2, 2, 0, 2, 1, 0, 2, 0, 1, 0, 0, 1, 0, 2, 1, 0, 1, 2, 1, 1, 1, 2, 0, 1, 0, 1, 2, 1, 0, 2, 1, 1, 2, 1, 2, 1, 0, 2, 1, 2, 1, 0, 1, 1, 2, 2, 1, 2, 1, 2, 2, 0, 2, 0, 2, 1, 0, 2, 1, 2, 2, 1, 2, 1, 2, 1, 1, 0, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 0, 2, 0, 1, 1, 1, 2, 2, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 2, 0, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 0, 0, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 1, 1, 1, 1, 2, 2, 0, 1, 0, 2, 0, 1, 1, 2, 1, 1, 2, 2, 1, 1, 1, 1, 1, 0, 1, 0, 2, 1, 1, 1, 2, 1, 2, 1, 0, 2, 0, 1, 0, 2, 2, 1, 2, 2, 0, 1, 0, 0, 1, 0, 1, 1, 2, 2, 1, 1, 0, 2, 0, 1, 0, 2, 1, 0, 0, 0, 0, 0, 2, 2, 2, 0, 0, 0, 0, 2, 1, 2, 0, 1, 2, 0, 0, 1, 0, 1, 1, 2, 0, 1, 0, 2, 0, 0, 2, 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 1, 2, 0, 0, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 2, 0, 1, 1, 2, 1, 0, 1, 1, 2, 0, 1, 1, 1, 1, 1, 2, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 1, 1, 1, 2, 1, 1, 0, 1, 1, 1, 0, 2, 2, 2, 1, 2, 2, 1, 2, 1, 0, 2, 2, 1, 1, 0, 2, 2, 2, 2, 1, 2, 2, 0, 0, 0, 1, 2, 2, 1, 0, 0, 2, 2, 2, 1, 0, 1, 1, 1, 1, 0, 2, 1, 2, 2, 2, 2, 2, 0, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 2, 2, 2, 0, 1, 0, 1, 2, 2, 1, 2, 0, 2, 1, 1, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 1, 0, 1, 1, 1, 2, 1, 1, 0, 1, 2, 1, 2, 1, 0, 2, 2, 0, 0, 1, 2, 2, 0, 1, 0, 1, 2, 1, 1, 2, 2, 2, 0, 2, 2, 2, 1, 0, 2, 0, 2, 0, 2, 0, 1, 1, 2, 0, 0, 1, 2, 2, 2, 1, 2, 1, 1, 1, 0, 2, 2, 0, 1, 1, 2, 1, 1, 2, 1, 0, 1, 0, 2, 1, 2, 2, 2, 1, 0, 1, 0, 2, 1, 1, 1, 1, 1, 0, 2, 0, 0, 1, 2, 1, 1, 2, 0, 1, 0, 2, 1, 0, 2, 1, 1, 2, 2, 2, 2, 2, 0, 1, 1, 1, 2, 1, 0, 1, 0, 1, 2, 2, 1, 2, 1, 2, 1, 1, 2, 0, 1, 2, 1, 1, 0, 1, 2, 0, 1, 1, 2, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 0, 1, 2, 0, 0, 2, 2, 2, 
1, 2, 1, 1, 2, 0, 2, 1, 1, 2, 0, 1, 1, 0, 0, 0, 2, 0, 1, 0, 1, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 1, 0, 1, 1, 1, 2, 0, 1, 2, 2, 0, 0, 2, 0, 1, 2, 2, 0, 0, 2, 1, 1, 0, 1, 2, 2, 0, 0, 2, 2, 0, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 1, 2, 0, 2, 2, 2, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 2, 2, 1, 2, 1, 0, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 1, 1, 2, 2, 1, 2, 1, 0, 2, 1, 2, 0, 0, 1, 2, 2, 0, 1, 2, 0, 1, 2, 1, 2, 0, 1, 1, 2, 2, 2, 0, 0, 2, 1, 2, 2, 0, 1, 2, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 0, 2, 1, 1, 2, 0, 1, 0, 1, 1, 0, 0, 2, 0, 1, 2, 2, 1, 0, 1, 2, 0, 1, 2, 2, 1, 2, 0, 1, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 2, 2, 0, 0, 2, 0, 0, 2, 1, 0, 2, 0, 2, 1, 1, 1, 1, 1, 2, 0, 0, 1, 2, 1, 2, 2, 0, 2, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 2, 0, 0, 2, 1, 0, 1, 2, 1, 2, 1, 2, 0, 1, 2, 2, 1, 1, 1, 1, 0, 1, 2, 1, 1, 2, 2, 2, 1, 1, 0, 0, 1, 2, 2, 1, 0, 2, 1, 1, 2, 0, 2, 0, 1, 1, 2, 2, 1, 1, 0, 2, 1, 0, 1, 1, 1, 0, 2, 2, 1, 1, 0, 0, 0, 1, 2, 2, 2, 2, 0, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 2, 1, 2, 2, 0, 1, 2, 1, 0, 1, 1, 0, 1, 2, 1, 2, 1, 2, 0, 0, 1, 1, 0, 1, 1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 0, 1, 0, 2, 2, 1, 2, 2, 2, 0, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 0, 1, 1, 2, 2, 2, 1, 0, 1, 0, 1, 1, 2, 2, 2, 1, 0, 1, 2, 1, 1, 1, 1, 0, 2, 2, 1, 1, 0, 2, 2, 1, 0, 0, 0, 1, 0, 2, 1, 1, 1, 1, 0, 2, 2, 0, 1, 0, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 0, 1, 1, 1, 1, 2, 1, 2, 2, 0, 1, 1, 0, 0, 0, 1, 2, 2, 1, 1, 0, 2, 1, 2, 1, 1, 2, 2, 0, 2, 2, 0, 0, 1, 0, 0, 1, 1, 2, 0, 1, 1, 0, 2, 2, 2, 2, 2, 0, 0, 0, 2, 2, 0, 1, 1, 0, 0, 1, 2, 1, 1, 0, 1, 2, 2, 2, 0, 1, 2, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 0, 1, 0, 1, 1, 0, 1, 2, 1, 2, 2, 0, 0, 0, 1, 2, 2, 2, 0, 2, 2, 1, 2, 2, 0, 2, 2, 1, 2, 0, 0, 2, 1, 2, 1, 2, 1, 2, 2, 0, 2, 2, 0, 2, 2, 1, 1, 0, 1, 0, 0, 2, 1, 2, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 2, 0, 1, 0, 1, 1, 0, 2, 2, 2, 2, 1, 2, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 0, 1, 1, 2, 0, 1, 2, 0, 1, 0, 2, 1, 2, 0, 2, 2, 0, 2, 2, 2, 1, 2, 2, 0, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 2, 1, 1, 0, 0, 1, 2, 0, 1, 1, 
0, 1, 0, 2, 2, 1, 2, 2, 1, 1, 2, 1, 0, 1, 1, 2, 0, 0, 1, 1, 0, 2, 1, 1, 0, 0, 2, 2, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1, 2, 1, 0, 0, 0, 2, 1, 2, 0, 2, 0, 2, 0, 1, 1, 1, 1, 2, 2, 2, 0, 1, 1, 0, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 1, 0, 1, 2, 0, 2, 1, 1, 1, 0, 0, 2, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 1, 2, 2, 2, 1, 2, 2, 2, 0, 1, 0, 2, 0, 1, 2, 0, 1, 1, 2, 0, 2, 1, 0, 1, 2, 2, 2, 2, 0, 2, 2, 0, 2, 2, 1, 0, 2, 0, 2, 1, 2, 1, 2, 2, 0, 1, 2, 2, 0, 1, 0, 2, 0, 1, 0, 2, 1, 0, 1, 2, 0, 2, 2, 0, 1, 1, 0, 2, 2, 2, 2, 0, 1, 2, 0, 2, 2, 2, 0, 1, 2, 2, 2, 2, 2, 0, 1, 0, 2, 0, 1, 1, 1, 1, 0, 1, 2, 1, 1, 2, 2, 1, 2, 0, 0, 0, 1, 0, 0, 0, 0, 1, 2, 1, 2, 2, 0, 1, 2, 2, 0, 0, 1, 1, 1, 2, 2, 0, 0, 0, 2, 1, 2, 0, 0, 0, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 1, 1, 0, 2, 2, 2, 0, 1, 1, 0, 1, 2, 0, 0, 1, 0, 0, 0, 0, 1, 2, 1, 2, 1, 2, 1, 1, 2, 0, 2, 1, 1, 2, 2, 2, 0, 2, 2, 0, 0, 2, 2, 0, 0, 2, 2, 1, 0, 1, 1, 1, 1, 1, 2, 1, 1, 2, 2, 0, 0, 2, 2, 2, 2, 0, 1, 1, 2, 1, 1, 0, 1, 0, 1, 2, 1, 0, 2, 2, 1, 0, 2, 2, 2, 1, 0, 0, 0, 1, 1, 0, 0, 2, 2, 2, 2, 1, 2, 0, 0, 0, 1, 1, 0, 2, 0, 1, 0, 1, 0, 1, 0, 1, 1, 2, 1, 1, 0, 1, 2, 0, 1, 1, 2, 2, 1, 0, 1, 0, 1, 2, 2, 2, 0, 1, 2, 2, 1, 0, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 0, 1, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 1, 1, 2, 0, 0, 2, 0, 2, 2, 0, 1, 0, 1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 2, 0, 1, 1, 2, 0, 1, 1, 0, 0, 2, 0, 0, 1, 0, 2, 0, 0, 2, 1, 1, 1, 1, 0, 2, 2, 1, 1, 2, 0, 1, 1, 1, 2, 2, 1, 1, 1, 0, 2, 2, 2, 1, 2, 1, 2, 0, 0, 0, 0, 1, 0, 2, 1, 0, 0, 0, 1, 1, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 0, 0, 2, 0, 1, 1, 1, 1, 1, 0, 0, 0, 2, 1, 0, 1, 1, 1, 1, 2, 1, 2, 0, 2, 2, 0, 2, 2, 0, 0, 1, 2, 1, 1, 0, 0, 1, 2, 1, 2, 0, 0, 2, 2, 1, 0, 2, 0, 0, 0, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 1, 2, 0, 1, 0, 0, 2, 1, 1, 1, 2, 2, 2, 1, 1, 0, 0, 2, 2, 1, 0, 1, 2, 2, 0, 2, 0, 0, 2, 0, 1, 1, 0, 2, 2, 0, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 0, 1, 1, 2, 1, 0, 0, 2, 1, 2, 1, 0, 1, 2, 1, 1, 2, 0, 2, 2, 2, 1, 1, 2, 2, 0, 1, 2, 1, 1, 1, 0, 2, 0, 0, 1, 2, 2, 1, 2, 1, 2, 1, 2, 2, 1, 0, 2, 0, 2, 1, 1, 
0, 1, 2, 2, 0, 2, 2, 2, 1, 1, 0, 2, 1, 0, 0, 2, 0, 1, 1, 2, 1, 0, 1, 2, 2, 1, 1, 1, 1, 0, 1, 2, 1, 0, 2, 1, 0, 2, 1, 0, 0, 1, 2, 0, 2, 1, 0, 2, 1, 2, 0, 1, 1, 2, 2, 0, 1, 1, 0, 2, 0, 1, 0, 2, 2, 2, 2, 1, 0, 1, 2, 1, 0, 1, 1, 0, 2, 1, 0, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 0, 1, 0, 2, 1, 1, 0, 1, 1, 2, 0, 1, 0, 1, 1, 0, 1, 1, 2, 1, 2, 0, 1, 2, 1, 1, 0, 1, 2, 2, 2, 1, 1, 0, 2, 2, 1, 2, 1, 1, 0, 2, 1, 2, 1, 2, 1, 0, 0, 1, 1, 2, 1, 1, 1, 2, 2, 1, 0, 2, 0, 1, 1, 2, 1, 2, 1, 2, 2, 2, 1, 2, 2, 0, 0, 2, 1, 0, 0, 1, 1, 2, 1, 1, 2, 2, 0, 1, 0, 0, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 0, 2, 1, 0, 1, 1, 1, 2, 0, 2, 0, 0, 0, 0, 1, 2, 1, 2, 0, 1, 0, 1, 0, 2, 2, 1, 2, 1, 2, 0, 1, 0, 0, 0, 2, 1, 2, 0, 2, 2, 2, 1, 2, 2, 0, 2, 2, 1, 2, 1, 1, 2, 2, 2, 1, 2, 0, 2, 1, 0, 0, 0, 0, 2, 2, 0, 0, 0, 2, 0, 1, 2, 1, 1, 1, 1, 2, 1, 0, 0, 1, 1, 1, 0, 2, 1, 1, 1, 0, 2, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 0, 1, 1, 1, 2, 2, 2, 2, 0, 1, 0, 0, 1, 2, 0, 1, 1, 1, 2, 1, 1, 2, 1, 1, 2, 0, 2, 0, 0, 1, 1, 1, 2, 0, 0, 0, 2, 2, 2, 0, 0, 0, 0, 2, 0, 1, 2, 1, 2, 0, 2, 1, 2, 0, 0, 2, 0, 0, 0, 1, 2, 2, 1, 2, 2, 0, 0, 0, 0, 0, 2, 1, 0, 0, 1, 0, 1, 1, 1, 0, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 2, 1, 1, 1, 0, 1, 1, 2, 1, 0, 1, 0, 1, 0, 1, 2, 1, 0, 0, 1, 1, 2, 2, 1, 1, 0, 0, 1, 2, 1, 1, 1, 1, 2, 1, 0, 0, 0, 2, 0, 2, 1, 2, 2, 2, 0, 2, 2, 2, 1, 2, 0, 2, 0, 0, 2, 2, 1, 0, 2, 2, 2, 2, 0, 2, 1, 2, 2, 0, 2, 2, 1, 2, 1, 1, 0, 1, 0, 0, 1, 2, 0, 1, 2, 2, 0, 1, 0, 1, 0, 2, 0, 2, 2, 2, 0, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 0, 2, 2, 1, 0, 1, 1, 2, 1, 2, 0, 1, 0, 1, 1, 0, 2, 0, 1, 1, 1, 2, 1, 0, 0, 2, 1, 2, 2, 0, 2, 0, 1, 1, 0, 1, 1, 0, 2, 1, 0, 0, 2, 0, 1, 1, 2, 1, 1, 0, 2, 1, 2, 2, 0, 0, 1, 2, 1, 1, 1, 2, 2, 0, 1, 1, 2, 2, 2, 2, 0, 2, 0, 0, 0, 0, 1, 1, 2, 0, 1, 0, 2, 2, 2, 2, 2, 2, 2, 0, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 1, 0, 2, 0, 0, 1, 1, 1, 1, 2, 1, 1, 1, 0, 2, 0, 1, 1, 1, 2, 1, 2, 1, 2, 0, 2, 2, 1, 1, 2, 1, 2, 2, 0, 1, 2, 0, 1, 1, 2, 0, 2, 0, 1, 1, 0, 0, 2, 1, 1, 1, 2, 2, 0, 0, 2, 2, 1, 2, 2, 
2, 2, 2, 2, 2, 0, 0, 2, 1, 0, 0, 1, 2, 1, 2, 1, 1, 0, 2, 0, 0, 2, 2, 2, 2, 1, 2, 0, 2, 1, 1, 1, 2, 2, 1, 0, 1, 2, 1, 2, 0, 1, 0, 0, 2, 2, 0, 0, 1, 0, 0, 1, 1, 2, 2, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 2, 2, 0, 2, 0, 2, 1, 2, 2, 1, 1, 2, 1, 0, 1, 0, 2, 2, 0, 2, 2, 0, 0, 2, 2, 1, 2, 0, 1, 1, 2, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 0, 2, 2, 1, 0, 2, 1, 2, 1, 0, 1, 1, 2, 2, 0, 2, 0, 1, 2, 1, 1, 0, 1, 0, 1, 2, 1, 2, 1, 1, 1, 0, 1, 1, 2, 2, 1, 1, 0, 2, 1, 1, 1, 1, 2, 0, 1, 0, 0, 1, 0, 2, 1, 2, 1, 2, 0, 1, 2, 2, 2, 1, 0, 0, 2, 1, 0, 2, 0, 0, 1, 2, 1, 2, 0, 2, 2, 2, 0, 1, 0, 1, 2, 1, 1, 0, 2, 2, 1, 2, 2, 1, 1, 1, 0, 1, 0, 2, 0, 2, 2, 0, 0, 0, 2, 2, 2, 0, 1, 0, 1, 1, 0, 0, 0, 2, 0, 0, 0, 2, 2, 0, 2, 2, 1, 0, 2, 0, 2, 0, 1, 0, 2, 1, 0, 1, 2, 2, 2, 1, 1, 1, 1, 2, 1, 2, 1, 1, 0, 0, 2, 2, 0, 1, 1, 2, 0, 1, 1, 1, 1, 1, 1, 2, 0, 0, 1, 0, 0, 1, 0, 0, 1, 2, 1, 0, 1, 1, 2, 1, 1, 1, 1, 0, 2, 1, 0, 1, 1, 2, 1, 0, 2, 2, 1, 1, 2, 2, 1, 2, 0, 2, 1, 2, 2, 1, 1, 1, 0, 1, 1, 2, 1, 0, 2, 2, 2, 1, 2, 2, 1, 1, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 0, 2, 1, 2, 0, 0, 2, 2, 0, 2, 2, 1, 0, 1, 0, 0, 0, 1, 1, 2, 1, 0, 2, 0, 2, 2, 2, 1, 1, 1, 1, 1, 1, 0, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 1, 0, 2, 2, 2, 1, 1, 0, 0, 1, 1, 2, 1, 2, 2, 0, 0, 1, 0, 1, 0, 2, 0, 1, 1, 0, 0, 1, 0, 1, 2, 2, 1, 0, 2, 0, 2, 0, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 0, 1, 2, 1, 1, 2, 0, 2, 2, 2, 0, 2, 1, 1, 2, 1, 1, 0, 0, 2, 0, 0, 2, 1, 2, 2, 2, 0, 1, 1, 2, 1, 0, 1, 0, 1, 1, 2, 2, 2, 1, 0, 1, 1, 0, 2, 2, 2, 1, 0, 2, 2, 0, 0, 1, 1, 0, 0, 2, 0, 1, 0, 0, 2, 2, 1, 1, 1, 0, 2, 2, 1, 2, 2, 1, 0, 1, 2, 1, 1, 0, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 0, 0, 2, 2, 1, 0, 2, 2, 2, 2, 1, 0, 0, 2, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 0, 2, 0, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 2, 2, 0, 1, 0, 2, 2, 2, 1, 1, 1, 0, 0, 1, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2, 0, 0, 1, 0, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 2, 1, 2, 2, 0, 2, 1, 2, 1, 0, 1, 0, 1, 0, 2, 1, 2, 0, 1, 1, 1, 1, 1, 2, 0, 1, 1, 2, 2, 1, 2, 1, 0, 1, 0, 1, 1, 2, 1, 0, 2, 2, 1, 1, 2, 0, 0, 2, 2, 0, 
2, 2, 1, 1, 1, 1, 2, 0, 2, 0, 0, 1, 1, 2, 2, 1, 0, 0, 0, 0, 2, 0, 2, 2, 1, 1, 0, 1, 0, 0, 0, 0, 1, 1, 2, 1, 1, 2, 2, 2, 1, 0, 1, 2, 2, 0, 1, 0, 1, 2, 0, 0, 1, 1, 0, 0, 2, 1, 1, 2, 1, 1, 2, 2, 2, 0, 1, 1, 1, 2, 2, 1, 1, 1, 2, 0, 2, 2, 1, 2, 0, 1, 0, 2, 2, 1, 0, 1, 0, 0, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 0, 0, 1, 2, 0, 1, 1, 2, 0, 2, 1, 2, 0, 1, 1, 2, 1, 1, 0, 2, 1, 0, 2, 1, 1, 0, 1, 2, 2, 0, 0, 1, 1, 1, 2, 2, 2, 0, 2, 2, 1, 1, 2, 2, 0, 0, 0, 0, 2, 2, 2, 0, 0, 1, 1, 0, 1, 2, 2, 2, 0, 0, 2, 1, 2, 2, 2, 1, 2, 2, 1, 1, 0, 2, 1, 1, 0, 1, 1, 2, 2, 1, 0, 2, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 2, 2, 2, 2, 2, 0, 2, 0, 1, 1, 2, 1, 1, 2, 1, 1, 2, 1, 1, 2, 1, 0, 1, 1, 2, 2, 0, 2, 1, 2, 2, 1, 0, 2, 1, 1, 2, 0, 0, 1, 0, 1, 2, 0, 0, 0, 1, 2, 2, 0, 0, 2, 1, 1, 1, 0, 1, 1, 2, 0, 2, 0, 0, 1, 1, 2, 2, 0, 2, 1, 1, 2, 0, 2, 0, 1, 2, 1, 1, 1, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 0, 0, 1, 2, 1, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 2, 1, 2, 0, 1, 2, 0, 0, 1, 0, 2, 2, 2, 2, 1, 0, 1, 0, 0, 1, 2, 2, 0, 0, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 0, 2, 1, 0, 1, 2, 1, 2, 1, 1, 0, 0, 2, 1, 2, 1, 1, 1, 2, 0, 1, 1, 1, 0, 2, 1, 0, 1, 1, 0, 1, 0, 2, 1, 2, 0, 0, 2, 0, 2, 2, 1, 2, 1, 1, 1, 1, 0, 2, 1, 1, 2, 1, 0, 1, 0, 1, 1, 2, 0, 2, 0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 2, 2, 0, 2, 1, 2, 0, 1, 0, 1, 1, 1, 2, 1, 2, 0, 2, 0, 1, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 0, 2, 0, 2, 2, 1, 1, 1, 2, 0, 0, 0, 2, 2, 2, 0, 2, 2, 0, 1, 0, 0, 2, 1, 2, 2, 1, 1, 0, 1, 2, 0, 1, 1, 0, 1, 1, 2, 2, 1, 2, 0, 1, 1, 1, 2, 0, 1, 0, 1, 0, 1, 2, 1, 2, 2, 2, 0, 1, 1, 1, 1, 0, 1, 1, 2, 1, 1, 0, 1, 0, 0, 2, 1, 2, 1, 1, 0, 2, 2, 0, 1, 1, 1, 0, 1, 1, 1, 0, 2, 0, 1, 2, 0, 0, 0, 0, 0, 2, 2, 2, 0, 1, 0, 1, 1, 0, 2, 1, 1, 0, 2, 2, 1, 2, 1, 2, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 2, 0, 1, 1, 1, 1, 0, 2, 2, 2, 2, 1, 1, 0, 0, 1, 1, 0, 2, 1, 0, 0, 0, 2, 1, 2, 2, 1, 2, 1, 0, 0, 2, 2, 0, 2, 2, 1, 1, 1, 0, 2, 2, 2, 1, 0, 0, 1, 1, 1, 2, 1, 1, 2, 2, 1, 2, 2, 1, 0, 1, 0, 1, 1, 1, 2, 
2, 2, 0, 0, 1, 0, 1, 2, 2, 2, 1, 1, 0, 2, 0, 2, 2, 2, 1, 2, 2, 0, 2, 0, 2, 2, 0, 1, 0, 0, 1, 1, 2, 2, 2, 2, 2, 0, 0, 2, 2, 1, 0, 2, 2, 1, 2, 0, 2, 1, 2, 1, 0, 1, 2, 1, 1, 0, 2, 2, 2, 1, 0, 1, 0, 2, 1, 1, 0, 2, 1, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 0, 0, 1, 0, 2, 2, 2, 2, 2, 1, 0, 1, 2, 1, 2, 2, 1, 2, 2, 0, 2, 2, 1, 2, 2, 1, 2, 2, 0, 1, 1, 1, 0, 0, 2, 0, 2, 2, 0, 1, 2, 0, 1, 1, 2, 0, 0, 0, 0, 1, 0, 1, 0, 2, 1, 0, 1, 0, 2, 2, 0, 0, 0, 2, 2, 2, 2, 0, 1, 1, 1, 1, 1, 1, 0, 2, 2, 0, 1, 1, 0, 2, 0, 0, 0, 2, 1, 0, 1, 0, 1, 0, 2, 1, 2, 1, 2, 2, 1, 0, 0, 2, 0, 1, 2, 0, 2, 2, 0, 2, 0, 1, 1, 1, 2, 1, 1, 2, 1, 0, 1, 2, 0, 0, 1, 2, 2, 1, 0, 0, 2, 1, 1, 2, 0, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 0, 2, 2, 0, 2, 2, 2, 2, 1, 1, 0, 1, 2, 1, 1, 0, 1, 1, 1, 0, 0, 2, 1, 2, 2, 0, 1, 0, 2, 2, 0, 2, 2, 0, 2, 0, 1, 2, 1, 1, 0, 0, 1, 2, 1, 2, 1, 1, 2, 0, 0, 1, 1, 2, 1, 0, 0, 2, 1, 1, 0, 2, 2, 2, 2, 2, 1, 1, 2, 2, 0, 0, 0, 1, 1, 2, 2, 1, 1, 2, 1, 1, 2, 0, 0, 1, 0, 0, 2, 1, 0, 0, 1, 2, 2, 2, 1, 2, 1, 0, 1, 1, 2, 2, 0, 0, 0, 2, 0, 2, 0, 2, 1, 0, 1, 2, 2, 0, 2, 2, 0, 1, 1, 0, 0, 2, 2, 1, 2, 1, 2, 1, 0, 2, 1, 2, 0, 0, 2, 1, 1, 1, 0, 1, 0, 0, 2, 1, 1, 1, 1, 2, 0, 2, 1, 2, 2, 0, 2, 2, 2, 2, 2, 1, 1, 1, 0, 1, 0, 1, 1, 2, 1, 1, 0, 1, 2, 0, 2, 0, 0, 1, 2, 0, 1, 2, 2, 1, 1, 0, 2, 0, 1, 2, 1, 2, 2, 1, 2, 0, 0, 2, 1, 0, 2, 2, 2, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 1, 2, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 0, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 0, 2, 2, 0, 2, 2, 1, 1, 1, 2, 1, 1, 0, 1, 2, 0, 1, 2, 0, 0, 2, 1, 2, 2, 0, 1, 1, 1, 1, 2, 1, 2, 1, 0, 2, 1, 0, 0, 2, 1, 2, 0, 1, 1, 0, 0, 0, 1, 2, 0, 0, 2, 2, 2, 0, 2, 1, 2, 1, 1, 2, 2, 1, 2, 2, 2, 0, 2, 1, 1, 0, 1, 0, 0, 1, 1, 2, 2, 0, 2, 2, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 2, 2, 2, 1, 2, 2, 1, 1, 0, 1, 2, 2, 0, 1, 2, 0, 2, 0, 1, 1, 2, 1, 1, 0, 0, 1, 1, 2, 2, 1, 0, 2, 2, 1, 1, 1, 2, 2, 0, 2, 1, 0, 2, 2, 0, 0, 1, 1, 0, 0, 0, 1, 2, 2, 2, 2, 0, 2, 1, 0, 2, 0, 2, 1, 1, 1, 0, 2, 1, 0, 1, 1, 1, 2, 1, 2, 2, 0, 1, 2, 1, 1, 0, 0, 2, 0, 2, 2, 1, 2, 0, 1, 0, 1, 1, 2, 0, 2, 1, 0, 0, 2, 
1, 1, 2, 2, 1, 0, 1, 0, 2, 2, 1, 0, 2, 2, 1, 0, 1, 1, 1, 1, 2, 0, 2, 2, 2, 2, 0, 0, 2, 0, 1, 2, 0, 1, 2, 2, 0, 2, 0, 2, 2, 2, 0, 1, 1, 2, 2, 2, 0, 0, 2, 1, 1, 2, 1, 0, 2, 0, 2, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 2, 2, 2, 1, 2, 0, 2, 1, 1, 2, 2, 2, 1, 1, 2, 1, 1, 0, 2, 2, 1, 2, 1, 2, 2, 0, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 0, 2, 0, 2, 1, 2, 1, 0, 2, 1, 0, 1, 2, 1, 2, 0, 2, 2, 1, 1, 1, 0, 0, 0, 1, 2, 1, 0, 1, 1, 0, 1, 1, 0, 0, 2, 1, 1, 1, 2, 1, 0, 0, 2, 2, 2, 0, 0, 0, 2, 1, 1, 2, 2, 1, 0, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 2, 1, 2, 1, 1, 2, 1, 2, 0, 2, 1, 0, 0, 2, 1, 2, 1, 1, 0, 2, 2, 2, 2, 0, 1, 2, 1, 0, 2, 2, 0, 2, 2, 2, 0, 0, 1, 2, 1, 2, 0, 1, 1, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 0, 2, 1, 2, 1, 1, 0, 0, 0, 2, 1, 0, 1, 2, 2, 1, 2, 0, 1, 2, 1, 0, 2, 2, 1, 1, 1, 1, 2, 1, 2, 0, 2, 2, 1, 1, 0, 0, 0, 2, 1, 1, 2, 0, 2, 2, 2, 1, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 0, 2, 1, 2, 0, 1, 1, 0, 0, 1, 0, 0, 1, 2, 0, 1, 1, 2, 0, 2, 2, 1, 1, 1, 2, 0, 2, 1, 2, 1, 2, 0, 1, 0, 2, 1, 1, 2, 1, 0, 2, 2, 2, 0, 1, 2, 0, 0, 0, 1, 0, 1, 2, 0, 2, 2, 1, 1, 2, 0, 1, 1, 2, 0, 1, 0, 0, 1, 2, 2, 0, 1, 2, 1, 1, 1, 0, 0, 1, 1, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 2, 2, 1, 2, 1, 0, 1, 1, 2, 0, 1, 2, 2, 0, 2, 1, 0, 2, 1, 1, 2, 2, 0, 0, 2, 2, 1, 2, 0, 2, 0, 1, 1, 0, 1, 1, 2, 1, 1, 1, 2, 2, 0, 1, 2, 0, 0, 1, 1, 1, 1, 1, 2, 0, 1, 1, 0, 1, 1, 1, 1, 0, 2, 1, 2, 2, 2, 1, 0, 0, 2, 1, 0, 0, 0, 0, 1, 1, 2, 2, 1, 1, 2, 2, 0, 2, 2, 2, 2, 1, 0, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 0, 0, 0, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 2, 1, 1, 0, 2, 0, 2, 1, 2, 1, 1, 0, 1, 0, 2, 1, 0, 1, 2, 2, 2, 1, 2, 0, 1, 0, 2, 1, 1, 0, 2, 0, 0, 1, 2, 1, 0, 0, 1, 2, 1, 2, 0, 2, 0, 1, 2, 1, 0, 1, 1, 2, 2, 0, 0, 0, 0, 0, 2, 2, 0, 2, 1, 1, 1, 1, 2, 1, 0, 1, 1, 1, 0, 0, 0, 2, 0, 2, 2, 0, 2, 1, 0, 1, 0, 2, 2, 0, 1, 0, 2, 2, 2, 1, 0, 1, 0, 1, 2, 0, 1, 1, 0, 2, 1, 1, 1, 0, 2, 2, 1, 0, 1, 1, 1, 0, 1, 1, 2, 2, 1, 0, 2, 0, 1, 2, 2, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 2, 2, 2, 2, 0, 2, 0, 1, 1, 1, 2, 2, 2, 2, 2, 1, 0, 2, 2, 1, 1, 0, 0, 1, 2, 0, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 
2, 1, 2, 1, 1, 2, 1, 2, 0, 1, 1, 1, 0, 1, 2, 0, 0, 0, 2, 2, 2, 1, 1, 0, 2, 0, 1, 1, 2, 2, 2, 2, 2, 2, 2, 0, 2, 0, 2, 1, 2, 1, 0, 2, 2, 2, 2, 1, 1, 2, 1, 0, 1, 2, 0, 0, 0, 1, 0, 1, 2, 1, 1, 0, 1, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 2, 2, 0, 1, 2, 0, 1, 1, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 2, 0, 2, 1, 0, 2, 2, 1, 0, 1, 0, 1, 2, 1, 1, 0, 1, 2, 1, 1, 2, 2, 2, 0, 1, 2, 2, 0, 2, 1, 2, 0, 2, 1, 1, 1, 1, 1, 2, 0, 0, 2, 0, 2, 2, 2, 0, 2, 0, 2, 2, 1, 1, 1, 2, 1, 1, 2, 1, 2, 2, 2, 2, 1, 2, 1, 1, 2, 1, 2, 2, 2, 0, 1, 1, 0, 0, 2, 1, 1, 2, 0, 2, 1, 0, 2, 1, 1, 0, 2, 1, 0, 2, 2, 0, 0, 2, 1, 2, 1, 0, 0, 2, 0, 0, 0, 2, 0, 1, 0, 2, 1, 2, 1, 1, 2, 0, 1, 0, 2, 0, 0, 1, 0, 1, 2, 1, 1, 1, 2, 2, 1, 0, 1, 0, 0, 0, 0, 2, 1, 2, 2, 2, 0, 2, 1, 1, 0, 2, 1, 0, 1, 1, 1, 0, 2, 1, 2, 2, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 2, 2, 0, 1, 1, 1, 0, 2, 1, 2, 1, 1, 2, 1, 2, 2, 0, 0, 1, 1, 2, 2, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 2, 2, 1, 2, 0, 1, 1, 2, 1, 1, 0, 2, 0, 1, 2, 2, 0, 2, 1, 0, 1, 2, 1, 2, 1, 2, 2, 2, 0, 0, 1, 0, 0, 1, 0, 2, 2, 2, 0, 2, 1, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 1, 0, 1, 2, 2, 1, 2, 1, 2, 1, 1, 1, 0, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 0, 1, 1, 2, 2, 1, 0, 1, 1, 2, 0, 2, 0, 2, 1, 1, 2, 1, 2, 0, 0, 2, 1, 2, 1, 2, 0, 0, 0, 1, 0, 1, 2, 0, 2, 2, 1, 0, 1, 2, 2, 0, 1, 1, 2, 2, 1, 1, 2, 2, 1, 1, 0, 2, 1, 1, 2, 1, 0, 2, 1, 1, 2, 2, 1, 1, 0, 2, 2, 1, 1, 2, 2, 1, 1, 2, 0, 2, 1, 2, 2, 1, 2, 0, 1, 1, 1, 0, 1, 2, 0, 1, 1, 1, 1, 0, 1, 0, 2, 1, 2, 2, 2, 1, 1, 2, 0, 1, 2, 2, 0, 0, 2, 1, 0, 0, 2, 0, 1, 1, 0, 1, 2, 0, 0, 0, 2, 1, 2, 2, 2, 0, 1, 2, 2, 2, 0, 1, 1, 0, 2, 1, 2, 1, 0, 2, 0, 2, 2, 1, 0, 1, 2, 2, 1, 0, 2, 2, 0, 0, 2, 2, 0, 0, 2, 1, 2, 0, 1, 1, 1, 2, 0, 0, 1, 0, 0, 0, 2, 2, 2, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 1, 1, 2, 0, 2, 2, 2, 2, 2, 1, 0, 2, 0, 1, 1, 1, 0, 1, 2, 2, 1, 1, 2, 1, 0, 0, 2, 1, 1, 2, 1, 1, 0, 1, 1, 0, 0, 2, 0, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 0, 0, 1, 1, 1, 2, 2, 1, 1, 0, 1, 1, 2, 2, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 2, 2, 1, 0, 1, 2, 0, 1, 1, 0, 2, 1, 0, 0, 2, 0, 2, 1, 1, 0, 2, 2, 2, 
1, 2, 0, 2, 0, 2, 0, 2, 2, 1, 0, 2, 1, 2, 1, 0, 1, 0, 2, 1, 2, 2, 0, 1, 0, 0, 1, 0, 2, 2, 0, 0, 0, 1, 1, 1, 2, 2, 0, 0, 2, 0, 0, 2, 1, 1, 0, 2, 1, 1, 1, 1, 1, 0, 2, 1, 1, 0, 0, 0, 1, 2, 0, 2, 2, 2, 0, 2, 2, 2, 1, 0, 2, 0, 2, 1, 0, 2, 1, 2, 2, 0, 1, 2, 2, 1, 1, 2, 2, 0, 2, 0, 2, 1, 0, 1, 0, 0, 1, 1, 0, 2, 0, 1, 2, 2, 1, 1, 0, 1, 2, 0, 2, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 1, 1, 2, 1, 2, 0, 2, 0, 0, 2, 2, 1, 1, 0, 1, 1, 2, 1, 0, 0, 1, 2, 2, 1, 0, 2, 0, 2, 1, 1, 2, 0, 1, 2, 0, 1, 0, 1, 2, 2, 1, 1, 2, 0, 1, 2, 2, 0, 2, 2, 0, 0, 2, 1, 1, 0, 0, 2, 0, 0, 1, 2, 2, 2, 1, 1, 2, 2, 1, 0, 0, 1, 0, 0, 0, 1, 2, 2, 0, 2, 1, 0, 1, 2, 0, 2, 0, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 0, 0, 2, 0, 2, 2, 0, 1, 0, 1, 1, 1, 2, 1, 2, 2, 0, 1, 0, 1, 0, 2, 0, 2, 1, 1, 0, 0, 2, 0, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 0, 1, 1, 1, 1, 2, 1, 2, 2, 2, 1, 0, 2, 1, 1, 0, 1, 2, 2, 2, 0, 2, 0, 2, 1, 0, 0, 2, 0, 0, 2, 0, 2, 0, 2, 1, 1, 0, 2, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 2, 0, 1, 2, 0, 0, 2, 0, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 0, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 0, 1, 1, 0, 0, 1, 1, 2, 2, 0, 1, 1, 0, 1, 0, 0, 2, 0, 2, 1, 0, 2, 1, 2, 2, 1, 1, 0, 0, 2, 1, 2, 2, 2, 0, 1, 0, 0, 2, 1, 1, 1, 2, 1, 1, 2, 2, 0, 0, 2, 1, 0, 0, 1, 2, 1, 2, 1, 2, 1, 1, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 0, 2, 0, 2, 1, 0, 0, 0, 0, 1, 2, 0, 0, 2, 1, 2, 2, 1, 2, 1, 1, 0, 1, 2, 1, 1, 2, 1, 2, 2, 2, 1, 0, 1, 2, 2, 2, 0, 2, 0, 1, 0, 2, 0, 0, 1, 1, 2, 2, 0, 0, 0, 1, 1, 2, 2, 0, 0, 1, 1, 1, 1, 2, 0, 1, 2, 0, 2, 2, 2, 2, 1, 0, 1, 1, 2, 1, 0, 0, 2, 0, 2, 2, 2, 0, 1, 1, 0, 0, 0, 0, 2, 1, 0, 0, 1, 2, 2, 1, 0, 0, 0, 0, 1, 2, 1, 0, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 0, 0, 1, 0, 1, 0, 2, 2, 1, 0, 0, 2, 0, 0, 1, 2, 1, 2, 1, 0, 2, 2, 2, 1, 0, 2, 1, 1, 0, 2, 0, 1, 1, 2, 0, 0, 0, 0, 1, 2, 0, 1, 1, 2, 1, 2, 1, 2, 2, 1, 1, 2, 0, 0, 2, 2, 2, 0, 2, 0, 2, 1, 1, 1, 2, 1, 0, 1, 2, 2, 2, 2, 2, 2, 1, 0, 2, 0, 2, 2, 1, 1, 1, 0, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 0, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 0, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 1, 1, 2, 0, 1, 1, 2, 2, 
1, 1, 2, 0, 1, 0, 0, 2, 1, 1, 1, 2, 0, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 0, 0, 0, 2, 1, 0, 0, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 1, 1, 0, 2, 0, 0, 0, 2, 1, 2, 1, 0, 0, 0, 2, 0, 1, 1, 2, 1, 2, 2, 1, 2, 0, 0, 2, 0, 2, 2, 2, 1, 0, 1, 1, 0, 1, 1, 1, 2, 2, 1, 0, 1, 2, 2, 0, 0, 1, 2, 1, 0, 0, 1, 2, 1, 2, 0, 0, 2, 1, 0, 1, 2, 1, 2, 0, 2, 1, 2, 2, 1, 1, 2, 2, 1, 0, 0, 0, 2, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 2, 2, 1, 2, 2, 2, 2, 0, 1, 1, 1, 2, 2, 0, 1, 2, 1, 1, 2, 2, 0, 0, 1, 2, 1, 1, 1, 0, 1, 2, 1, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 0, 2, 2, 2, 0, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 0, 1, 1, 2, 0, 2, 2, 1, 1, 1, 1, 1, 2, 0, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 1, 1, 2, 0, 2, 0, 2, 2, 0, 2, 0, 1, 1, 2, 2, 1, 2, 2, 1, 0, 0, 2, 2, 2, 2, 1, 1, 0, 0, 2, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 2, 1, 0, 0, 2, 0, 1, 1, 1, 0, 1, 0, 2, 2, 1, 2, 2, 2, 1, 1, 2, 0, 1, 2, 1, 1, 2, 2, 0, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 0, 2, 1, 1, 2, 0, 1, 2, 1, 1, 0, 2, 1, 1, 1, 2, 2, 2, 2, 1, 1, 0, 2, 1, 1, 1, 1, 0, 1, 1, 1, 0, 2, 1, 1, 2, 1, 1, 1, 2, 0, 1, 2, 2, 1, 1, 2, 1, 2, 1, 1, 0, 0, 0, 1, 2, 0, 1, 0, 0, 2, 2, 1, 1, 2, 0, 0, 1, 2, 1, 0, 0, 0, 1, 1, 1, 2, 1, 1, 0, 2, 0, 1, 1, 2, 1, 1, 0, 0, 2, 2, 1, 2, 1, 2, 0, 2, 2, 0, 2, 2, 1, 1, 2, 2, 0, 2, 0, 0, 2, 1, 1, 0, 2, 1, 0, 1, 2, 0, 1, 2, 1, 2, 2, 0, 1, 1, 0, 2, 1, 0, 1, 1, 2, 0, 1, 0, 2, 1, 2, 0, 2, 1, 1, 0, 1, 1, 2, 0, 2, 1, 0, 1, 0, 0, 1, 2, 2, 0, 0, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 0, 1, 1, 1, 2, 1, 1, 2, 1, 0, 2, 1, 1, 2, 1, 2, 0, 1, 1, 2, 1, 2, 1, 1, 2, 2, 1, 1, 2, 0, 2, 2, 2, 0, 1, 0, 2, 0, 2, 1, 2, 1, 2, 0, 2, 1, 2, 0, 1, 2, 2, 1, 0, 2, 0, 0, 2, 0, 0, 1, 1, 0, 0, 2, 0, 2, 0, 1, 1, 2, 1, 0, 1, 0, 0, 1, 0, 2, 1, 1, 0, 2, 2, 2, 0, 2, 0, 0, 1, 1, 0, 2, 1, 2, 1, 1, 2, 0, 0, 1, 1, 0, 1, 2, 1, 1, 1, 0, 2, 2, 1, 1, 0, 2, 0, 1, 1, 0, 2, 1, 2, 1, 2, 0, 1, 2, 0, 1, 0, 1, 2, 2, 2, 0, 0, 0, 1, 1, 0, 0, 1, 2, 1, 1, 1, 0, 1, 2, 2, 0, 0, 1, 0, 2, 0, 1, 2, 2, 2, 1, 1, 1, 2, 2, 1, 0, 2, 2, 1, 1, 1, 0, 1, 2, 0, 0, 1, 1, 1, 2, 1, 2, 2, 0, 1, 1, 
1, 0, 1, 2, 0, 0, 2, 0, 0, 1, 1, 2, 2, 2, 1, 1, 1, 0, 2, 2, 1, 1, 0, 2, 2, 2, 2, 0, 1, 0, 1, 2, 1, 2, 1, 1, 2, 0, 0, 0, 1, 0, 0, 2, 1, 0, 1, 0, 2, 2, 0, 1, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 0, 2, 1, 2, 1, 1, 0, 0, 0, 0, 1, 2, 2, 1, 2, 2, 0, 2, 2, 2, 2, 2, 1, 1, 0, 2, 0, 0, 2, 1, 1, 2, 2, 2, 0, 1, 0, 1, 0, 1, 2, 1, 1, 0, 2, 1, 0, 2, 2, 2, 1, 0, 2, 2, 0, 2, 2, 2, 1, 0, 2, 2, 1, 2, 1, 1, 0, 1, 1, 0, 1, 0, 2, 1, 2, 2, 0, 0, 1, 2, 2, 2, 2, 0, 2, 0, 0, 2, 2, 1, 1, 0, 1, 0, 0, 1, 2, 0, 2, 2, 1, 2, 2, 2, 1, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 1, 1, 0, 2, 1, 1, 1, 0, 0, 2, 1, 2, 1, 1, 1, 2, 2, 0, 1, 1, 2, 0, 1, 1, 2, 1, 1, 0, 1, 1, 0, 0, 1, 2, 2, 2, 0, 2, 1, 2, 2, 0, 1, 2, 0, 1, 2, 0, 0, 2, 0, 0, 1, 2, 1, 0, 1, 0, 1, 1, 2, 2, 1, 1, 1, 2, 1, 2, 1, 0, 2, 0, 2, 0, 1, 0, 0, 2, 2, 1, 2, 0, 2, 1, 1, 2, 0, 1, 0, 1, 1, 1, 0, 1, 1, 2, 0, 1, 1, 0, 1, 2, 1, 0, 1, 1, 2, 0, 1, 1, 2, 2, 2, 1, 1, 1, 1, 2, 1, 2, 1, 1, 0, 1, 1, 2, 2, 2, 1, 2, 0, 2, 2, 0, 2, 2, 1, 2, 1, 2, 0, 1, 1, 0, 1, 2, 2, 0, 0, 2, 1, 0, 0, 0, 1, 1, 1, 2, 2, 0, 2, 1, 0, 2, 1, 1, 1, 1, 2, 1, 2, 0, 1, 1, 2, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 2, 0, 1, 0, 0, 2, 0, 0, 1, 0, 0, 2, 2, 2, 2, 2, 1, 0, 1, 2, 1, 1, 0, 2, 0, 1, 1, 0, 2, 1, 0, 2, 1, 2, 1, 2, 0, 2, 0, 0, 0, 1, 0, 2, 2, 1, 1, 1, 0, 2, 2, 2, 2, 1, 1, 1, 1, 2, 0, 1, 2, 1, 0, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 2, 2, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 0, 1, 1, 1, 1, 2, 2, 1, 2, 0, 1, 0, 1, 2, 1, 2, 0, 0, 2, 0, 2, 2, 1, 1, 2, 1, 2, 1, 1, 2, 0, 2, 2, 2, 0, 0, 0, 0, 1, 1, 2, 2, 0, 2, 1, 2, 2, 2, 1, 0, 1, 2, 1, 1, 1, 0, 1, 1, 2, 2, 1, 0, 2, 1, 1, 1, 1, 2, 0, 1, 0, 1, 2, 2, 2, 1, 0, 2, 1, 1, 2, 0, 2, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 2, 1, 0, 0, 1, 0, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 1, 1, 2, 0, 1, 1, 2, 0, 1, 1, 2, 1, 1, 2, 1, 0, 0, 0, 1, 1, 0, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 0, 2, 1, 1, 0, 2, 1, 2, 0, 2, 1, 2, 2, 1, 0, 0, 0, 2, 1, 1, 1, 2, 1, 2, 1, 1, 1, 2, 0, 2, 2, 2, 0, 2, 2, 1, 0, 2, 0, 0, 2, 2, 1, 2, 1, 0, 1, 1, 2, 2, 0, 0, 1, 0, 2, 2, 1, 2, 1, 0, 1, 
1, 1, 1, 0, 1, 2, 0, 1, 0, 0, 1, 0, 2, 2, 1, 1, 2, 1, 1, 0, 1, 1, 0, 2, 2, 0, 2, 2, 0, 0, 0, 1, 2, 0, 1, 2, 2, 2, 1, 0, 1, 0, 1, 2, 2, 0, 1, 0, 1, 1, 0, 0, 2, 1, 0, 2, 2, 1, 1, 0, 1, 1, 2, 1, 2, 2, 1, 0, 0, 2, 2, 0, 0, 1, 1, 1, 0, 2, 0, 1, 2, 1, 2, 2, 0, 1, 2, 1, 2, 1, 2, 1, 2, 2, 0, 0, 0, 2, 2, 1, 1, 0, 1, 1, 2, 1, 0, 2, 1, 1, 2, 2, 2, 2, 0, 1, 1, 2, 0, 1, 0, 2, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 0, 0, 1, 2, 0, 1, 0, 0, 0, 2, 2, 1, 2, 2, 0, 2, 2, 1, 1, 1, 0, 1, 2, 1, 1, 1, 2, 2, 0, 1, 1, 2, 1, 2, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 2, 1, 1, 2, 1, 0, 1, 1, 2, 2, 0, 1, 0, 1, 1, 2, 0, 0, 2, 0, 1, 2, 1, 1, 1, 0, 2, 1, 2, 2, 1, 2, 2, 0, 0, 2, 1, 0, 0, 2, 2, 2, 0, 1, 1, 1, 1, 1, 1, 0, 1, 2, 1, 1, 2, 0, 1, 1, 2, 1, 2, 2, 1, 2, 0, 2, 2, 1, 1, 2, 1, 2, 0, 1, 1, 2, 2, 2, 1, 0, 1, 0, 2, 2, 2, 0, 1, 2, 2, 0, 2, 1, 0, 1, 1, 1, 0, 2, 1, 1, 1, 2, 0, 2, 2, 2, 1, 0, 1, 2, 1, 0, 1, 1, 2, 1, 0, 1, 1, 2, 1, 2, 1, 1, 1, 1, 0, 0, 2, 0, 1, 2, 1, 2, 1, 0, 0, 0, 1, 2, 1, 2, 2, 2, 0, 1, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1, 0, 0, 1, 1, 2, 1, 0, 2, 1, 2, 2, 2, 0, 2, 1, 1, 1, 1, 0, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 1, 1, 0, 0, 2, 1, 2, 1, 2, 1, 1, 2, 2, 1, 1, 1, 1, 1, 0, 1, 0, 2, 0, 1, 2, 2, 1, 1, 2, 2, 2, 0, 2, 2, 2, 2, 0, 2, 0, 1, 0, 2, 1, 0, 2, 2, 0, 2, 2, 0, 1, 2, 0, 1, 1, 1, 1, 2, 1, 1, 2, 0, 0, 2, 2, 0, 2, 1, 2, 1, 0, 1, 2, 1, 0, 1, 1, 0, 0, 2, 1, 1, 2, 2, 1, 0, 0, 2, 0, 0, 0, 2, 2, 0, 1, 2, 2, 2, 2, 0, 2, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 0, 0, 2, 1, 1, 2, 0, 2, 1, 0, 1, 2, 2, 0, 1, 1, 1, 1, 2, 0, 0, 2, 2, 1, 2, 0, 1, 1, 0, 2, 1, 1, 0, 2, 0, 2, 0, 2, 1, 1, 1, 0, 2, 1, 1, 2, 2, 1, 2, 2, 1, 2, 2, 0, 0, 0, 2, 2, 1, 1, 2, 1, 0, 1, 2, 2, 0, 2, 1, 0, 1, 0, 2, 1, 0, 2, 1, 0, 0, 2, 1, 0, 2, 1, 2, 1, 1, 2, 2, 1, 1, 1, 0, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 0, 0, 2, 1, 1, 0, 2, 1, 0, 0, 2, 0, 0, 0, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 0, 1, 0, 0, 2, 1, 2, 1, 1, 0, 1, 0, 2, 2, 0, 0, 2, 2, 1, 2, 1, 1, 1, 0, 2, 2, 0, 2, 2, 2, 1, 0, 0, 1, 2, 1, 0, 2, 0, 2, 2, 1, 0, 
1, 1, 1, 0, 2, 0, 1, 0, 1, 0, 1, 2, 0, 0, 2, 1, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 0, 2, 1, 1, 2, 1, 2, 0, 2, 1, 2, 0, 0, 1, 1, 0, 2, 1, 2, 1, 0, 2, 2, 0, 0, 1, 2, 2, 0, 0, 0, 2, 2, 1, 1, 2, 1, 1, 2, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 2, 1, 2, 0, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 1, 0, 2, 0, 2, 2, 1, 0, 1, 0, 1, 2, 1, 2, 0, 1, 1, 2, 1, 0, 1, 0, 1, 1, 1, 0, 2, 1, 2, 2, 1, 1, 2, 2, 2, 0, 1, 1, 0, 1, 0, 2, 0, 2, 2, 0, 2, 1, 2, 2, 1, 2, 1, 1, 0, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 1, 1, 1, 1, 0, 2, 1, 2, 1, 0, 0, 2, 2, 2, 1, 0, 2, 0, 2, 2, 1, 2, 0, 2, 0, 1, 2, 2, 2, 2, 1, 2, 1, 2, 1, 0, 2, 2, 0, 1, 2, 1, 0, 2, 2, 1, 2, 1, 2, 0, 1, 1, 0, 0, 0, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 1, 1, 1, 0, 1, 1, 1, 2, 0, 1, 2, 2, 2, 0, 1, 2, 2, 2, 2, 1, 0, 2, 0, 1, 2, 0, 1, 1, 1, 2, 2, 1, 1, 2, 2, 0, 2, 2, 2, 2, 1, 0, 2, 1, 2, 2, 1, 1, 2, 2, 1, 0, 1, 2, 1, 0, 2, 1, 0, 1, 2, 0, 1, 0, 2, 1, 1, 2, 0, 1, 0, 1, 2, 0, 0, 1, 1, 0, 1, 2, 1, 2, 2, 2, 0, 1, 2, 2, 2, 0, 2, 1, 2, 0, 2, 0, 1, 0, 1, 1, 2, 0, 2, 0, 2, 1, 2, 2, 2, 2, 2, 0, 1, 1, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 0, 0, 0, 1, 0, 0, 0, 2, 2, 0, 2, 2, 2, 0, 2, 2, 1, 2, 1, 1, 2, 0, 1, 0, 0, 1, 0, 1, 2, 2, 1, 1, 2, 2, 1, 0, 0, 0, 1, 1, 1, 2, 1, 0, 1, 0, 1, 0, 2, 0, 1, 2, 1, 1, 1, 2, 2, 0, 2, 1, 1, 0, 1, 0, 1, 0, 0, 1, 1, 2, 0, 2, 2, 1, 1, 1, 2, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 2, 1, 2, 2, 1, 2, 0, 1, 0, 2, 2, 0, 0, 0, 1, 2, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 2, 1, 2, 0, 2, 2, 1, 2, 0, 1, 2, 2, 1, 1, 2, 1, 1, 0, 1, 1, 2, 2, 2, 1, 2, 1, 1, 2, 1, 2, 1, 0, 2, 1, 1, 1, 0, 1, 2, 2, 0, 0, 1, 2, 0, 2, 0, 1, 2, 0, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 0, 0, 0, 1, 1, 2, 0, 0, 0, 0, 2, 2, 1, 1, 1, 1, 0, 1, 0, 1, 0, 2, 0, 2, 1, 1, 0, 2, 1, 2, 1, 0, 2, 1, 0, 1, 1, 2, 0, 0, 2, 2, 1, 1, 2, 0, 0, 0, 1, 1, 0, 2, 1, 1, 1, 2, 2, 0, 0, 2, 0, 2, 2, 0, 0, 2, 2, 2, 2, 1, 1, 2, 0, 0, 2, 1, 2, 1, 0, 2, 2, 2, 0, 2, 0, 2, 1, 1, 1, 0, 2, 1, 0, 2, 1, 1, 2, 2, 2, 2, 2, 1, 0, 1, 1, 2, 1, 1, 1, 0, 2, 2, 1, 1, 1, 1, 2, 1, 0, 1, 1, 2, 2, 2, 0, 2, 0, 1, 0, 0, 1, 1, 1, 1, 2, 2, 2, 1, 1, 
2, 0, 2, 1, 2, 2, 1, 0, 1, 1, 0, 0, 2, 0, 1, 1, 1, 1, 0, 2, 2, 1, 1, 1, 2, 1, 2, 1, 1, 0, 1, 1, 1, 0, 1, 2, 1, 0, 1, 1, 2, 2, 2, 1, 2, 0, 2, 0, 0, 2, 1, 2, 1, 2, 2, 0, 0, 2, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 2, 0, 2, 1, 1, 1, 2, 2, 0, 0, 1, 2, 0, 1, 2, 0, 1, 2, 2, 1, 1, 1, 1, 2, 0, 1, 1, 2, 1, 0, 2, 2, 1, 2, 1, 0, 1, 2, 0, 1, 0, 0, 1, 1, 0, 2, 1, 2, 1, 2, 1, 1, 0, 1, 2, 1, 1, 0, 0, 2, 2, 0, 1, 0, 2, 1, 1, 2, 2, 2, 0, 1, 0, 1, 1, 2, 0, 0, 2, 1, 2, 2, 2, 1, 1, 0, 2, 2, 1, 0, 1, 2, 2, 1, 2, 1, 2, 1, 1, 1, 0, 2, 2, 0, 2, 1, 2, 2, 2, 1, 1, 1, 0, 2, 1, 0, 1, 2, 1, 0, 0, 2, 0, 2, 0, 1, 2, 1, 0, 2, 1, 2, 2, 2, 1, 0, 1, 1, 0, 2, 0, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 1, 0, 0, 1, 1, 2, 1, 1, 1, 0, 1, 1, 1, 2, 1, 1, 1, 0, 0, 0, 2, 1, 1, 0, 1, 1, 1, 0, 2, 1, 1, 1, 2, 1, 1, 1, 2, 1, 0, 2, 2, 2, 2, 1, 1, 0, 0, 1, 2, 0, 2, 2, 1, 2, 1, 2, 1, 0, 2, 2, 0, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 0, 2, 1, 1, 0, 2, 1, 1, 2, 0, 2, 0, 0, 1, 1, 2, 1, 0, 2, 1, 1, 1, 2, 0, 0, 1, 1, 1, 1, 1, 1, 2, 0, 2, 2, 1, 1, 1, 2, 2, 0, 2, 2, 2, 0, 1, 1, 0, 2, 1, 0, 0, 1, 2, 2, 1, 1, 1, 0, 1, 1, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 2, 2, 1, 1, 0, 0, 1, 1, 0, 2, 1, 2, 0, 0, 1, 2, 0, 0, 2, 0, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 0, 2, 0, 2, 2, 2, 0, 0, 2, 2, 1, 2, 2, 2, 2, 2, 0, 2, 1, 1, 0, 2, 2, 2, 1, 1, 0, 2, 1, 0, 0, 0, 0, 0, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 1, 1, 0, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 0, 1, 1, 1, 1, 2, 1, 0, 2, 0, 0, 2, 2, 1, 1, 1, 2, 1, 0, 2, 1, 2, 0, 2, 2, 2, 0, 2, 2, 1, 0, 0, 2, 1, 0, 1, 1, 2, 1, 0, 1, 2, 0, 0, 2, 0, 1, 2, 2, 1, 1, 1, 2, 0, 0, 2, 2, 2, 1, 2, 1, 1, 2, 0, 2, 0, 1, 1, 1, 0, 1, 1, 0, 1, 2, 0, 2, 0, 2, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 2, 1, 0, 0, 0, 2, 2, 0, 1, 2, 0, 1, 0, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 1, 0, 1, 2, 1, 0, 0, 2, 0, 2, 2, 1, 2, 2, 0, 1, 2, 2, 0, 1, 2, 2, 2, 1, 0, 1, 1, 1, 2, 2, 2, 1, 1, 0, 1, 2, 1, 0, 1, 1, 1, 0, 1, 1, 0, 2, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 0, 2, 2, 2, 1, 0, 1, 1, 2, 2, 1, 1, 0, 1, 1, 2, 0, 2, 2, 2, 2, 1, 0, 1, 2, 0, 1, 1, 1, 
0, 2, 0, 2, 2, 0, 1, 0, 0, 2, 2, 1, 1, 2, 2, 0, 0, 2, 0, 2, 2, 1, 2, 0, 1, 0, 1, 2, 2, 0, 1, 2, 2, 1, 2, 1, 1, 1, 1, 2, 2, 0, 2, 0, 0, 2, 0, 0, 2, 2, 0, 2, 1, 1, 2, 0, 2, 1, 2, 2, 0, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 2, 1, 2, 0, 2, 0, 0, 0, 2, 1, 2, 2, 2, 0, 2, 1, 0, 1, 2, 1, 0, 2, 2, 1, 1, 0, 2, 1, 1, 0, 1, 2, 2, 2, 2, 0, 2, 0, 1, 2, 1, 1, 0, 2, 2, 0, 1, 0, 1, 0, 1, 1, 2, 1, 1, 1, 0, 0, 2, 2, 2, 1, 0, 2, 1, 2, 0, 0, 2, 1, 2, 1, 2, 1, 2, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 0, 1, 1, 1, 2, 2, 2, 1, 0, 2, 0, 1, 0, 2, 1, 0, 2, 1, 0, 0, 1, 1, 0, 0, 2, 2, 0, 2, 1, 1, 0, 1, 1, 2, 2, 2, 1, 1, 1, 2, 1, 0, 0, 2, 2, 2, 1, 0, 0, 0, 2, 0, 2, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 0, 1, 0, 2, 2, 1, 1, 1, 0, 0, 2, 2, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 1, 1, 0, 2, 1, 1, 1, 2, 2, 2, 2, 0, 1, 0, 1, 0, 1, 2, 0, 0, 1, 2, 2, 0, 1, 1, 0, 1, 1, 2, 1, 2, 1, 2, 0, 2, 2, 1, 0, 2, 1, 1, 1, 0, 1, 1, 2, 2, 1, 1, 1, 2, 1, 1, 2, 2, 0, 1, 1, 2, 1, 1, 2, 2, 0, 2, 1, 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 2, 2, 2, 1, 2, 1, 1, 1, 0, 1, 0, 2, 1, 0, 1, 0, 2, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 2, 0, 0, 1, 2, 2, 0, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 2, 1, 2, 0, 2, 1, 1, 1, 2, 1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 2, 1, 1, 2, 0, 1, 1, 1, 1, 2, 0, 2, 2, 2, 2, 0, 0, 1, 2, 1, 0, 2, 1, 0, 0, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 0, 0, 1, 2, 2, 1, 2, 2, 1, 0, 1, 2, 1, 2, 0, 1, 0, 2, 0, 1, 2, 1, 0, 1, 2, 0, 2, 1, 2, 1, 2, 0, 2, 1, 0, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 0, 2, 2, 1, 0, 1, 0, 2, 2, 1, 1, 2, 2, 1, 1, 2, 1, 2, 1, 0, 2, 1, 1, 1, 0, 0, 0, 2, 2, 1, 2, 2, 2, 1, 0, 1, 2, 1, 2, 2, 0, 0, 2, 2, 1, 1, 1, 2, 1, 1, 1, 2, 0, 1, 1, 1, 1, 2, 2, 0, 2, 2, 2, 2, 0, 1, 1, 1, 1, 0, 2, 1, 1, 1, 2, 2, 0, 0, 1, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 0, 1, 2, 2, 1, 2, 0, 2, 2, 0, 2, 1, 1, 2, 0, 2, 1, 0, 0, 2, 0, 2, 2, 0, 2, 2, 0, 1, 1, 1, 2, 1, 2, 0, 2, 0, 2, 1, 1, 1, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 0, 1, 2, 1, 2, 1, 1, 2, 2, 1, 2, 0, 0, 1, 0, 1, 0, 2, 0, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 2, 1, 1, 2, 0, 0, 0, 1, 1, 2, 
0, 2, 0, 2, 2, 0, 0, 1, 1, 1, 2, 2, 0, 2, 2, 2, 0, 2, 1, 1, 1, 0, 1, 0, 0, 2, 1, 2, 1, 2, 0, 1, 0, 0, 2, 0, 1, 1, 2, 1, 2, 1, 1, 2, 1, 2, 0, 1, 2, 2, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 0, 2, 1, 2, 2, 1, 1, 1, 1, 0, 2, 1, 2, 1, 1, 2, 1, 0, 1, 2, 1, 2, 2, 0, 0, 1, 1, 2, 1, 0, 0, 1, 2, 2, 2, 2, 0, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 2, 1, 2, 0, 2, 1, 1, 2, 2, 2, 0, 1, 1, 1, 1, 1, 2, 0, 1, 0, 2, 2, 2, 2, 2, 2, 0, 1, 1, 1, 1, 1, 2, 0, 2, 2, 0, 2, 1, 1, 1, 1, 0, 2, 1, 2, 1, 1, 1, 1, 2, 2, 1, 1, 0, 2, 1, 1, 0, 1, 1, 2, 2, 1, 0, 2, 1, 2, 2, 1, 0, 0, 1, 2, 2, 1, 1, 2, 1, 0, 1, 1, 1, 2, 2, 2, 2, 2, 0, 2, 1, 0, 1, 1, 1, 1, 0, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 0, 0, 2, 0, 1, 2, 2, 2, 1, 2, 2, 1, 1, 0, 1, 2, 1, 1, 0, 0, 2, 1, 2, 1, 2, 0, 2, 1, 2, 0, 2, 2, 1, 1, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 0, 2, 0, 1, 0, 1, 2, 1, 2, 0, 1, 1, 0, 2, 0, 0, 1, 2, 1, 1, 1, 2, 0, 1, 1, 2, 2, 2, 0, 2, 1, 2, 2, 2, 2, 0, 1, 2, 0, 1, 2, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 0, 2, 2, 0, 1, 2, 0, 2, 1, 1, 1, 1, 0, 0, 1, 2, 1, 2, 2, 2, 2, 1, 1, 0, 1, 0, 1, 2, 1, 0, 1, 1, 2, 0, 0, 1, 2, 1, 1, 2, 1, 2, 1, 1, 1, 1, 0, 1, 2, 1, 2, 1, 1, 2, 2, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 2, 0, 2, 1, 1, 2, 1, 1, 0, 1, 0, 1, 2, 2, 2, 2, 1, 2, 2, 0, 0, 1, 1, 0, 2, 2, 2, 2, 2, 0, 1, 1, 2, 2, 2, 2, 2, 2, 0, 1, 2, 0, 2, 1, 2, 0, 2, 2, 1, 1, 1, 1, 2, 0, 0, 2, 2, 0, 2, 0, 2, 1, 1, 1, 2, 1, 0, 1, 2, 0, 1, 1, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 0, 0, 0, 1, 2, 1, 1, 0, 0, 1, 0, 1, 1, 2, 2, 0, 2, 2, 0, 0, 1, 0, 1, 0, 2, 0, 0, 0, 2, 2, 0, 1, 2, 1, 1, 2, 1, 2, 2, 1, 2, 2, 0, 0, 2, 2, 1, 1, 2, 2, 2, 0, 1, 0, 2, 2, 1, 2, 2, 2, 1, 2, 2, 1, 0, 1, 2, 1, 2, 1, 2, 0, 1, 2, 2, 2, 1, 2, 0, 2, 0, 1, 2, 2, 2, 2, 0, 2, 1, 1, 1, 1, 0, 2, 0, 2, 1, 1, 1, 2, 1, 0, 1, 0, 2, 0, 2, 2, 1, 2, 2, 1, 1, 1, 1, 0, 2, 1, 1, 2, 1, 1, 2, 1, 2, 0, 0, 2, 1, 2, 1, 2, 0, 2, 0, 1, 1, 2, 1, 2, 0, 0, 0, 1, 2, 2, 2, 1, 2, 0, 2, 1, 0, 0, 2, 1, 0, 1, 0, 1, 1, 2, 0, 1, 0, 2, 1, 0, 2, 2, 1, 2, 1, 1, 2, 0, 1, 1, 0, 0, 0, 0, 0, 2, 0, 1, 2, 0, 1, 1, 0, 1, 1, 2, 0, 2, 2, 1, 1, 1, 1, 1, 1, 0, 
1, 0, 2, 2, 0, 2, 0, 2, 0, 2, 0, 1, 0, 0, 1, 2, 0, 1, 0, 1, 2, 2, 2, 1, 1, 2, 2, 0, 2, 1, 2, 2, 1, 1, 1, 0, 2, 0, 1, 2, 2, 1, 1, 1, 0, 1, 0, 2, 2, 1, 2, 2, 2, 2, 2, 0, 1, 1, 2, 0, 2, 1, 1, 2, 1, 1, 2, 2, 2, 2, 2, 1, 1, 1, 0, 1, 0, 1, 2, 1, 0, 1, 1, 2, 1, 1, 1, 2, 2, 0, 0, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 0, 2, 1, 1, 0, 1, 2, 2, 1, 0, 1, 2, 0, 0, 1, 0, 1, 0, 2, 2, 0, 1, 1, 0, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 0, 0, 2, 0, 1, 2, 1, 0, 1, 0, 1, 0, 2, 1, 0, 2, 2, 2, 0, 1, 2, 1, 2, 2, 0, 1, 1, 1, 0, 1, 2, 2, 1, 1, 2, 2, 0, 2, 2, 2, 0, 1, 2, 1, 1, 2, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 2, 0, 1, 2, 1, 1, 2, 1, 0, 2, 1, 1, 0, 2, 1, 2, 2, 0, 2, 2, 1, 1, 0, 1, 0, 0, 2, 0, 0, 0, 1, 1, 2, 0, 2, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 0, 0, 1, 0, 1, 2, 1, 2, 2, 0, 2, 1, 0, 0, 2, 1, 2, 2, 2, 0, 2, 1, 1, 2, 0, 1, 2, 2, 2, 1, 2, 1, 1, 2, 2, 1, 0, 2, 1, 0, 1, 2, 2, 1, 0, 1, 2, 1, 0, 1, 1, 0, 1, 2, 1, 1, 2, 1, 0, 2, 0, 1, 1, 1, 0, 2, 1, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 0, 0, 2, 1, 1, 1, 1, 1, 1, 2, 1, 0, 1, 0, 1, 1, 1, 1, 0, 2, 1, 1, 0, 0, 0, 0, 1, 2, 0, 1, 1, 2, 1, 1, 0, 0, 2, 1, 0, 1, 1, 2, 1, 1, 2, 1, 0, 2, 0, 2, 2, 2, 1, 2, 2, 1, 0, 1, 1, 0, 0, 0, 0, 2, 0, 2, 2, 0, 1, 2, 0, 2, 1, 2, 2, 2, 0, 1, 0, 1, 0, 1, 2, 2, 2, 2, 0, 0, 2, 2, 2, 1, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 0, 2, 1, 0, 1, 0, 2, 0, 0, 1, 2, 2, 2, 0, 1, 1, 2, 2, 0, 2, 1, 0, 1, 0, 2, 2, 0, 2, 0, 2, 2, 0, 0, 0, 2, 1, 1, 1, 1, 1, 2, 0, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 2, 0, 2, 1, 1, 2, 0, 1, 2, 2, 0, 1, 1, 2, 0, 2, 1, 0, 2, 2, 0, 2, 1, 2, 2, 2, 0, 2, 1, 1, 2, 1, 2, 1, 1, 0, 2, 1, 0, 1, 2, 2, 1, 2, 0, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 0, 2, 2, 0, 2, 0, 2, 0, 1, 2, 2, 1, 2, 0, 2, 0, 1, 1, 2, 2, 0, 2, 1, 2, 2, 2, 0, 1, 1, 0, 0, 1, 1, 2, 1, 2, 1, 1, 0, 0, 0, 1, 2, 1, 0, 2, 0, 1, 0, 0, 2, 2, 2, 1, 0, 2, 0, 2, 2, 0, 2, 2, 2, 0, 2, 2, 2, 1, 0, 0, 2, 1, 2, 1, 2, 1, 0, 1, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 2, 1, 2, 1, 2, 1, 0, 2, 0, 1, 2, 2, 0, 2, 2, 2, 1, 2, 2, 1, 2, 1, 1, 2, 2, 0, 0, 1, 2, 1, 2, 2, 1, 1, 0, 1, 2, 0, 0, 1, 2, 2, 
1, 2, 0, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 0, 0, 0, 2, 1, 2, 2, 2, 2, 1, 1, 1, 2, 1, 0, 1, 0, 2, 1, 0, 2, 0, 2, 0, 0, 0, 1, 0, 2, 0, 2, 2, 1]
for i in range(200):
for j in range(200):
pygame.draw.rect(board, clrs[generated_cols[(200*i)+j]], ((i*1), (j*1), 1, 1))
pygame.image.save(board, "Colour.jpeg")
| 6,019.9 | 120,018 | 0.334183 | 40,067 | 120,398 | 1.004093 | 0.000799 | 0.282668 | 0.159578 | 0.08143 | 0.994631 | 0.994258 | 0.994258 | 0.994258 | 0.994134 | 0.980264 | 0 | 0.498599 | 0.332904 | 120,398 | 19 | 120,019 | 6,336.736842 | 0.002303 | 0 | 0 | 0 | 0 | 0 | 0.000091 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0 | 0 | 0 | 1 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 |
b20effc83ba0f4d5135051fe5269a5dee6d3020c | 44 | py | Python | pydarkstar/scrubbing/__init__.py | Korrbit/pydarkstar | d9d66c656dc30a6e580a86528d23694201d3a0cf | [
"MIT"
] | 18 | 2015-01-13T03:48:02.000Z | 2022-01-23T16:52:10.000Z | pydarkstar/scrubbing/__init__.py | Korrbit/pydarkstar | d9d66c656dc30a6e580a86528d23694201d3a0cf | [
"MIT"
] | 29 | 2015-01-14T01:34:10.000Z | 2022-01-30T16:57:27.000Z | pydarkstar/scrubbing/__init__.py | Korrbit/pydarkstar | d9d66c656dc30a6e580a86528d23694201d3a0cf | [
"MIT"
] | 46 | 2015-01-13T20:26:39.000Z | 2022-03-22T13:19:46.000Z | from . import scrubber
from . import ffxiah
| 14.666667 | 22 | 0.772727 | 6 | 44 | 5.666667 | 0.666667 | 0.588235 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 44 | 2 | 23 | 22 | 0.944444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
b7671a9fa7d1998295383dd804c7b51df397a9ab | 154 | py | Python | src/spyd/utils/list_to_unicode.py | DanSeraf/spyd | af893b7f9c67785613b25754eb2cf150523a9fe4 | [
"Zlib"
] | 1 | 2020-03-23T20:28:06.000Z | 2020-03-23T20:28:06.000Z | src/spyd/utils/list_to_unicode.py | DanSeraf/spyd | af893b7f9c67785613b25754eb2cf150523a9fe4 | [
"Zlib"
] | 1 | 2022-02-05T16:59:53.000Z | 2022-02-08T17:30:55.000Z | src/spyd/utils/list_to_unicode.py | DanSeraf/spyd | af893b7f9c67785613b25754eb2cf150523a9fe4 | [
"Zlib"
] | null | null | null | def to_unicode(i):
if not isinstance(i, str):
i = str(i, 'utf_8')
return i
def list_to_unicode(l):
return [to_unicode(i) for i in l]
| 19.25 | 37 | 0.603896 | 29 | 154 | 3.034483 | 0.517241 | 0.306818 | 0.227273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00885 | 0.266234 | 154 | 7 | 38 | 22 | 0.769912 | 0 | 0 | 0 | 0 | 0 | 0.032468 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.166667 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
b7eb5402ed79415385a052adeece0e4eef3abcdd | 26 | py | Python | env/Lib/site-packages/win32/print/__init__.py | Daniel-Key/HearStone-Python | 981584d2b9502319393bd92b48f0ec8d906b4d44 | [
"MIT"
] | null | null | null | env/Lib/site-packages/win32/print/__init__.py | Daniel-Key/HearStone-Python | 981584d2b9502319393bd92b48f0ec8d906b4d44 | [
"MIT"
] | 1 | 2020-10-27T14:44:08.000Z | 2020-10-27T14:44:08.000Z | env/Lib/site-packages/win32/print/__init__.py | Daniel-Key/HearStone-Python | 981584d2b9502319393bd92b48f0ec8d906b4d44 | [
"MIT"
] | null | null | null | from win32._print import * | 26 | 26 | 0.807692 | 4 | 26 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086957 | 0.115385 | 26 | 1 | 26 | 26 | 0.782609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
4daf975e0e1662d0081bbe335a956e3a31d11ead | 107 | py | Python | egghead/python/modules/using_tax.py | broox9/learning | bd2078f4b77cf456a352835864afe53ac1fa3466 | [
"MIT"
] | null | null | null | egghead/python/modules/using_tax.py | broox9/learning | bd2078f4b77cf456a352835864afe53ac1fa3466 | [
"MIT"
] | 3 | 2020-09-04T03:25:36.000Z | 2021-05-06T22:45:17.000Z | egghead/python/modules/using_tax.py | broox9/learning | bd2078f4b77cf456a352835864afe53ac1fa3466 | [
"MIT"
] | null | null | null | import tax
# from tax import total_with_tax
print(tax.total_with_tax(69.99))
print(tax.tax_amount(69.99))
| 17.833333 | 32 | 0.785047 | 21 | 107 | 3.761905 | 0.428571 | 0.227848 | 0.303797 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.082474 | 0.093458 | 107 | 5 | 33 | 21.4 | 0.731959 | 0.280374 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0.666667 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 7 |
421277f835132c156213a4565837357ef3e31441 | 113 | py | Python | mmdet/version.py | FinalFlowers/pedestrian_tracking | 8eb2c330f26a1cf030e535aa29a38288ff031f11 | [
"MIT"
] | 6 | 2020-05-21T07:33:48.000Z | 2022-03-12T14:30:10.000Z | mmdet/version.py | FinalFlowers/pedestrian_tracking | 8eb2c330f26a1cf030e535aa29a38288ff031f11 | [
"MIT"
] | 4 | 2020-10-10T13:23:35.000Z | 2022-01-13T03:25:31.000Z | mmdet/version.py | FinalFlowers/pedestrian_tracking | 8eb2c330f26a1cf030e535aa29a38288ff031f11 | [
"MIT"
] | 1 | 2020-09-25T04:08:45.000Z | 2020-09-25T04:08:45.000Z | # GENERATED VERSION FILE
# TIME: Tue May 5 22:34:46 2020
__version__ = '1.1.0+5cbe3a1'
short_version = '1.1.0'
| 18.833333 | 32 | 0.690265 | 21 | 113 | 3.47619 | 0.714286 | 0.219178 | 0.246575 | 0.273973 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.212766 | 0.168142 | 113 | 5 | 33 | 22.6 | 0.56383 | 0.469027 | 0 | 0 | 1 | 0 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
42471667c65aec0e122258079a458bd791812857 | 13,249 | py | Python | evalml/tests/data_checks_tests/test_data_check_action_option.py | Mahesh1822/evalml | aa0ec2379aeba12bbd0dcaaa000f9a2a62064169 | [
"BSD-3-Clause"
] | null | null | null | evalml/tests/data_checks_tests/test_data_check_action_option.py | Mahesh1822/evalml | aa0ec2379aeba12bbd0dcaaa000f9a2a62064169 | [
"BSD-3-Clause"
] | null | null | null | evalml/tests/data_checks_tests/test_data_check_action_option.py | Mahesh1822/evalml | aa0ec2379aeba12bbd0dcaaa000f9a2a62064169 | [
"BSD-3-Clause"
] | null | null | null | import re
import pytest
from evalml.data_checks import DataCheckActionCode, DataCheckActionOption
def test_data_check_action_option_attributes(dummy_data_check_name):
data_check_action_option = DataCheckActionOption(
DataCheckActionCode.DROP_COL, dummy_data_check_name
)
assert data_check_action_option.data_check_name == dummy_data_check_name
assert data_check_action_option.action_code == DataCheckActionCode.DROP_COL
assert data_check_action_option.metadata == {"rows": None, "columns": None}
assert data_check_action_option.parameters is None
data_check_action_option = DataCheckActionOption(
DataCheckActionCode.DROP_COL, None, metadata={}, parameters={}
)
assert data_check_action_option.action_code == DataCheckActionCode.DROP_COL
assert data_check_action_option.data_check_name is None
assert data_check_action_option.metadata == {"rows": None, "columns": None}
assert data_check_action_option.parameters == {}
parameters = {
"global_parameter_name": {
"parameter_type": "global",
"type": "float",
"default_value": 0.0,
},
"column_parameter_name": {
"parameter_type": "column",
"columns": {
"a": {
"impute_strategy": {
"categories": ["mean", "mode"],
"type": "category",
"default_value": "mean",
},
"constant_fill_value": {"type": "float", "default_value": 0},
},
},
},
}
data_check_action_option = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
dummy_data_check_name,
metadata={"columns": [1, 2]},
parameters=parameters,
)
assert data_check_action_option.action_code == DataCheckActionCode.DROP_COL
assert data_check_action_option.data_check_name == dummy_data_check_name
assert data_check_action_option.metadata == {"columns": [1, 2], "rows": None}
assert data_check_action_option.parameters == parameters
def test_data_check_action_option_equality(dummy_data_check_name):
data_check_action_option = DataCheckActionOption(
DataCheckActionCode.DROP_COL, dummy_data_check_name
)
data_check_action_option_eq = DataCheckActionOption(
DataCheckActionCode.DROP_COL, dummy_data_check_name
)
assert data_check_action_option == data_check_action_option
assert data_check_action_option == data_check_action_option_eq
assert data_check_action_option_eq == data_check_action_option
data_check_action_option = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
None,
metadata={"same detail": "same same same"},
parameters={
"global_parameter_name": {
"parameter_type": "global",
"type": "float",
"default_value": 0.0,
},
},
)
data_check_action_option_eq = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
None,
metadata={"same detail": "same same same"},
parameters={
"global_parameter_name": {
"parameter_type": "global",
"type": "float",
"default_value": 0.0,
},
},
)
assert data_check_action_option == data_check_action_option
assert data_check_action_option == data_check_action_option_eq
assert data_check_action_option_eq == data_check_action_option
def test_data_check_action_option_inequality():
data_check_action_option = DataCheckActionOption(DataCheckActionCode.DROP_COL, None)
data_check_action_option_diff = DataCheckActionOption(
DataCheckActionCode.DROP_COL, None, metadata={"metadata": ["this is different"]}
)
assert data_check_action_option != data_check_action_option_diff
assert data_check_action_option_diff != data_check_action_option
data_check_action_option = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
data_check_name=None,
metadata={"metadata": ["same metadata"]},
parameters={
"global_parameter_name": {
"parameter_type": "global",
"type": "float",
"default_value": 0.0,
}
},
)
data_check_action_option_diff_parameters = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
data_check_name=None,
metadata={"metadata": ["same metadata"]},
parameters={
"different_global_parameter_name": {
"parameter_type": "global",
"type": "float",
"default_value": 0.0,
}
},
)
assert data_check_action_option != data_check_action_option_diff
assert data_check_action_option_diff != data_check_action_option
assert data_check_action_option != data_check_action_option_diff_parameters
assert data_check_action_option_diff_parameters != data_check_action_option
def test_data_check_action_option_to_dict(dummy_data_check_name):
data_check_action_option = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
dummy_data_check_name,
)
data_check_action_option_empty_metadata = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
dummy_data_check_name,
metadata={},
)
data_check_action_option_with_metadata = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
dummy_data_check_name,
metadata={"some detail": ["some detail value"]},
)
data_check_action_option_with_parameters = DataCheckActionOption(
DataCheckActionCode.DROP_COL,
dummy_data_check_name,
metadata={"some detail": ["some detail value"]},
parameters={
"global_parameter_name": {
"parameter_type": "global",
"type": "float",
"default_value": 0.0,
}
},
)
assert data_check_action_option.to_dict() == {
"code": DataCheckActionCode.DROP_COL.name,
"data_check_name": dummy_data_check_name,
"parameters": None,
"metadata": {"columns": None, "rows": None},
}
assert data_check_action_option_empty_metadata.to_dict() == {
"code": DataCheckActionCode.DROP_COL.name,
"data_check_name": dummy_data_check_name,
"parameters": None,
"metadata": {"columns": None, "rows": None},
}
assert data_check_action_option_with_metadata.to_dict() == {
"code": DataCheckActionCode.DROP_COL.name,
"data_check_name": dummy_data_check_name,
"parameters": None,
"metadata": {
"some detail": ["some detail value"],
"columns": None,
"rows": None,
},
}
assert data_check_action_option_with_parameters.to_dict() == {
"code": DataCheckActionCode.DROP_COL.name,
"data_check_name": dummy_data_check_name,
"metadata": {
"some detail": ["some detail value"],
"columns": None,
"rows": None,
},
"parameters": {
"global_parameter_name": {
"parameter_type": "global",
"type": "float",
"default_value": 0.0,
}
},
}
def test_convert_dict_to_action_bad_input():
    """`convert_dict_to_action` rejects dictionaries missing required keys."""
    # Both the "code" and "metadata" top-level keys are mandatory.
    dicts_missing_top_level_keys = [
        {"metadata": {"columns": None, "rows": None}},  # no "code"
        {"code": DataCheckActionCode.DROP_COL.name},  # no "metadata"
    ]
    for bad_dict in dicts_missing_top_level_keys:
        with pytest.raises(
            ValueError, match="The input dictionary should have the keys"
        ):
            DataCheckActionOption.convert_dict_to_action(bad_dict)

    # The metadata dictionary itself must contain "columns"/"rows".
    with pytest.raises(
        ValueError, match="The metadata dictionary should have the keys"
    ):
        DataCheckActionOption.convert_dict_to_action(
            {
                "code": DataCheckActionCode.DROP_COL.name,
                "metadata": {"cow": None},
            }
        )
def test_convert_dict_to_action_bad_parameter_input(dummy_data_check_name):
    """Constructing an option with malformed `parameters` raises ValueError.

    Each case pairs the expected error-message pattern with the invalid
    parameters dictionary that triggers it.
    """
    bad_parameter_cases = [
        # Missing "parameter_type".
        (
            "Each parameter must have a parameter_type key.",
            {"global_parameter_name": {"type": "float", "default_value": 0.0}},
        ),
        # Global parameter without a "type".
        (
            "Each global parameter must have a type key.",
            {
                "global_parameter_name": {
                    "parameter_type": "global",
                    "default_value": 0.0,
                }
            },
        ),
        # Column parameter without a "columns" key.
        (
            re.escape(
                "Each `column` parameter type must also have a `columns` key indicating which columns the parameter should address"
            ),
            {"columns_parameter_name": {"parameter_type": "column"}},
        ),
        # "columns" must be a dict, not a string.
        (
            re.escape(
                "`columns` must be a dictionary, where each key is the name of a column and the associated value is a dictionary of parameters for that column"
            ),
            {
                "columns_parameter_name": {
                    "parameter_type": "column",
                    "columns": "some incorrect string input",
                }
            },
        ),
        # Per-column parameter without a "type".
        (
            "Each column parameter must have a type key.",
            {
                "columns_parameter_name": {
                    "parameter_type": "column",
                    "columns": {
                        "some_column_name": {
                            "per_column_parameter": {"default_value": 0.0}
                        }
                    },
                }
            },
        ),
        # Per-column parameter without a "default_value".
        (
            "Each column parameter must have a default_value key.",
            {
                "columns_parameter_name": {
                    "parameter_type": "column",
                    "columns": {
                        "some_column_name": {
                            "per_column_parameter": {"type": "float"}
                        }
                    },
                }
            },
        ),
    ]
    for expected_match, bad_parameters in bad_parameter_cases:
        with pytest.raises(ValueError, match=expected_match):
            DataCheckActionOption(
                action_code=DataCheckActionCode.DROP_COL,
                data_check_name=dummy_data_check_name,
                metadata={"columns": None, "rows": None},
                parameters=bad_parameters,
            )
def test_convert_dict_to_action(dummy_data_check_name):
    """Valid dictionaries convert to the equivalent DataCheckActionOption."""
    # Minimal dict: no data check name, no parameters.
    converted = DataCheckActionOption.convert_dict_to_action(
        {
            "code": DataCheckActionCode.DROP_COL.name,
            "metadata": {"columns": None, "rows": None},
        }
    )
    assert converted == DataCheckActionOption(DataCheckActionCode.DROP_COL, None)

    # Dict carrying a data check name plus extra metadata entries.
    converted = DataCheckActionOption.convert_dict_to_action(
        {
            "code": DataCheckActionCode.DROP_COL.name,
            "data_check_name": dummy_data_check_name,
            "parameters": None,
            "metadata": {
                "some detail": ["this is different"],
                "columns": None,
                "rows": None,
            },
        }
    )
    assert converted == DataCheckActionOption(
        DataCheckActionCode.DROP_COL,
        dummy_data_check_name,
        metadata={"some detail": ["this is different"]},
    )
| 36.498623 | 155 | 0.617254 | 1,307 | 13,249 | 5.82785 | 0.07039 | 0.141788 | 0.151635 | 0.212288 | 0.923986 | 0.917553 | 0.844821 | 0.809374 | 0.789812 | 0.783379 | 0 | 0.002667 | 0.29255 | 13,249 | 362 | 156 | 36.599448 | 0.809986 | 0 | 0 | 0.585075 | 0 | 0.00597 | 0.171032 | 0.023247 | 0 | 0 | 0 | 0 | 0.089552 | 1 | 0.020896 | false | 0 | 0.008955 | 0 | 0.029851 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
425df296c4157089653a99e7f9b2274e453fcb1e | 8,761 | py | Python | src/environments/tests/test_permissions.py | nixplay/bullet-train-api | 608422d174443a4d9178d875ccaeb756a771e908 | [
"BSD-3-Clause"
] | null | null | null | src/environments/tests/test_permissions.py | nixplay/bullet-train-api | 608422d174443a4d9178d875ccaeb756a771e908 | [
"BSD-3-Clause"
] | null | null | null | src/environments/tests/test_permissions.py | nixplay/bullet-train-api | 608422d174443a4d9178d875ccaeb756a771e908 | [
"BSD-3-Clause"
] | null | null | null | from unittest import TestCase, mock
import pytest
from environments.models import Environment, UserEnvironmentPermission, Identity
from environments.permissions import EnvironmentPermissions, NestedEnvironmentPermissions
from organisations.models import Organisation, OrganisationRole
from projects.models import Project, UserProjectPermission, ProjectPermissionModel
from users.models import FFAdminUser
# Module-level test doubles shared by every test below: the DRF view/request
# pair is simulated with MagicMocks, and the permission classes under test
# are instantiated once.
# NOTE(review): these mocks are mutable and shared across tests, so each
# test must set every attribute it relies on; stale state can leak between
# tests that forget to do so.
mock_view = mock.MagicMock()
mock_request = mock.MagicMock()

environment_permissions = EnvironmentPermissions()
nested_environment_permissions = NestedEnvironmentPermissions()
@pytest.mark.django_db
class EnvironmentPermissionsTestCase(TestCase):
    """Tests for EnvironmentPermissions.

    Covers has_permission (create / list actions) and has_object_permission
    (delete) for organisation admins, project admins, users holding the
    CREATE_ENVIRONMENT project permission, environment admins, and plain
    organisation members.
    """

    def setUp(self) -> None:
        # One organisation containing an admin user, a regular user, and a
        # project with a single environment.
        self.organisation = Organisation.objects.create(name='Test')

        self.org_admin = FFAdminUser.objects.create(email='admin@test.com')
        self.org_admin.add_organisation(self.organisation, OrganisationRole.ADMIN)

        self.user = FFAdminUser.objects.create(email='user@test.com')
        self.user.add_organisation(self.organisation, OrganisationRole.USER)

        self.project = Project.objects.create(name='Test Project', organisation=self.organisation)
        self.environment = Environment.objects.create(name='Test Environment', project=self.project)

    def test_org_admin_can_create_environment_for_any_project(self):
        """Organisation admins may create environments in any project."""
        # Given
        mock_view.action = 'create'
        mock_view.detail = False
        mock_request.user = self.org_admin
        mock_request.data = {
            'project': self.project.id,
            'name': 'Test environment'
        }

        # When
        result = environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert result

    def test_project_admin_can_create_environment_in_project(self):
        """Project admins may create environments in their project."""
        # Given
        UserProjectPermission.objects.create(user=self.user, project=self.project, admin=True)

        mock_request.user = self.user
        mock_view.action = 'create'
        mock_view.detail = False
        mock_request.data = {
            'project': self.project.id,
            'name': 'Test environment'
        }

        # When
        result = environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert result

    def test_project_user_with_create_environment_permission_can_create_environment(self):
        """The CREATE_ENVIRONMENT project permission alone is sufficient."""
        # Given
        create_environment_permission = ProjectPermissionModel.objects.get(key="CREATE_ENVIRONMENT")
        user_project_permission = UserProjectPermission.objects.create(user=self.user, project=self.project)
        user_project_permission.permissions.set([create_environment_permission])
        mock_request.user = self.user
        mock_view.action = 'create'
        mock_view.detail = False
        mock_request.data = {
            'project': self.project.id,
            'name': 'Test environment'
        }

        # When
        result = environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert result

    def test_project_user_without_create_environment_permission_cannot_create_environment(self):
        """A plain project member may not create environments."""
        # Given
        mock_request.user = self.user
        mock_view.action = 'create'
        mock_view.detail = False
        mock_request.data = {
            'project': self.project.id,
            'name': 'Test environment'
        }

        # When
        result = environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert not result

    def test_all_users_can_list_environments_for_project(self):
        """Any organisation member may list environments."""
        # Given
        mock_view.action = 'list'
        mock_view.detail = False
        mock_request.user = self.user

        # When
        result = environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert result

    def test_organisation_admin_can_delete_environment(self):
        """Organisation admins may delete any environment."""
        # NOTE(review): DRF names the standard destroy action 'destroy',
        # not 'delete' — presumably the permission class only checks
        # view.detail here; confirm against EnvironmentPermissions.
        # Given
        mock_view.action = 'delete'
        mock_view.detail = True
        mock_request.user = self.org_admin

        # When
        result = environment_permissions.has_object_permission(mock_request, mock_view, self.environment)

        # Then
        assert result

    def test_project_admin_can_delete_environment(self):
        """Project admins may delete environments in their project."""
        # Given
        UserProjectPermission.objects.create(user=self.user, project=self.project, admin=True)
        mock_request.user = self.user
        mock_view.action = 'delete'
        mock_view.detail = True

        # When
        result = environment_permissions.has_object_permission(mock_request, mock_view, self.environment)

        # Then
        assert result

    def test_environment_admin_can_delete_environment(self):
        """Environment admins may delete their environment."""
        # Given
        UserEnvironmentPermission.objects.create(user=self.user, environment=self.environment, admin=True)
        mock_request.user = self.user
        mock_view.action = 'delete'
        mock_view.detail = True

        # When
        result = environment_permissions.has_object_permission(mock_request, mock_view, self.environment)

        # Then
        assert result

    def test_regular_user_cannot_delete_environment(self):
        """A user with no admin rights may not delete the environment."""
        # Given
        mock_request.user = self.user
        mock_view.action = 'delete'
        mock_view.detail = True

        # When
        result = environment_permissions.has_object_permission(mock_request, mock_view, self.environment)

        # Then
        assert not result
@pytest.mark.django_db
class NestedEnvironmentPermissionsTestCase(TestCase):
    """Tests for NestedEnvironmentPermissions.

    Exercises create (has_permission, environment looked up from the
    `environment_api_key` URL kwarg) and destroy (has_object_permission on
    an Identity) for organisation admins, environment admins and regular
    users.
    """

    def setUp(self) -> None:
        # Organisation with an admin, a regular user, and a project whose
        # environment contains one identity to act on.
        self.organisation = Organisation.objects.create(name='Test')

        self.org_admin = FFAdminUser.objects.create(email='admin@test.com')
        self.org_admin.add_organisation(self.organisation, OrganisationRole.ADMIN)

        self.user = FFAdminUser.objects.create(email='user@test.com')
        self.user.add_organisation(self.organisation, OrganisationRole.USER)

        self.project = Project.objects.create(name='Test Project', organisation=self.organisation)
        self.environment = Environment.objects.create(name='Test Environment', project=self.project)
        self.identity = Identity.objects.create(identifier='test-identity', environment=self.environment)

    def test_organisation_admin_has_create_permission(self):
        """Organisation admins may create nested objects in the environment."""
        # Given
        mock_view.action = 'create'
        mock_view.detail = False
        mock_request.user = self.org_admin
        mock_view.kwargs = {
            'environment_api_key': self.environment.api_key
        }

        # When
        result = nested_environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert result

    def test_environment_admin_has_create_permission(self):
        """Environment admins may create nested objects in their environment."""
        # Given
        UserEnvironmentPermission.objects.create(user=self.user, environment=self.environment, admin=True)
        mock_view.action = 'create'
        mock_view.detail = False
        mock_view.kwargs = {
            'environment_api_key': self.environment.api_key
        }
        mock_request.user = self.user

        # When
        result = nested_environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert result

    def test_regular_user_does_not_have_create_permission(self):
        """A plain organisation member may not create nested objects."""
        # Given
        mock_view.action = 'create'
        mock_view.detail = False
        mock_request.user = self.user
        mock_view.kwargs = {
            'environment_api_key': self.environment.api_key
        }

        # When
        result = nested_environment_permissions.has_permission(mock_request, mock_view)

        # Then
        assert not result

    def test_organisation_admin_has_destroy_permission(self):
        """Organisation admins may destroy identities in any environment."""
        # Given
        mock_view.action = 'destroy'
        mock_view.detail = True
        mock_request.user = self.org_admin

        # When
        result = nested_environment_permissions.has_object_permission(mock_request, mock_view, self.identity)

        # Then
        assert result

    def test_environment_admin_has_destroy_permission(self):
        """Environment admins may destroy identities in their environment."""
        # Given
        UserEnvironmentPermission.objects.create(user=self.user, environment=self.environment, admin=True)
        mock_view.action = 'destroy'
        mock_view.detail = True
        mock_request.user = self.user

        # When
        result = nested_environment_permissions.has_object_permission(mock_request, mock_view, self.identity)

        # Then
        assert result

    def test_regular_user_does_not_have_destroy_permission(self):
        """A user with no admin rights may not destroy the identity."""
        # Given
        mock_view.action = 'destroy'
        mock_view.detail = True
        mock_request.user = self.user

        # When
        result = nested_environment_permissions.has_object_permission(mock_request, mock_view, self.identity)

        # Then
        assert not result
| 33.311787 | 109 | 0.687364 | 949 | 8,761 | 6.075869 | 0.086407 | 0.067985 | 0.03538 | 0.049428 | 0.818765 | 0.802289 | 0.778529 | 0.773847 | 0.759799 | 0.736386 | 0 | 0 | 0.235133 | 8,761 | 262 | 110 | 33.438931 | 0.860469 | 0.02728 | 0 | 0.741935 | 0 | 0 | 0.047782 | 0 | 0 | 0 | 0 | 0 | 0.096774 | 1 | 0.109677 | false | 0 | 0.045161 | 0 | 0.167742 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4291bb12703898adb26d53c5247275ca24e840be | 1,649 | py | Python | api/src/tests/integration/provider/fields/test_task_model.py | sedlar/work-tracking | 78917ff8200829eb674142ce43b503d8e892d7eb | [
"BSD-2-Clause"
] | null | null | null | api/src/tests/integration/provider/fields/test_task_model.py | sedlar/work-tracking | 78917ff8200829eb674142ce43b503d8e892d7eb | [
"BSD-2-Clause"
] | null | null | null | api/src/tests/integration/provider/fields/test_task_model.py | sedlar/work-tracking | 78917ff8200829eb674142ce43b503d8e892d7eb | [
"BSD-2-Clause"
] | null | null | null | import pytest
from tests.integration.factories.objs import create_task
from wt.fields.tasks import DuplicateTaskReceived
from wt.ids import EntityId
# Entity that every task fixture in this module is attached to.
OBJECT_ID = EntityId("PRJ-15")
def test_add_tasks(tasks_model):
    """Tasks stored for an entity are read back unchanged."""
    expected = [create_task(name) for name in ("a", "b")]
    tasks_model.set_entity_tasks(OBJECT_ID, expected)
    assert tasks_model.get_entity_tasks(OBJECT_ID) == expected
def test_add_more_tasks(tasks_model):
    """Re-setting with a superset replaces the stored tasks with the superset."""
    tasks_model.set_entity_tasks(
        OBJECT_ID, [create_task(name) for name in ("a", "b")]
    )

    expected = [create_task(name) for name in ("a", "b", "c")]
    tasks_model.set_entity_tasks(OBJECT_ID, expected)
    assert tasks_model.get_entity_tasks(OBJECT_ID) == expected
def test_remove_tasks(tasks_model):
    """Re-setting with a subset drops the tasks that were left out."""
    tasks_model.set_entity_tasks(
        OBJECT_ID, [create_task(name) for name in ("a", "b")]
    )

    expected = [create_task("a")]
    tasks_model.set_entity_tasks(OBJECT_ID, expected)
    assert tasks_model.get_entity_tasks(OBJECT_ID) == expected
def test_add_remove_tasks(tasks_model):
    """A set that both drops and adds tasks is stored exactly as given."""
    tasks_model.set_entity_tasks(
        OBJECT_ID, [create_task(name) for name in ("a", "b")]
    )

    expected = [create_task(name) for name in ("b", "c")]
    tasks_model.set_entity_tasks(OBJECT_ID, expected)
    assert tasks_model.get_entity_tasks(OBJECT_ID) == expected
def test_duplicate_tasks(tasks_model):
    """Submitting the same task twice raises DuplicateTaskReceived."""
    duplicated = [create_task("a"), create_task("a")]
    with pytest.raises(DuplicateTaskReceived):
        tasks_model.set_entity_tasks(OBJECT_ID, duplicated)
| 30.537037 | 66 | 0.751364 | 240 | 1,649 | 4.758333 | 0.154167 | 0.148862 | 0.178634 | 0.19965 | 0.816112 | 0.816112 | 0.816112 | 0.816112 | 0.783713 | 0.734676 | 0 | 0.001406 | 0.137659 | 1,649 | 53 | 67 | 31.113208 | 0.801688 | 0 | 0 | 0.542857 | 0 | 0 | 0.013341 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 1 | 0.142857 | false | 0 | 0.114286 | 0 | 0.257143 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
42a06444cb22bd4a7c44cd26de5ff21cb7c4e630 | 8,979 | py | Python | python/pytest/test_check.py | oooo26/abess | e2715db6ba0b5fcee9a82603692753ea3ceb1b4e | [
"Intel"
] | 2 | 2021-06-06T03:56:44.000Z | 2021-08-07T02:00:46.000Z | python/pytest/test_check.py | oooo26/abess | e2715db6ba0b5fcee9a82603692753ea3ceb1b4e | [
"Intel"
] | null | null | null | python/pytest/test_check.py | oooo26/abess | e2715db6ba0b5fcee9a82603692753ea3ceb1b4e | [
"Intel"
] | null | null | null | import abess
import numpy as np
class TestCheck:
    """
    Test for argument error, which should be recognized before the algorithm.

    Every case builds an estimator with one invalid argument (or passes
    invalid data) and expects a ValueError at construction or during
    `fit`/`predict`. The repeated try/except/else boilerplate of the
    original file is factored into `_expect_value_error`; the exact
    duplicate `primary_model_fit_epsilon=-1` case has been removed.
    """

    @staticmethod
    def _expect_value_error(run):
        """Call `run` and fail the test unless it raises ValueError.

        The caught message is printed (matching the original per-case
        behaviour); any non-ValueError exception propagates unchanged.
        """
        try:
            run()
        except ValueError as e:
            print(e)
        else:
            assert False

    @staticmethod
    def test_fit():
        """Invalid arguments to LinearRegression/LogisticRegression fit."""
        check = TestCheck._expect_value_error

        # path
        check(lambda: abess.LinearRegression(path_type='other').fit([[1]], [1]))
        check(lambda: abess.LinearRegression(support_size=[3]).fit([[1]], [1]))
        check(lambda: abess.LinearRegression(
            path_type='gs', s_min=1, s_max=0).fit([[1]], [1]))

        # ic
        check(lambda: abess.LinearRegression(ic_type='other').fit([[1]], [1]))

        # exchange_num
        check(lambda: abess.LinearRegression(exchange_num=-1).fit([[1]], [1]))

        # screening_size
        check(lambda: abess.LinearRegression(screening_size=3).fit([[1]], [1]))
        check(lambda: abess.LinearRegression(
            support_size=[2], screening_size=1).fit([[1, 2, 3]], [1]))

        # primary_fit_xxx
        check(lambda: abess.LogisticRegression(
            primary_model_fit_max_iter=0.5).fit([[1]], [1]))
        check(lambda: abess.LogisticRegression(
            primary_model_fit_epsilon=-1).fit([[1]], [1]))

        # thread
        check(lambda: abess.LinearRegression(thread=-1).fit([[1]], [1]))

        # splicing_type
        check(lambda: abess.LinearRegression(splicing_type=-1).fit([[1]], [1]))

        # cv
        check(lambda: abess.LinearRegression(cv=2).fit([[1]], [1]))
        check(lambda: abess.LinearRegression(cv=2).fit(
            [[1], [1]], [1, 1], cv_fold_id=[1, 1]))
        check(lambda: abess.LinearRegression(cv=2).fit(
            [[1], [1]], [1, 1], cv_fold_id=[1, 2, 1]))

        model = abess.LinearRegression()

        # datatype error
        check(lambda: model.fit([['c', 1, 1]], [1]))
        check(lambda: model.fit([[1, 1, 1]], [1], weight=['c']))
        check(lambda: abess.LinearRegression(cv='c').fit([[1]], [1]))

        # incompatible shape
        check(lambda: model.fit([1, 1, 1], [1]))
        check(lambda: model.fit([[1, 1, 1]], [1, 2]))

        def predict_with_wrong_feature_count():
            # fit succeeds with 3 features; predicting on 2 must fail.
            model.fit([[1, 1, 1]], [1])
            model.predict([[1, 1]])

        check(predict_with_wrong_feature_count)
        check(lambda: model.fit([[1, 1, 1]], [1], weight=[1, 2]))
        check(lambda: model.fit([[1, 1, 1]], [1], group=[1]))

        # lack of necessary parameter
        check(lambda: model.fit(X=[[1]]))
        check(lambda: model.fit(y=[1]))

    @staticmethod
    def test_pca():
        """
        For `abess.decomposition.SparsePCA.fit`.
        """
        check = TestCheck._expect_value_error
        model = abess.SparsePCA()

        # datatype error
        check(lambda: model.fit([['c']]))
        check(lambda: model.fit(Sigma=[['c']]))
        check(lambda: model.fit(Sigma=[[np.nan]]))
        check(lambda: abess.SparsePCA(cv='c').fit([[1]]))

        # incompatible shape
        check(lambda: model.fit([1]))
        check(lambda: model.fit(Sigma=[1]))
        check(lambda: model.fit([[1]], group=[1, 2]))
        check(lambda: abess.SparsePCA(
            support_size=np.array([1, 2])).fit([[1]]))

        # lack of necessary parameter
        check(lambda: model.fit())
        check(lambda: abess.SparsePCA(cv=5).fit(Sigma=[[1]]))

        # number
        check(lambda: model.fit([[1]], [1], number=-1))

        # invalid sigma (not symmetric / negative diagonal entries)
        check(lambda: model.fit(Sigma=[[1, 0], [1, 0]]))
        check(lambda: model.fit(Sigma=[[-1, 0], [0, -1]]))

        # invalid arg
        check(lambda: abess.SparsePCA(ic_type='other').fit([[1]]))
        check(lambda: abess.SparsePCA(cv=5).fit([[1]]))

    @staticmethod
    def test_rpca():
        """For `abess.decomposition.RobustPCA.fit`."""
        check = TestCheck._expect_value_error
        model = abess.RobustPCA()

        # datatype error
        check(lambda: model.fit([['c']], r=1))
        check(lambda: model.fit([[1]], r='c'))

        # incompatible shape
        check(lambda: model.fit([1], r=1))
        check(lambda: model.fit(1, r=1))
        check(lambda: model.fit([[1]], r=1, group=[1, 2]))

        # invalid arg
        check(lambda: abess.RobustPCA(ic_type='other').fit([[1]], r=1))
| 23.082262 | 77 | 0.440806 | 940 | 8,979 | 4.16383 | 0.090426 | 0.188043 | 0.211548 | 0.223301 | 0.841339 | 0.835462 | 0.785897 | 0.774144 | 0.774144 | 0.774144 | 0 | 0.02964 | 0.455173 | 8,979 | 388 | 78 | 23.141753 | 0.770442 | 0.04388 | 0 | 0.821656 | 0 | 0 | 0.003518 | 0 | 0 | 0 | 0 | 0 | 0.146497 | 1 | 0.009554 | false | 0 | 0.006369 | 0 | 0.019108 | 0.146497 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
c4757f3946c3fb6d6168f43177c23d01c4a77a0d | 14,300 | py | Python | ios/build/bots/scripts/shard_util_test.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 76 | 2020-09-02T03:05:41.000Z | 2022-03-30T04:40:55.000Z | ios/build/bots/scripts/shard_util_test.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 45 | 2020-09-02T03:21:37.000Z | 2022-03-31T22:19:45.000Z | ios/build/bots/scripts/shard_util_test.py | zealoussnow/chromium | fd8a8914ca0183f0add65ae55f04e287543c7d4a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 8 | 2020-07-22T18:49:18.000Z | 2022-02-08T10:27:16.000Z | #!/usr/bin/env vpython
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
from mock import patch
import os
import unittest
import shard_util
# Sample otool output for a debug build (pre-Xcode-11.4 toolchain): each
# 'name ... <Class>' line introduces a test class and each
# 'imp ... -[Class testX]' line lists one of its test methods; the other
# lines are noise the parser must skip.
DEBUG_APP_OTOOL_OUTPUT = '\n'.join([
    'Meta Class', 'name 0x1064b8438 CacheTestCase',
    'baseMethods 0x1068586d8 (struct method_list_t *)',
    'imp 0x1075e6887 -[CacheTestCase testA]', 'types 0x1064cc3e1',
    'imp 0x1075e6887 -[CacheTestCase testB]',
    'imp 0x1075e6887 -[CacheTestCase testc]', 'name 0x1064b8438 TabUITestCase',
    'baseMethods 0x1068586d8 (struct method_list_t *)',
    'imp 0x1075e6887 -[TabUITestCase testD]', 'types 0x1064cc3e1 v16@0:8',
    'imp 0x1075e6887 -[TabUITestCase testE]',
    'name 0x1064b8438 KeyboardTestCase',
    'imp 0x1075e6887 -[KeyboardTestCase testF]',
    'name 0x1064b8438 PasswordsTestCase',
    'imp 0x1075e6887 -[PasswordsTestCase testG]',
    'name 0x1064b8438 ToolBarTestCase',
    'imp 0x1075e6887 -[ToolBarTestCase testH]',
    'imp 0x1075e6887 -[ToolBarTestCase DISABLED_testI]',
    'imp 0x1075e6887 -[ToolBarTestCase FLAKY_testJ]', 'version 0'
])
# Debug app otool output format in Xcode 11.4 toolchain.
DEBUG_APP_OTOOL_OUTPUT_114 = '\n'.join([
'Meta Class', 'name 0x1064b8438 CacheTestCase',
'baseMethods 0x1068586d8 (struct method_list_t *)',
' imp 0x1075e6887 -[CacheTestCase testA]', ' types 0x1064cc3e1',
' imp 0x1075e6887 -[CacheTestCase testB]',
' imp 0x1075e6887 -[CacheTestCase testc]',
' name 0x1064b8438 TabUITestCase',
'baseMethods 0x1068586d8 (struct method_list_t *)',
' imp 0x1075e6887 -[TabUITestCase testD]',
' types 0x1064cc3e1 v16@0:8',
' imp 0x1075e6887 -[TabUITestCase testE]',
' name 0x1064b8438 KeyboardTestCase',
' imp 0x1075e6887 -[KeyboardTestCase testF]',
' name 0x1064b8438 PasswordsTestCase',
' imp 0x1075e6887 -[PasswordsTestCase testG]',
' name 0x1064b8438 ToolBarTestCase',
' imp 0x1075e6887 -[ToolBarTestCase testH]',
' imp 0x1075e6887 -[ToolBarTestCase DISABLED_testI]',
' imp 0x1075e6887 -[ToolBarTestCase FLAKY_testJ]', 'version 0'
])
# Sample otool output for a release build: there are no '-[Class method]'
# imp lines; class names and selector names appear as separate
# 'name 0x... <token>' entries, with each method group opened and closed
# by the same class-name line (which is why class names must occur in
# pairs — see test_fetch_test_error_release).
RELEASE_APP_OTOOL_OUTPUT = '\n'.join([
    'Meta Class', 'name 0x1064b8438 CacheTestCase',
    'baseMethods 0x1068586d8 (struct method_list_t *)',
    'name 0x1075e6887 testA', 'types 0x1064cc3e1', 'name 0x1075e6887 testB',
    'name 0x1075e6887 testc', 'baseProtocols 0x0',
    'name 0x1064b8438 CacheTestCase', 'unrelated line', 'Meta Class',
    'name 0x1064b8438 TabUITestCase', 'no test methods in this case',
    'name 0x1064b8438 TabUITestCase', 'unrelated line',
    'baseMethods 0x1068586d8 (struct method_list_t *)',
    'name 0x1064b8438 KeyboardTest', 'name 0x1075e6887 testD',
    'types 0x1064cc3e1 v16@0:8', 'name 0x1075e6887 testE',
    'name 0x1075e6887 testF', 'baseProtocols 0x0',
    'name 0x1064b8438 KeyboardTest', 'name 0x1075e6887 testUnrelatedG',
    'unrelated line', 'name 0x1064b8438 ChromeTestCase',
    'name 0x1064b8438 setUp', 'name 0x1064b8438 testPort',
    'name 0x5345ac561 testSomeUnrelatedUtil', 'baseProtocols 0x0',
    'name 0x1064b8438 ChromeTestCase', 'unrelated line',
    'name 0x1064b8438 invalidTestCase', 'name 0x1075e6887 testG',
    'baseProtocols 0x0', 'name 0x1064b8438 ToolBarTestCase',
    'name 0x1075e6887 testG', 'name 0x1075e6887 testH',
    'name 0x1075e6887 DISABLED_testI', 'name 0x1075e6887 FLAKY_testJ',
    'name 0x1064b8438 ToolBarTestCase', 'baseProtocols 0x0', 'version 0'
])
# Same as RELEASE_APP_OTOOL_OUTPUT, except the closing
# 'name ... KeyboardTest' line is missing, so that class name does not
# appear in a pair — used to exercise the ShardingError path.
RELEASE_APP_OTOOL_OUTPUT_CLASS_NOT_IN_PAIRS = '\n'.join([
    'Meta Class', 'name 0x1064b8438 CacheTestCase',
    'baseMethods 0x1068586d8 (struct method_list_t *)',
    'name 0x1075e6887 testA', 'types 0x1064cc3e1', 'name 0x1075e6887 testB',
    'name 0x1075e6887 testc', 'baseProtocols 0x0',
    'name 0x1064b8438 CacheTestCase', 'unrelated line', 'Meta Class',
    'name 0x1064b8438 TabUITestCase', 'no test methods in this case',
    'name 0x1064b8438 TabUITestCase', 'unrelated line',
    'baseMethods 0x1068586d8 (struct method_list_t *)',
    'name 0x1064b8438 KeyboardTest', 'name 0x1075e6887 testD',
    'types 0x1064cc3e1 v16@0:8', 'name 0x1075e6887 testE',
    'name 0x1075e6887 testF', 'baseProtocols 0x0',
    'name 0x1075e6887 testUnrelatedG', 'unrelated line',
    'name 0x1064b8438 ChromeTestCase', 'name 0x1064b8438 setUp',
    'name 0x1064b8438 testPort', 'name 0x5345ac561 testSomeUnrelatedUtil',
    'baseProtocols 0x0', 'name 0x1064b8438 ChromeTestCase', 'unrelated line',
    'name 0x1064b8438 invalidTestCase', 'name 0x1075e6887 testG',
    'baseProtocols 0x0', 'name 0x1064b8438 ToolBarTestCase',
    'name 0x1075e6887 testG', 'name 0x1075e6887 testH',
    'name 0x1075e6887 DISABLED_testI', 'name 0x1075e6887 FLAKY_testJ',
    'name 0x1064b8438 ToolBarTestCase', 'baseProtocols 0x0', 'version 0'
])
# Release app otool output format in Xcode 11.4 toolchain.
RELEASE_APP_OTOOL_OUTPUT_114 = '\n'.join([
'Meta Class', ' name 0x1064b8438 CacheTestCase',
'baseMethods 0x1068586d8 (struct method_list_t *)',
' name 0x1075e6887 testA', ' types 0x1064cc3e1',
' name 0x1075e6887 testB', ' name 0x1075e6887 testc',
'baseProtocols 0x0', ' name 0x1064b8438 CacheTestCase',
'unrelated line', 'Meta Class', ' name 0x1064b8438 TabUITestCase',
'no test methods in this case', ' name 0x1064b8438 TabUITestCase',
'unrelated line', 'baseMethods 0x1068586d8 (struct method_list_t *)',
' name 0x1064b8438 KeyboardTest', ' name 0x1075e6887 testD',
' types 0x1064cc3e1 v16@0:8', ' name 0x1075e6887 testE',
' name 0x1075e6887 testF', 'baseProtocols 0x0',
' name 0x1064b8438 KeyboardTest',
' name 0x1075e6887 testUnrelatedG', 'unrelated line',
' name 0x1064b8438 ChromeTestCase', ' name 0x1064b8438 setUp',
' name 0x1064b8438 testPort',
' name 0x5345ac561 testSomeUnrelatedUtil', 'baseProtocols 0x0',
' name 0x1064b8438 ChromeTestCase', 'unrelated line',
' name 0x1064b8438 invalidTestCase', ' name 0x1075e6887 testG',
'baseProtocols 0x0', ' name 0x1064b8438 ToolBarTestCase',
' name 0x1075e6887 testG', ' name 0x1075e6887 testH',
' name 0x1075e6887 DISABLED_testI',
' name 0x1075e6887 FLAKY_testJ',
' name 0x1064b8438 ToolBarTestCase', 'baseProtocols 0x0', 'version 0'
])
class TestShardUtil(unittest.TestCase):
"""Test cases for shard_util.py"""
@patch('shard_util.os.path.abspath')
def test_determine_path_non_eg2(self, mock_abspath):
mock_abspath.return_value = '/b/s/w/ir/ios/build/bots/scripts/share_util.py'
app = 'some_ios_test.app'
actual_path = shard_util.determine_app_path(app)
expected_path = os.path.join('/b/s/w/ir', 'out/Debug', app, 'some_ios_test')
self.assertEqual(actual_path, expected_path)
@patch('shard_util.os.path.abspath')
def test_determine_path_eg2(self, mock_abspath):
mock_abspath.return_value = '/b/s/w/ir/ios/build/bots/scripts/share_util.py'
app = 'some_ios_test-Runner.app'
host = 'some_host.app'
actual_path = shard_util.determine_app_path(app, host)
expected_path = os.path.join('/b/s/w/ir', 'out/Debug', app, 'PlugIns',
'some_ios_test.xctest', 'some_ios_test')
self.assertEqual(actual_path, expected_path)
@patch('shard_util.os.path.abspath')
def test_determine_path_eg2_release(self, mock_abspath):
mock_abspath.return_value = '/b/s/w/ir/ios/build/bots/scripts/share_util.py'
app = 'some_ios_test-Runner.app'
host = 'some_host.app'
actual_path = shard_util.determine_app_path(app, host, True)
expected_path = os.path.join('/b/s/w/ir', 'out/Release', app, 'PlugIns',
'some_ios_test.xctest', 'some_ios_test')
self.assertEqual(actual_path, expected_path)
def test_fetch_test_names_debug(self):
"""Ensures that the debug output is formatted correctly"""
resp = shard_util.fetch_test_names_for_debug(DEBUG_APP_OTOOL_OUTPUT)
self.assertEqual(len(resp), 10)
expected_test_names = [
('CacheTestCase', 'testA'),
('CacheTestCase', 'testB'),
('CacheTestCase', 'testc'),
('TabUITestCase', 'testD'),
('TabUITestCase', 'testE'),
('KeyboardTestCase', 'testF'),
('PasswordsTestCase', 'testG'),
('ToolBarTestCase', 'testH'),
('ToolBarTestCase', 'DISABLED_testI'),
('ToolBarTestCase', 'FLAKY_testJ'),
]
for test_name in expected_test_names:
self.assertTrue(test_name in resp)
test_cases = map(lambda (test_case, test_method): test_case, resp)
# ({'CacheTestCase': 3, 'TabUITestCase': 2, 'PasswordsTestCase': 1,
# 'KeyboardTestCase': 1, 'ToolBarTestCase': 3})
counts = collections.Counter(test_cases).most_common()
name, _ = counts[0]
self.assertEqual(name, 'ToolBarTestCase')
def test_fetch_test_counts_release(self):
"""Ensures that the release output is formatted correctly"""
resp = shard_util.fetch_test_names_for_release(RELEASE_APP_OTOOL_OUTPUT)
self.assertEqual(len(resp), 10)
expected_test_names = [
('CacheTestCase', 'testA'),
('CacheTestCase', 'testB'),
('CacheTestCase', 'testc'),
('KeyboardTest', 'testD'),
('KeyboardTest', 'testE'),
('KeyboardTest', 'testF'),
('ToolBarTestCase', 'testG'),
('ToolBarTestCase', 'testH'),
('ToolBarTestCase', 'DISABLED_testI'),
('ToolBarTestCase', 'FLAKY_testJ'),
]
for test_name in expected_test_names:
self.assertTrue(test_name in resp)
test_cases = map(lambda (test_case, test_method): test_case, resp)
# ({'KeyboardTest': 3, 'CacheTestCase': 3,
# 'ToolBarTestCase': 4})
counts = collections.Counter(test_cases).most_common()
name, _ = counts[0]
self.assertEqual(name, 'ToolBarTestCase')
def test_fetch_test_error_release(self):
    """Ensures that unexpected release output raises a ShardingError."""
    with self.assertRaises(shard_util.ShardingError) as context:
        shard_util.fetch_test_names_for_release(
            RELEASE_APP_OTOOL_OUTPUT_CLASS_NOT_IN_PAIRS)
    expected_message = (
        'Incorrect otool output in which a test class name doesn\'t appear in '
        'group of 2. Test class: KeyboardTest')
    # assertIn reports both strings on failure, unlike assertTrue(x in y).
    self.assertIn(expected_message, str(context.exception))
def test_fetch_test_names_debug_114(self):
    """Test the debug output from otool in Xcode 11.4.

    Same expectations as test_fetch_test_names_debug but parsing the
    Xcode 11.4 flavor of the otool output.
    """
    resp = shard_util.fetch_test_names_for_debug(DEBUG_APP_OTOOL_OUTPUT_114)
    self.assertEqual(len(resp), 10)
    expected_test_names = [
        ('CacheTestCase', 'testA'),
        ('CacheTestCase', 'testB'),
        ('CacheTestCase', 'testc'),
        ('TabUITestCase', 'testD'),
        ('TabUITestCase', 'testE'),
        ('KeyboardTestCase', 'testF'),
        ('PasswordsTestCase', 'testG'),
        ('ToolBarTestCase', 'testH'),
        ('ToolBarTestCase', 'DISABLED_testI'),
        ('ToolBarTestCase', 'FLAKY_testJ'),
    ]
    for test_name in expected_test_names:
        # assertIn reports both operands on failure, unlike assertTrue(x in y).
        self.assertIn(test_name, resp)
    # Tuple-parameter lambdas (lambda (a, b): ...) are Python 2-only syntax
    # (removed by PEP 3113); a comprehension works on both Python 2 and 3.
    test_cases = [test_case for test_case, _ in resp]
    # ({'CacheTestCase': 3, 'TabUITestCase': 2, 'PasswordsTestCase': 1,
    #   'KeyboardTestCase': 1, 'ToolBarTestCase': 3})
    counts = collections.Counter(test_cases).most_common()
    name, _ = counts[0]
    self.assertEqual(name, 'ToolBarTestCase')
def test_fetch_test_counts_release_114(self):
    """Test the release output from otool in Xcode 11.4.

    Same expectations as test_fetch_test_counts_release but parsing the
    Xcode 11.4 flavor of the otool output.
    """
    resp = shard_util.fetch_test_names_for_release(RELEASE_APP_OTOOL_OUTPUT_114)
    self.assertEqual(len(resp), 10)
    expected_test_names = [
        ('CacheTestCase', 'testA'),
        ('CacheTestCase', 'testB'),
        ('CacheTestCase', 'testc'),
        ('KeyboardTest', 'testD'),
        ('KeyboardTest', 'testE'),
        ('KeyboardTest', 'testF'),
        ('ToolBarTestCase', 'testG'),
        ('ToolBarTestCase', 'testH'),
        ('ToolBarTestCase', 'DISABLED_testI'),
        ('ToolBarTestCase', 'FLAKY_testJ'),
    ]
    for test_name in expected_test_names:
        # assertIn reports both operands on failure, unlike assertTrue(x in y).
        self.assertIn(test_name, resp)
    # Tuple-parameter lambdas (lambda (a, b): ...) are Python 2-only syntax
    # (removed by PEP 3113); a comprehension works on both Python 2 and 3.
    test_cases = [test_case for test_case, _ in resp]
    # ({'KeyboardTest': 3, 'CacheTestCase': 3,
    #   'ToolBarTestCase': 4})
    counts = collections.Counter(test_cases).most_common()
    name, _ = counts[0]
    self.assertEqual(name, 'ToolBarTestCase')
def test_balance_into_sublists_debug(self):
    """Ensure the balancing algorithm works on the debug test counts."""
    resp = shard_util.fetch_test_names_for_debug(DEBUG_APP_OTOOL_OUTPUT)
    # Tuple-parameter lambdas (lambda (a, b): ...) are Python 2-only syntax
    # (removed by PEP 3113); a comprehension works on both Python 2 and 3.
    test_cases = [test_case for test_case, _ in resp]
    test_counts = collections.Counter(test_cases)
    sublists_1 = shard_util.balance_into_sublists(test_counts, 1)
    self.assertEqual(len(sublists_1), 1)
    self.assertEqual(len(sublists_1[0]), 5)
    sublists_3 = shard_util.balance_into_sublists(test_counts, 3)
    self.assertEqual(len(sublists_3), 3)
    # CacheTestCase has 3,
    # TabUITestCase has 2, ToolBarTestCase has 4
    # PasswordsTestCase has 1, KeyboardTestCase has 1
    # They will be balanced into:
    # [[ToolBarTestCase], [CacheTestCase, PasswordsTestCase],
    #  [TabUITestCase, KeyboardTestCase]]
    self.assertEqual(
        sorted(len(sublist) for sublist in sublists_3), [1, 2, 2])
def test_balance_into_sublists_release(self):
    """Ensure the balancing algorithm works on the release test counts."""
    resp = shard_util.fetch_test_names_for_release(RELEASE_APP_OTOOL_OUTPUT)
    # Tuple-parameter lambdas (lambda (a, b): ...) are Python 2-only syntax
    # (removed by PEP 3113); a comprehension works on both Python 2 and 3.
    test_cases = [test_case for test_case, _ in resp]
    test_counts = collections.Counter(test_cases)
    sublists_3 = shard_util.balance_into_sublists(test_counts, 3)
    self.assertEqual(len(sublists_3), 3)
    # KeyboardTest has 3
    # CacheTestCase has 3
    # ToolbarTest Case has 4
    # They will be balanced as one in each shard.
    self.assertEqual(len(sublists_3[0]), 1)
    self.assertEqual(len(sublists_3[1]), 1)
    self.assertEqual(len(sublists_3[2]), 1)
if __name__ == '__main__':
    # Run this module's test suite when executed directly.
    unittest.main()
| 44.272446 | 80 | 0.685315 | 1,580 | 14,300 | 6.001266 | 0.120253 | 0.075933 | 0.020671 | 0.035963 | 0.893061 | 0.882514 | 0.864375 | 0.860367 | 0.857203 | 0.84613 | 0 | 0.111498 | 0.195944 | 14,300 | 322 | 81 | 44.409938 | 0.713168 | 0.068392 | 0 | 0.709804 | 0 | 0 | 0.490386 | 0.025353 | 0 | 0 | 0.1118 | 0 | 0.098039 | 0 | null | null | 0.023529 | 0.019608 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
c4862f6aa6a5480b8333bc612d5bd66a51f5967b | 70 | py | Python | tests/test_zsl_jwt.py | AtteqCom/zsl_jwt | e1a81b756805b8d4d1ce371a562bc4c003d88df9 | [
"BSD-2-Clause"
] | null | null | null | tests/test_zsl_jwt.py | AtteqCom/zsl_jwt | e1a81b756805b8d4d1ce371a562bc4c003d88df9 | [
"BSD-2-Clause"
] | null | null | null | tests/test_zsl_jwt.py | AtteqCom/zsl_jwt | e1a81b756805b8d4d1ce371a562bc4c003d88df9 | [
"BSD-2-Clause"
] | null | null | null | def test_assert_import_works():
    """Smoke test: the zsl_jwt package must be importable and truthy."""
    import zsl_jwt
    assert zsl_jwt
| 17.5 | 31 | 0.757143 | 11 | 70 | 4.363636 | 0.636364 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 70 | 3 | 32 | 23.333333 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0.333333 | true | 0 | 0.666667 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
676148c34d9a0b53c6a54011665bfea54a75dfd9 | 147 | py | Python | degvabank/degvabank/apps/account/utils.py | Vixx-X/DEGVABanck-backend | de413d55b55dba25e89b7f3bc60dfa94e89ddcde | [
"MIT"
] | null | null | null | degvabank/degvabank/apps/account/utils.py | Vixx-X/DEGVABanck-backend | de413d55b55dba25e89b7f3bc60dfa94e89ddcde | [
"MIT"
] | null | null | null | degvabank/degvabank/apps/account/utils.py | Vixx-X/DEGVABanck-backend | de413d55b55dba25e89b7f3bc60dfa94e89ddcde | [
"MIT"
] | 1 | 2022-02-03T03:18:43.000Z | 2022-02-03T03:18:43.000Z | def is_account(account_number):
    # An account number is considered well-formed iff it is exactly 20
    # characters long (assumes account_number is a string -- TODO confirm).
    return len(account_number) == 20
def is_valid_account(account_number):
    # Currently delegates to is_account: "valid" == well-formed (20 chars).
    return is_account(account_number)
| 21 | 37 | 0.782313 | 21 | 147 | 5.095238 | 0.380952 | 0.485981 | 0.560748 | 0.411215 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015748 | 0.136054 | 147 | 6 | 38 | 24.5 | 0.826772 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
677dbab71ab5e74540f99a6251578ac505c516e5 | 3,141 | py | Python | tests/test_q0204.py | mirzadm/ctci-5th-py | ba2f4de0aba4c7c04d7e0ddf3120ce312d9e5d66 | [
"MIT"
] | null | null | null | tests/test_q0204.py | mirzadm/ctci-5th-py | ba2f4de0aba4c7c04d7e0ddf3120ce312d9e5d66 | [
"MIT"
] | 1 | 2018-07-04T23:10:20.000Z | 2018-07-04T23:10:20.000Z | tests/test_q0204.py | mirzadm/ctci-5th-py | ba2f4de0aba4c7c04d7e0ddf3120ce312d9e5d66 | [
"MIT"
] | null | null | null | """Unit tests for q0204.py."""
import unittest
from src.utils.linkedlist import LinkedList
from src.q0204 import partition_linkedlist_around_value as partition
class TestPartitionLinkedList(unittest.TestCase):
    """Tests for partition_linkedlist_around_value."""

    def test_partition_linkedlist(self):
        # A missing list partitions to None.
        self.assertIsNone(partition(None, 1))

        # Each case: (initial contents, pivot value, expected contents).
        cases = [
            ([], 1, []),
            ([3], 1, [3]),
            ([3], 3, [3]),
            ([3], 5, [3]),
            ([10, 20, 30], 10, [30, 10, 20]),
            ([10, 20, 30], 20, [10, 30, 20]),
            ([10, 20, 30], 30, [10, 20, 30]),
            ([10, 20, 30], 40, [10, 20, 30]),
        ]
        for initial, pivot, expected in cases:
            linked_list = LinkedList()
            # insert_at_head reverses insertion order, so feed the values
            # back-to-front to build `initial` in order.
            for value in reversed(initial):
                linked_list.insert_at_head(value)
            self.assertEqual(linked_list.convert_to_list(), initial)
            partitioned = partition(linked_list, pivot)
            # The very same list object comes back, partitioned in place.
            self.assertIs(partitioned, linked_list)
            self.assertEqual(partitioned.convert_to_list(), expected)
if __name__ == '__main__':
    # Run this module's test suite when executed directly.
    unittest.main()
| 39.2625 | 71 | 0.690863 | 424 | 3,141 | 4.709906 | 0.101415 | 0.355533 | 0.132198 | 0.152228 | 0.834752 | 0.834752 | 0.834752 | 0.815724 | 0.815724 | 0.815724 | 0 | 0.040767 | 0.203438 | 3,141 | 79 | 72 | 39.759494 | 0.757394 | 0.021968 | 0 | 0.714286 | 0 | 0 | 0.002614 | 0 | 0 | 0 | 0 | 0 | 0.396825 | 1 | 0.015873 | false | 0 | 0.047619 | 0 | 0.079365 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
678d96468f1fe15679d4638dedc96eeac87e787c | 14,295 | py | Python | client_test_cases/xbridge_client_canceltx_test.py | Palem1988/blocknetdx-utils | 2d91f3a2b416ff49949cd105de733b4585e3c8b0 | [
"MIT"
] | null | null | null | client_test_cases/xbridge_client_canceltx_test.py | Palem1988/blocknetdx-utils | 2d91f3a2b416ff49949cd105de733b4585e3c8b0 | [
"MIT"
] | null | null | null | client_test_cases/xbridge_client_canceltx_test.py | Palem1988/blocknetdx-utils | 2d91f3a2b416ff49949cd105de733b4585e3c8b0 | [
"MIT"
] | 1 | 2019-10-02T23:40:17.000Z | 2019-10-02T23:40:17.000Z | import unittest
import time
import sys
from interface import xbridge_client
from utils import xbridge_utils
from strgen import StringGenerator
"""
- Here, the length of the garbage data is very high and increased.
The "j" parameter in the "generate_garbage_input" function is the length of the garbage input we want.
"""
def test_cancel_load_v1(nb_of_runs):
time_distribution = []
total_elapsed_seconds = 0
for j in range(10000, 10000+nb_of_runs):
garbage_input_str = xbridge_utils.generate_garbage_input(j)
ts = time.time()
assert type(xbridge_client.CHECK_CANCEL_TX(garbage_input_str)) == dict
te = time.time()
total_elapsed_seconds += te - ts
json_str = {"time": te - ts, "char_nb": len(garbage_input_str), "API": "dxCancel"}
time_distribution.append(json_str)
xbridge_utils.export_data("test_cancel_load_v1.xlsx", time_distribution)
"""
- Here, the length of garbage parameters is random.
"""
def test_cancel_load_v2(nb_of_runs):
time_distribution = []
total_elapsed_seconds = 0
for i in range(1, nb_of_runs):
garbage_input_str = xbridge_utils.generate_garbage_input(xbridge_utils.generate_random_number(1, 10000))
ts = time.time()
assert type(xbridge_client.CHECK_CANCEL_TX(garbage_input_str)) == dict
te = time.time()
total_elapsed_seconds += te - ts
json_str = {"time": te - ts, "char_nb": len(garbage_input_str), "API": "dxCancel"}
time_distribution.append(json_str)
xbridge_utils.export_data("test_cancel_load_v2.xlsx", time_distribution)
"""
- Here, The length of the random parameter is kept fixed, we just increase the number of iterations ==> Pure load test, when resources are available.
"""
def test_cancel_load_v3(nb_of_runs):
time_distribution = []
total_elapsed_seconds = 0
for i in range(1, nb_of_runs):
garbage_input_str = xbridge_utils.generate_garbage_input(64)
ts = time.time()
assert type(xbridge_client.CHECK_CANCEL_TX(garbage_input_str)) == dict
te = time.time()
total_elapsed_seconds += te - ts
json_str = {"time": te - ts, "char_nb": len(garbage_input_str), "API": "dxCancel"}
time_distribution.append(json_str)
xbridge_utils.export_data("test_cancel_load_v3.xlsx", time_distribution)
""" *** UNIT TESTS ***
- Assertions currently take the following form ==> self.assertIsInstance(..., dict).
More precise assertions may have to be written, when we have real data.
- We test many combinations. But additional scenarios may have to be added.
"""
class cancel_Tx_UnitTest(unittest.TestCase):
"""
- Basic tests
"""
def test_invalid_cancel_1(self):
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(" "), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(""), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX("[]"), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX("[[]]"), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX("{{}}"), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX("{[]}"), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX("[{[]}]"), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX("''"), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX("'"), dict)
"""
- We test various random inputs from individual character classes.
- We then combine those character classes.
- Size of the input parameter is fixed.
"""
def test_invalid_cancel_2(self):
# We pick from a single class at a time
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\d]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\w]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\h]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s]{64}').render()), dict)
# We pick from combinations of 2 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\a]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\h]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\d]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\a]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\h]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\p]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\d]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\a]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\h]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\p]{64}').render()), dict)
# We pick from combinations of 3 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\W]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\h]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\s]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\p]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\W]{64}').render()), dict)
# We pick from combinations of 4 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\w\s]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\a\h]{64}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\a\s]{64}').render()), dict)
# We pick from combinations of 5 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\W\w\h\a]{64}').render()), dict)
"""
- Same as before, but now the random strings are of random but always very high size [9 000-11 000]
"""
def test_invalid_cancel_3(self):
# We pick from a single class at a time
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\d]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\w]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\h]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s]{9000:11000}').render()), dict)
# We pick from combinations of 2 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\a]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\h]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\d]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\a]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\h]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\p]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\d]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\a]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\h]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\p]{9000:11000}').render()), dict)
# We pick from combinations of 3 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\W]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\h]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\s]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\p]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\W]{9000:11000}').render()), dict)
# We pick from combinations of 4 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\w\s]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\a\h]{9000:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\a\s]{9000:11000}').render()), dict)
# We pick from combinations of 5 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\W\w\h\a]{9000:11000}').render()), dict)
"""
- Same as before, but now the random input parameters are of random length [1-11 000]
"""
def test_invalid_cancel_4(self):
# We pick from a single class at a time
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\d]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\w]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\h]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s]{1:11000}').render()), dict)
# We pick from combinations of 2 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\a]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\h]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\d]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\a]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\h]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\s\p]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\d]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\a]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\h]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\W\p]{1:11000}').render()), dict)
# We pick from combinations of 3 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\W]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\h]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\s]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\p]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\a\d\W]{1:11000}').render()), dict)
# We pick from combinations of 4 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\w\s]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\a\h]{1:11000}').render()), dict)
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\a\s]{1:11000}').render()), dict)
# We pick from combinations of 5 classes
self.assertIsInstance(xbridge_client.CHECK_CANCEL_TX(StringGenerator('[\p\d\W\w\h\a]{1:11000}').render()), dict)
def repeat_cancel_tx_unit_tests(nb_of_runs):
    """Runs this module's unit tests nb_of_runs times; exits 1 on failure.

    Bug fix: the original looped `for i in (1, nb_of_runs)`, which iterates
    over the two-element tuple (1, nb_of_runs) -- i.e. exactly two runs
    regardless of nb_of_runs -- instead of over a range.

    :param nb_of_runs: how many times to repeat the whole suite.
    """
    for _ in range(nb_of_runs):
        was_successful = unittest.main(exit=False).result.wasSuccessful()
        if not was_successful:
            sys.exit(1)
| 67.748815 | 154 | 0.699265 | 1,804 | 14,295 | 5.318736 | 0.08204 | 0.079208 | 0.174466 | 0.232621 | 0.887233 | 0.879521 | 0.87431 | 0.867014 | 0.866806 | 0.858155 | 0 | 0.042891 | 0.150262 | 14,295 | 210 | 155 | 68.071429 | 0.747016 | 0.041693 | 0 | 0.239437 | 0 | 0 | 0.111246 | 0.015174 | 0 | 0 | 0 | 0 | 0.65493 | 1 | 0.056338 | false | 0 | 0.042254 | 0 | 0.105634 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
67b7940032b2a83ac5a72e009a65c3429b47acfe | 65,786 | py | Python | cli/tests/test_parser/test_parser.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | null | null | null | cli/tests/test_parser/test_parser.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | 1 | 2022-01-24T11:26:47.000Z | 2022-03-18T23:17:58.000Z | cli/tests/test_parser/test_parser.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import pytest
import uuid
from dateutil.tz import tzutc
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.parser import parser
from polyaxon.parser.constants import NO_VALUE_FOUND
from polyaxon.schemas.types import (
V1ArtifactsType,
V1AuthType,
V1DockerfileType,
V1FileType,
V1GcsType,
V1GitType,
V1S3Type,
V1TensorboardType,
V1UriType,
V1WasbType,
)
from polyaxon.utils.test_utils import BaseTestCase
@pytest.mark.parser_mark
class TestParser(BaseTestCase):
def test_get_boolean(self):
    """get_boolean parses scalars and lists, honors optional/default,
    and rejects anything that is not a recognized boolean."""
    # Scalar parsing: string and native boolean representations.
    for key, raw, expected in [
        ("bool_key_1", "1", True),
        ("bool_key_2", "true", True),
        ("bool_key_2", True, True),
        ("bool_key_3", "0", False),
        ("bool_key_4", "false", False),
        ("bool_key_4", False, False),
    ]:
        self.assertEqual(parser.get_boolean(key=key, value=raw), expected)

    # List parsing converts every element.
    self.assertEqual(
        parser.get_boolean(
            key="bool_list_key_1",
            value=[False, "false", True, "true", "1", "0"],
            is_list=True,
        ),
        [False, False, True, True, True, False],
    )

    # Unparseable scalars raise.
    for key, raw in [
        ("bool_error_key_1", "null"),
        ("bool_error_key_1", None),
        ("bool_error_key_2", "foo"),
        ("bool_error_key_3", 0),
        ("bool_error_key_4", 1),
        ("bool_error_key_5", ""),
    ]:
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_boolean(key=key, value=raw)

    # A list value without is_list=True raises.
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(
            key="bool_list_key_1", value=[False, "false", True, "true", "1", "0"]
        )
    # Lists containing non-boolean elements raise.
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(
            key="bool_list_error_key_2", value=[False, 1, 0], is_list=True
        )
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(
            key="bool_list_error_key_1",
            value=[False, "false", True, "true", "1", "0", "foo"],
            is_list=True,
        )

    # Scalars combined with is_list=True raise.
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(key="bool_key_1", value="1", is_list=True)
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(key="bool_key_2", value=True, is_list=True)

    # Missing values raise unless marked optional.
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(key="bool_non_existing_key", value=None)
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(key="bool_non_existing_key", value=NO_VALUE_FOUND)
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_boolean(key="bool_non_existing_key", value=None, is_list=True)

    # Optional lookups fall back to None or the provided default.
    self.assertIsNone(
        parser.get_boolean(key="bool_non_existing_key", value=None, is_optional=True)
    )
    self.assertEqual(
        parser.get_boolean(
            key="bool_non_existing_key", value=None, is_optional=True, default=True
        ),
        True,
    )
    self.assertIsNone(
        parser.get_boolean(
            key="bool_non_existing_key", value=None, is_list=True, is_optional=True
        )
    )
    self.assertEqual(
        parser.get_boolean(
            key="bool_non_existing_key",
            value=None,
            is_list=True,
            is_optional=True,
            default=[True, False],
        ),
        [True, False],
    )
def test_get_int(self):
    """get_int coerces ints, integral floats and strings, parses lists,
    honors optional/default, and rejects non-integral input."""
    # Scalar parsing: ints, integral floats, and their string forms.
    for key, raw, expected in [
        ("int_key_1", 123, 123),
        ("int_key_2", "123", 123),
        ("float_key_1", 12.0, 12),
        ("float_key_1", "12.", 12),
        ("float_key_1", 12.0, 12),
        ("float_key_1", "12.0", 12),
    ]:
        self.assertEqual(parser.get_int(key=key, value=raw), expected)

    # Lists are parsed element-wise, whether passed as a list or as a
    # JSON-encoded string.
    for raw in (["123", 124, 125, "125"], '["123", 124, 125, "125"]'):
        self.assertEqual(
            parser.get_int(key="int_list_key_1", value=raw, is_list=True),
            [123, 124, 125, 125],
        )

    # Scalars that cannot be coerced to an int raise.
    for key, raw in [
        ("int_error_key_1", "null"),
        ("int_error_key_1", None),
        ("int_error_key_2", ""),
        ("float_error_key_3", 12.1),
        ("float_error_key_3", "12.1"),
        ("int_error_key_3", "foo"),
    ]:
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_int(key=key, value=raw)

    # A list value without is_list=True raises.
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_int(key="int_list_key_1", value=["123", 124, 125, "125"])

    # Lists containing non-int elements raise.
    for key, raw in [
        ("int_list_error_key_1", ["123", 124, 125, "125", None]),
        ("int_list_error_key_2", ["123", 1.24, 125, "125"]),
        ("int_list_error_key_3", ["123", 1.24, 125, "foo"]),
    ]:
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_int(key=key, value=raw, is_list=True)

    # Scalars combined with is_list=True raise.
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_int(key="int_key_1", value=125, is_list=True)
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_int(key="int_key_2", value="125", is_list=True)

    # Missing values raise unless marked optional.
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_int(key="int_non_existing_key", value=None)
    with self.assertRaises(PolyaxonSchemaError):
        parser.get_int(key="int_non_existing_key", value=NO_VALUE_FOUND)

    # Optional lookups fall back to None or the provided default.
    self.assertIsNone(
        parser.get_int(key="int_non_existing_key", value=None, is_optional=True)
    )
    self.assertEqual(
        parser.get_int(
            key="int_non_existing_key", value=None, is_optional=True, default=34
        ),
        34,
    )
    self.assertIsNone(
        parser.get_int(
            key="int_non_existing_key", value=None, is_list=True, is_optional=True
        )
    )
    self.assertEqual(
        parser.get_int(
            key="int_non_existing_key",
            value=None,
            is_list=True,
            is_optional=True,
            default=[34, 1],
        ),
        [34, 1],
    )
    def test_get_float(self):
        """get_float: coerces numbers and numeric strings (and JSON-encoded
        lists when ``is_list=True``) to floats, raises PolyaxonSchemaError on
        invalid input, and honors ``is_optional``/``default``."""
        # Valid scalar coercions: float, "float" string, "int" string, int.
        value = parser.get_float(key="float_key_1", value=1.23)
        self.assertEqual(value, 1.23)
        value = parser.get_float(key="float_key_2", value="1.23")
        self.assertEqual(value, 1.23)
        value = parser.get_float(key="float_key_3", value="123")
        self.assertEqual(value, 123)
        # Valid list coercions: native list and JSON-encoded string list.
        value = parser.get_float(
            key="float_list_key_1", value=[1.23, 13.3, "4.4", "555", 66.0], is_list=True
        )
        self.assertEqual(value, [1.23, 13.3, 4.4, 555.0, 66.0])
        value = parser.get_float(
            key="float_list_key_1",
            value='[1.23, 13.3, "4.4", "555", 66.0]',
            is_list=True,
        )
        self.assertEqual(value, [1.23, 13.3, 4.4, 555.0, 66.0])
        value = parser.get_float(key="float_from_int", value=123)
        self.assertEqual(value, 123.0)
        value = parser.get_float(
            key="float_key_2", value=[1.23, 13.3, 66], is_list=True
        )
        self.assertEqual(value, [1.23, 13.3, 66])
        # Invalid scalars: None, "null", empty, non-numeric strings.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_error_key_1", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_error_key_1", value="null")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_error_key_2", value="")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_error_key_3", value="foo")
        # A list value without is_list=True (and vice versa) is rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(
                key="float_list_key_1", value=[1.23, 13.3, "4.4", "555", 66.0]
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_list_error_key_1", value=None, is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_list_error_key_2", value="", is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_list_error_key_3", value="foo", is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_key_1", value=213, is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_non_existing_key", value=NO_VALUE_FOUND)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(key="float_non_existing_key", value=[1.23, 13.3, "foo"])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_float(
                key="float_non_existing_key", value=[1.23, 13.3, None], is_list=True
            )
        # Optional keys: None without a default, otherwise the default.
        self.assertEqual(
            parser.get_float(
                key="float_non_existing_key", value=None, is_optional=True
            ),
            None,
        )
        self.assertEqual(
            parser.get_float(
                key="float_non_existing_key", value=None, is_optional=True, default=3.4
            ),
            3.4,
        )
        self.assertEqual(
            parser.get_float(
                key="float_non_existing_key",
                value="null",
                is_list=True,
                is_optional=True,
            ),
            None,
        )
        self.assertEqual(
            parser.get_float(
                key="float_non_existing_key",
                value=None,
                is_list=True,
                is_optional=True,
                default=[3.4, 1.2],
            ),
            [3.4, 1.2],
        )
    def test_get_string(self):
        """get_string: passes strings through, stringifies non-strings unless
        ``strong_type=True`` (which then raises), handles list values with
        ``is_list``, and honors ``is_optional``/``default``."""
        # Plain strings (including empty) are returned unchanged.
        value = parser.get_string(key="string_key_1", value="123")
        self.assertEqual(value, "123")
        value = parser.get_string(key="string_key_2", value="1.23")
        self.assertEqual(value, "1.23")
        value = parser.get_string(key="string_key_3", value="foo")
        self.assertEqual(value, "foo")
        value = parser.get_string(key="string_key_4", value="")
        self.assertEqual(value, "")
        # Lists: native list and JSON-encoded string list with is_list=True.
        value = parser.get_string(
            key="string_list_key_1", value=["123", "1.23", "foo", ""], is_list=True
        )
        self.assertEqual(value, ["123", "1.23", "foo", ""])
        value = parser.get_string(
            key="string_list_key_1", value='["123", "1.23", "foo", ""]', is_list=True
        )
        self.assertEqual(value, ["123", "1.23", "foo", ""])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_error_key_1", value=None)
        # Non-strings are stringified by default but rejected with strong_type.
        assert parser.get_string(key="string_error_key_2", value=123) == "123"
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_error_key_2", value=123, strong_type=True)
        assert parser.get_string(key="string_error_key_3", value=1.23) == "1.23"
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_error_key_3", value=1.23, strong_type=True)
        # Booleans stringify to lowercase ("true"), not Python's "True".
        assert parser.get_string(key="string_error_key_4", value=True) == "true"
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_error_key_4", value=True, strong_type=True)
        # A list without is_list=True is serialized to its JSON form.
        assert (
            parser.get_string(key="string_list_key_1", value=["123", "1.23", "foo", ""])
            == '["123", "1.23", "foo", ""]'
        )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(
                key="string_list_key_1",
                value=["123", "1.23", "foo", ""],
                strong_type=True,
            )
        # With is_list, items are stringified unless strong_type is set.
        assert parser.get_string(
            key="string_list_error_key_1", value=["123", 123], is_list=True
        ) == ["123", "123"]
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(
                key="string_list_error_key_1",
                value=["123", 123],
                is_list=True,
                strong_type=True,
            )
        assert parser.get_string(
            key="string_list_error_key_2",
            value=["123", 12.3],
            is_list=True,
        ) == ["123", "12.3"]
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(
                key="string_list_error_key_2",
                value=["123", 12.3],
                is_list=True,
                strong_type=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(
                key="string_list_error_key_3",
                value=["123", None],
                is_list=True,
                strong_type=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(
                key="string_list_error_key_4",
                value=["123", False],
                is_list=True,
                strong_type=True,
            )
        # Scalars are rejected when a list is expected; missing keys raise.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_key_3", value="foo", is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_key_4", value="", is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_non_existing_key", value=NO_VALUE_FOUND)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_string(key="string_non_existing_key", value=None, is_list=True)
        # Optional keys: None without a default, otherwise the default.
        self.assertEqual(
            parser.get_string(
                key="string_non_existing_key", value=None, is_optional=True
            ),
            None,
        )
        self.assertEqual(
            parser.get_string(
                key="string_non_existing_key",
                value=None,
                is_optional=True,
                default="foo",
            ),
            "foo",
        )
        self.assertEqual(
            parser.get_string(
                key="string_non_existing_key",
                value=None,
                is_list=True,
                is_optional=True,
            ),
            None,
        )
        self.assertEqual(
            parser.get_string(
                key="string_non_existing_key",
                value=None,
                is_list=True,
                is_optional=True,
                default=["foo", "bar"],
            ),
            ["foo", "bar"],
        )
    def test_get_dict(self):
        """get_dict: accepts dicts and JSON-encoded dict strings, lists of
        dicts with ``is_list=True``, raises PolyaxonSchemaError for anything
        else, and honors ``is_optional``/``default``."""
        # Native dict and equivalent JSON-encoded strings parse to the same
        # value (JSON false becomes Python False).
        value = parser.get_dict(
            key="dict_key_1",
            value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
        )
        self.assertEqual(value, {"key1": "foo", "key2": 2, "key3": False, "key4": "1"})
        value = parser.get_dict(
            key="dict_key_1",
            value='{"key1": "foo", "key2": 2, "key3": false, "key4": "1"}',
        )
        self.assertEqual(value, {"key1": "foo", "key2": 2, "key3": False, "key4": "1"})
        value = parser.get_dict(
            key="dict_key_1",
            value='{"key1": "foo", "key2": 2, "key3": false, "key4": "1"}',
        )
        self.assertEqual(value, {"key1": "foo", "key2": 2, "key3": False, "key4": "1"})
        # Lists of dicts require is_list=True.
        value = parser.get_dict(
            key="dict_list_key_1",
            value=[
                {"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
                {"key3": True, "key4": "2"},
                {"key1": False, "key2": "3"},
            ],
            is_list=True,
        )
        self.assertEqual(
            value,
            [
                {"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
                {"key3": True, "key4": "2"},
                {"key1": False, "key2": "3"},
            ],
        )
        # Non-dict values are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_error_key_1", value="foo")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_error_key_2", value=1)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_error_key_3", value=False)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_error_key_4", value=["1", "foo"])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_list_key_1", value=["123", {"key3": True}])
        # Lists containing non-dict items are rejected even with is_list.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(
                key="dict_list_error_key_1", value=["123", {"key3": True}], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(
                key="dict_list_error_key_2", value=[{"key3": True}, 12.3], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(
                key="dict_list_error_key_3", value=[{"key3": True}, None], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(
                key="dict_list_error_key_4",
                value=[{"key3": True}, "123", False],
                is_list=True,
            )
        # A single dict is rejected when a list is expected; missing keys raise.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(
                key="dict_key_1",
                value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_non_existing_key", value=NO_VALUE_FOUND)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict(key="dict_non_existing_key", value=None, is_list=True)
        # Optional keys: None without a default, otherwise the default.
        self.assertEqual(
            parser.get_dict(key="dict_non_existing_key", value=None, is_optional=True),
            None,
        )
        self.assertEqual(
            parser.get_dict(
                key="dict_non_existing_key",
                value=None,
                is_optional=True,
                default={"foo": "bar"},
            ),
            {"foo": "bar"},
        )
        self.assertEqual(
            parser.get_dict(
                key="dict_non_existing_key", value=None, is_list=True, is_optional=True
            ),
            None,
        )
        self.assertEqual(
            parser.get_dict(
                key="dict_non_existing_key",
                value=None,
                is_list=True,
                is_optional=True,
                default=[{"foo": "bar"}, {"foo": "boo"}],
            ),
            [{"foo": "bar"}, {"foo": "boo"}],
        )
    def test_get_uri(self):
        """get_uri: parses ``user:pass@host`` strings into V1UriType, supports
        lists with ``is_list=True``, raises PolyaxonSchemaError otherwise, and
        honors ``is_optional``/``default``."""
        # Valid uris: plain host, host:port, and scheme-prefixed host.
        value = parser.get_uri(key="uri_key_1", value="user:pass@siteweb.ca")
        self.assertEqual(value, V1UriType("user", "pass", "siteweb.ca"))
        value = parser.get_uri(key="uri_key_2", value="user2:pass@localhost:8080")
        self.assertEqual(value, V1UriType("user2", "pass", "localhost:8080"))
        value = parser.get_uri(key="uri_key_3", value="user2:pass@https://quay.io")
        self.assertEqual(value, V1UriType("user2", "pass", "https://quay.io"))
        value = parser.get_uri(
            key="uri_list_key_1",
            value=[
                "user:pass@siteweb.ca",
                "user2:pass@localhost:8080",
                "user2:pass@https://quay.io",
            ],
            is_list=True,
        )
        self.assertEqual(
            value,
            [
                V1UriType("user", "pass", "siteweb.ca"),
                V1UriType("user2", "pass", "localhost:8080"),
                V1UriType("user2", "pass", "https://quay.io"),
            ],
        )
        # Values that are not user:pass@host strings are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_error_key_1", value="foo")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_error_key_2", value=1)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_error_key_3", value=False)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_error_key_4", value=["1", "foo"])
        # A list without is_list=True is rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(
                key="uri_list_key_1",
                value=[
                    "user:pass@siteweb.ca",
                    "user2:pass@localhost:8080",
                    "user2:pass@https://quay.io",
                ],
            )
        # Lists with any invalid item are rejected even with is_list.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(
                key="uri_list_error_key_1",
                value=["123", "user:pass@siteweb.ca"],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(
                key="uri_list_error_key_2",
                value=["user:pass@siteweb.ca", 12.3],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(
                key="uri_list_error_key_3",
                value=["user:pass@siteweb.ca", None],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(
                key="uri_list_error_key_4",
                value=["user:pass@siteweb.ca", "123", False],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_key_1", value="user:pass@siteweb.ca", is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_non_existing_key", value=NO_VALUE_FOUND)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_uri(key="uri_non_existing_key", value=None, is_list=True)
        # Optional keys: None without a default, otherwise the default.
        self.assertEqual(
            parser.get_uri(key="uri_non_existing_key", value=None, is_optional=True),
            None,
        )
        self.assertEqual(
            parser.get_uri(
                key="uri_non_existing_key",
                value=None,
                is_optional=True,
                default=V1UriType("user2", "pass", "localhost:8080"),
            ),
            V1UriType("user2", "pass", "localhost:8080"),
        )
        self.assertEqual(
            parser.get_uri(
                key="uri_non_existing_key", value=None, is_list=True, is_optional=True
            ),
            None,
        )
        self.assertEqual(
            parser.get_uri(
                key="uri_non_existing_key",
                value=None,
                is_list=True,
                is_optional=True,
                default=[
                    V1UriType("user", "pass", "siteweb.ca"),
                    V1UriType("user2", "pass", "localhost:8080"),
                ],
            ),
            [
                V1UriType("user", "pass", "siteweb.ca"),
                V1UriType("user2", "pass", "localhost:8080"),
            ],
        )
    def test_get_auth(self):
        """get_auth: accepts dicts, V1AuthType instances, and ``user:pass``
        strings, supports lists with ``is_list=True``, raises
        PolyaxonSchemaError otherwise, and honors ``is_optional``/``default``."""
        # Equivalent representations all normalize to V1AuthType.
        value = parser.get_auth(
            key="auth_key_1", value={"user": "user", "password": "pass"}
        )
        self.assertEqual(value, V1AuthType("user", "pass"))
        value = parser.get_auth(key="auth_key_1", value=V1AuthType("user", "pass"))
        self.assertEqual(value, V1AuthType("user", "pass"))
        value = parser.get_auth(key="auth_key_1", value="user:pass")
        self.assertEqual(value, V1AuthType("user", "pass"))
        value = parser.get_auth(
            key="auth_list_key_1", value=["user:pass", "user2:pass"], is_list=True
        )
        self.assertEqual(
            value, [V1AuthType("user", "pass"), V1AuthType("user2", "pass")]
        )
        # Values that are not user:pass credentials are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_error_key_1", value="foo")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_error_key_2", value=1)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_error_key_3", value=False)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_error_key_4", value=["1", "foo"])
        # List/scalar mismatches with is_list are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_list_key_1", value=["user:pass", "user2:pass"])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(
                key="auth_list_error_key_1", value=["123", "user:pass"], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(
                key="auth_list_error_key_2", value=["user:pass", 12.3], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(
                key="auth_list_error_key_3", value=["user:pass", None], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(
                key="auth_list_error_key_4",
                value=["user:pass", "123", False],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_key_1", value="user:pass", is_list=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_non_existing_key", value=NO_VALUE_FOUND)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_auth(key="auth_non_existing_key", value=None, is_list=True)
        # Optional keys: None without a default, otherwise the default.
        self.assertEqual(
            parser.get_auth(key="auth_non_existing_key", value=None, is_optional=True),
            None,
        )
        self.assertEqual(
            parser.get_auth(
                key="auth_non_existing_key",
                value=None,
                is_optional=True,
                default=V1AuthType("user2", "pass"),
            ),
            V1AuthType("user2", "pass"),
        )
        self.assertEqual(
            parser.get_auth(
                key="auth_non_existing_key", value=None, is_list=True, is_optional=True
            ),
            None,
        )
        self.assertEqual(
            parser.get_auth(
                key="auth_non_existing_key",
                value=None,
                is_list=True,
                is_optional=True,
                default=[V1AuthType("user", "pass"), V1AuthType("user2", "pass")],
            ),
            [V1AuthType("user", "pass"), V1AuthType("user2", "pass")],
        )
def test_get_list(self):
value = parser.get_list(
key="list_key_1", value="user:pass@siteweb.ca, 'pp', 0.1, 'foo'"
)
self.assertEqual(value, ["user:pass@siteweb.ca", "'pp'", "0.1", "'foo'"])
value = parser.get_list(
key="list_key_2", value="user1,user2 , user3, user4 , user5"
)
self.assertEqual(value, ["user1", "user2", "user3", "user4", "user5"])
value = parser.get_list(key="list_key_3", value=[False])
self.assertEqual(value, [False])
value = parser.get_list(key="list_key_3", value=["false"])
self.assertEqual(value, ["false"])
value = parser.get_list(key="list_key_4", value="foo")
self.assertEqual(value, ["foo"])
value = parser.get_list(key="list_key_5", value="")
self.assertEqual(value, [])
value = parser.get_list(key="list_error_key_3", value="null")
self.assertEqual(value, ["null"])
with self.assertRaises(PolyaxonSchemaError):
parser.get_list(key="list_error_key_1", value=True)
with self.assertRaises(PolyaxonSchemaError):
parser.get_list(key="list_error_key_2", value={"key": "value"})
with self.assertRaises(PolyaxonSchemaError):
parser.get_list(key="list_error_key_4", value=123)
with self.assertRaises(PolyaxonSchemaError):
parser.get_list(key="list_non_existing_key", value=None)
with self.assertRaises(PolyaxonSchemaError):
parser.get_list(key="list_non_existing_key", value=NO_VALUE_FOUND)
self.assertEqual(
parser.get_list(key="list_non_existing_key", value=None, is_optional=True),
None,
)
self.assertEqual(
parser.get_list(
key="list_non_existing_key",
value=None,
is_optional=True,
default=["foo"],
),
["foo"],
)
    def test_get_dict_of_dicts(self):
        """get_dict_of_dicts: accepts mappings whose values are all dicts
        (native or JSON-encoded), raises PolyaxonSchemaError otherwise, and
        honors ``is_optional``/``default``."""
        # Native dict-of-dicts and the equivalent JSON-encoded string.
        value = parser.get_dict_of_dicts(
            key="dict_dicts_key_1",
            value={"data1": {"mountPath": "/data/21", "existingClaim": "data-1-pvc"}},
        )
        self.assertEqual(
            value, {"data1": {"mountPath": "/data/21", "existingClaim": "data-1-pvc"}}
        )
        value = parser.get_dict_of_dicts(
            key="dict_dicts_key_1",
            value='{"data1": {"mountPath": "/data/21", "existingClaim": "data-1-pvc"}}',
        )
        self.assertEqual(
            value, {"data1": {"mountPath": "/data/21", "existingClaim": "data-1-pvc"}}
        )
        value = parser.get_dict_of_dicts(
            key="dict_dicts_key_2",
            value={
                "outputs1": {
                    "mountPath": "/output/2",
                    "existingClaim": "outputs-1-pvc",
                },
                "outputs2": {"mountPath": "/output/2", "existingClaim": "output-2-pvc"},
            },
        )
        self.assertEqual(
            value,
            {
                "outputs1": {
                    "mountPath": "/output/2",
                    "existingClaim": "outputs-1-pvc",
                },
                "outputs2": {"mountPath": "/output/2", "existingClaim": "output-2-pvc"},
            },
        )
        # An empty dict normalizes to None rather than {}.
        value = parser.get_dict_of_dicts(key="dict_dicts_key_3", value={})
        self.assertEqual(value, None)
        # Values that are not dicts of dicts are rejected, including a flat
        # dict whose values are scalars.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(key="dict_dicts_error_key_1", value=True)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(
                key="dict_dicts_error_key_2", value={"key": "value"}
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(key="dict_dicts_error_key_3", value="null")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(key="dict_dicts_error_key_4", value=123)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(key="dict_dicts_error_key_5", value="foo")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(key="dict_dicts_error_key_6", value="")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(
                key="dict_dicts_error_key_7",
                value={"mountPath": "/data/2", "existingClaim": "data-2-pvc"},
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(key="dict_dicts_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dict_of_dicts(
                key="dict_dicts_non_existing_key", value=NO_VALUE_FOUND
            )
        # Optional keys: None without a default, otherwise the default.
        self.assertEqual(
            parser.get_dict_of_dicts(
                key="dict_dicts_non_existing_key", value=None, is_optional=True
            ),
            None,
        )
        self.assertEqual(
            parser.get_dict_of_dicts(
                key="dict_dicts_non_existing_key",
                value=None,
                is_optional=True,
                default={},
            ),
            {},
        )
    def test_get_wasbs_path(self):
        """wasbs parsing: ``wasbs://container@user.blob.core.windows.net/path``
        splits into V1WasbType(container, user, path); hosts outside
        ``blob.core.windows.net`` raise PolyaxonSchemaError. Exercises both
        parse_wasbs_path and get_wasbs_path on each url."""
        # Correct url
        wasbs_path = "wasbs://container@user.blob.core.windows.net/path"
        expected = V1WasbType("container", "user", "path")
        parsed_url = parser.parse_wasbs_path(wasbs_path)
        assert parsed_url == expected
        parsed_url = parser.get_wasbs_path(key="wasb_key", value=wasbs_path)
        assert parsed_url == expected
        # Trailing slash and no path both yield an empty path component.
        wasbs_path = "wasbs://container@user.blob.core.windows.net/"
        expected = V1WasbType("container", "user", "")
        parsed_url = parser.parse_wasbs_path(wasbs_path)
        assert parsed_url == expected
        parsed_url = parser.get_wasbs_path(key="wasb_key", value=wasbs_path)
        assert parsed_url == expected
        wasbs_path = "wasbs://container@user.blob.core.windows.net"
        expected = V1WasbType("container", "user", "")
        parsed_url = parser.parse_wasbs_path(wasbs_path)
        assert parsed_url == expected
        parsed_url = parser.get_wasbs_path(key="wasb_key", value=wasbs_path)
        assert parsed_url == expected
        wasbs_path = "wasbs://container@user.blob.core.windows.net/path/to/file"
        expected = V1WasbType("container", "user", "path/to/file")
        parsed_url = parser.parse_wasbs_path(wasbs_path)
        assert parsed_url == expected
        parsed_url = parser.get_wasbs_path(key="wasb_key", value=wasbs_path)
        assert parsed_url == expected
        # Wrong url: any deviation from blob.core.windows.net is rejected.
        wasbs_path = "wasbs://container@user.foo.bar.windows.net/path/to/file"
        with self.assertRaises(PolyaxonSchemaError):
            parser.parse_wasbs_path(wasbs_path)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_wasbs_path(key="wasb_key", value=wasbs_path)
        wasbs_path = "wasbs://container@user.blob.core.foo.net/path/to/file"
        with self.assertRaises(PolyaxonSchemaError):
            parser.parse_wasbs_path(wasbs_path)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_wasbs_path(key="wasb_key", value=wasbs_path)
        wasbs_path = "wasbs://container@user.blob.windows.net/path/to/file"
        with self.assertRaises(PolyaxonSchemaError):
            parser.parse_wasbs_path(wasbs_path)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_wasbs_path(key="wasb_key", value=wasbs_path)
def test_parse_gcs_path(self):
# Correct url
gcs_path = "gs://bucket/path/to/blob"
expected = V1GcsType("bucket", "path/to/blob")
parsed_url = parser.parse_gcs_path(gcs_path)
assert parsed_url == expected
parsed_url = parser.get_gcs_path(key="gcs_key", value=gcs_path)
assert parsed_url == expected
# Wrong url
gcs_path = "gs:/bucket/path/to/blob"
with self.assertRaises(PolyaxonSchemaError):
parser.parse_gcs_path(gcs_path)
with self.assertRaises(PolyaxonSchemaError):
parser.get_gcs_path(key="gcs_key", value=gcs_path)
# Trailing slash
gcs_path = "gs://bucket/path/to/blob/"
expected = V1GcsType("bucket", "path/to/blob/")
assert parser.parse_gcs_path(gcs_path) == expected
parsed_url = parser.get_gcs_path(key="gcs_key", value=gcs_path)
assert parsed_url == expected
# Bucket only
gcs_path = "gs://bucket/"
expected = V1GcsType("bucket", "")
assert parser.parse_gcs_path(gcs_path) == expected
parsed_url = parser.get_gcs_path(key="gcs_key", value=gcs_path)
assert parsed_url == expected
def test_parse_s3_path(self):
s3_path = "s3://test/this/is/bad/key.txt"
expected = V1S3Type("test", "this/is/bad/key.txt")
parsed_url = parser.parse_s3_path(s3_path)
assert parsed_url == expected
parsed_url = parser.get_s3_path(key="s3_key", value=s3_path)
assert parsed_url == expected
def test_parse_date(self):
value = "2010-12-12"
parsed_url = parser.get_date(key="date_key", value=value)
assert parsed_url == datetime.date(2010, 12, 12)
value = datetime.date(2010, 12, 12)
parsed_url = parser.get_date(key="date_key", value=value)
assert parsed_url == value
value = "2010-12-12-12"
with self.assertRaises(PolyaxonSchemaError):
parser.get_date(key="date_key", value=value)
def test_parse_datetime(self):
value = "2010-12-12 10:10"
parsed_url = parser.get_datetime(key="date_key", value=value)
assert parsed_url == datetime.datetime(2010, 12, 12, 10, 10)
value = "2010-12-12 01:00"
parsed_url = parser.get_datetime(key="date_key", value=value)
assert parsed_url == datetime.datetime(2010, 12, 12, 1, 0)
value = "2010-12-12 01:53:12"
parsed_url = parser.get_datetime(key="date_key", value=value)
assert parsed_url == datetime.datetime(2010, 12, 12, 1, 53, 12)
value = "2014-12-22T03:12:58.019077+00:00"
parsed_url = parser.get_datetime(key="date_key", value=value)
assert parsed_url == datetime.datetime(
2014, 12, 22, 3, 12, 58, 19077, tzinfo=tzutc()
)
value = datetime.datetime(2010, 12, 12, 0, 0, 0)
parsed_url = parser.get_datetime(key="date_key", value=value)
assert parsed_url == value
value = datetime.datetime(2010, 12, 12, 0, 0, 0, tzinfo=tzutc())
parsed_url = parser.get_datetime(key="date_key", value=value)
assert parsed_url == value
# Dates are not validate by datetime
value = "2010-12-12"
with self.assertRaises(PolyaxonSchemaError):
parser.get_datetime(key="date_key", value=value)
def test_parse_uuid(self):
value = uuid.uuid4()
parsed_uid = parser.get_uuid(key="uuid_key", value=value)
assert parsed_uid == value.hex
parsed_uid = parser.get_uuid(key="uuid_key", value=value.hex)
assert parsed_uid == value.hex
value = "2sd2"
with self.assertRaises(PolyaxonSchemaError):
parser.get_uuid(key="uuid_key", value=value)
value = "2sd2-sdf"
with self.assertRaises(PolyaxonSchemaError):
parser.get_uuid(key="uuid_key", value=value)
    def test_get_dockerfile_init(self):
        """get_dockerfile_init: builds V1DockerfileType from dicts or
        JSON-encoded strings (lists with ``is_list=True``), raises
        PolyaxonSchemaError for invalid shapes, and honors ``is_optional``."""
        # Dicts and equivalent JSON strings produce V1DockerfileType.
        value = parser.get_dockerfile_init(
            key="dict_key_1", value={"image": "foo", "env": {"key1": 2, "key2": 21}}
        )
        self.assertEqual(
            value, V1DockerfileType(image="foo", env={"key1": 2, "key2": 21})
        )
        value = parser.get_dockerfile_init(
            key="dict_key_1", value='{"image": "foo", "env": {"key1": 2, "key2": 21}}'
        )
        self.assertEqual(
            value, V1DockerfileType(image="foo", env={"key1": 2, "key2": 21})
        )
        value = parser.get_dockerfile_init(
            key="dict_key_1", value='{"image": "foo", "run": ["exec1", "exec2"]}'
        )
        self.assertEqual(value, V1DockerfileType(image="foo", run=["exec1", "exec2"]))
        # Lists of specs require is_list=True.
        value = parser.get_dockerfile_init(
            key="dict_list_key_1",
            value=[
                {"image": "foo", "env": {"key1": 2, "key2": 21}},
                {"image": "foo2", "copy": ["exec1", "exec2"]},
                {"image": "foo3", "run": ["exec1", "exec2"]},
            ],
            is_list=True,
        )
        self.assertEqual(
            value,
            [
                V1DockerfileType(image="foo", env={"key1": 2, "key2": 21}),
                V1DockerfileType(image="foo2", copy=["exec1", "exec2"]),
                V1DockerfileType(image="foo3", run=["exec1", "exec2"]),
            ],
        )
        # Non-dict values and malformed lists are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(key="dict_error_key_1", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(key="dict_error_key_1", value="foo")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(key="dict_error_key_2", value=1)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(key="dict_error_key_3", value=False)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(key="dict_error_key_4", value=["1", "foo"])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_list_key_1", value=["123", {"key3": True}]
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_list_error_key_1", value=["123", {"key3": True}], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_list_error_key_2", value=[{"key3": True}, 12.3], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_list_error_key_3", value=[{"key3": True}, None], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_list_error_key_4",
                value=[{"key3": True}, "123", False],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_key_1",
                value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
                is_list=True,
            )
        # Missing keys raise unless is_optional is set.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(key="dict_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_non_existing_key", value=NO_VALUE_FOUND
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_dockerfile_init(
                key="dict_non_existing_key", value=None, is_list=True
            )
        self.assertEqual(
            parser.get_dockerfile_init(
                key="dict_non_existing_key", value=None, is_optional=True
            ),
            None,
        )
    def test_get_file_init(self):
        """get_file_init: builds V1FileType from dicts or JSON-encoded
        strings, rejects unknown fields (``connection``/``init``) and
        non-dict values, and honors ``is_optional``."""
        # Dicts and equivalent JSON strings; filename is optional.
        value = parser.get_file_init(
            key="dict_key_1", value={"filename": "foo.yaml", "content": "test"}
        )
        self.assertEqual(value, V1FileType(filename="foo.yaml", content="test"))
        value = parser.get_file_init(key="dict_key_1", value={"content": "test"})
        self.assertEqual(value, V1FileType(content="test"))
        value = parser.get_file_init(
            key="dict_key_1", value='{"filename": "foo.yaml", "content": "test"}'
        )
        self.assertEqual(value, V1FileType(filename="foo.yaml", content="test"))
        value = parser.get_file_init(key="dict_key_1", value='{"content": "test"}')
        self.assertEqual(value, V1FileType(content="test"))
        # Extra fields not part of the file schema are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_error_key_1",
                value=dict(content="foo", connection="foo", init=True),
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_error_key_1", value=dict(content="foo", init=True)
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_error_key_1", value=dict(content="foo", connection="foo")
            )
        # Non-dict values and malformed lists are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_error_key_1", value="foo")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_error_key_2", value=1)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_error_key_3", value=False)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_error_key_4", value=["1", "foo"])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_list_key_1", value=["123", {"key3": True}])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_list_error_key_1", value=["123", {"key3": True}], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_list_error_key_2", value=[{"key3": True}, 12.3], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_list_error_key_3", value=[{"key3": True}, None], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_list_error_key_4",
                value=[{"key3": True}, "123", False],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(
                key="dict_key_1",
                value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
                is_list=True,
            )
        # Missing keys raise unless is_optional is set.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_non_existing_key", value=NO_VALUE_FOUND)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_file_init(key="dict_non_existing_key", value=None, is_list=True)
        self.assertEqual(
            parser.get_file_init(
                key="dict_non_existing_key", value=None, is_optional=True
            ),
            None,
        )
    def test_get_git_init(self):
        """get_git_init: builds V1GitType from dicts or JSON-encoded strings
        (lists with ``is_list=True``), rejects unknown fields
        (``connection``/``init``) and non-dict values, and honors
        ``is_optional``."""
        # Dicts and JSON strings; url and revision are each optional.
        value = parser.get_git_init(key="dict_key_1", value={"revision": "foo"})
        self.assertEqual(value, V1GitType(revision="foo"))
        value = parser.get_git_init(
            key="dict_key_1",
            value={"revision": "foo"},
        )
        self.assertEqual(value, V1GitType(revision="foo"))
        value = parser.get_git_init(
            key="dict_key_1", value={"url": "https://github.com", "revision": "foo"}
        )
        self.assertEqual(value, V1GitType(revision="foo", url="https://github.com"))
        value = parser.get_git_init(
            key="dict_key_1", value='{"revision": "foo", "url": "https://github.com"}'
        )
        self.assertEqual(value, V1GitType(revision="foo", url="https://github.com"))
        value = parser.get_git_init(key="dict_key_1", value='{"revision": "foo"}')
        self.assertEqual(value, V1GitType(revision="foo"))
        # Lists of specs require is_list=True.
        value = parser.get_git_init(
            key="dict_list_key_1",
            value=[
                {"revision": "foo"},
                {"url": "https://github.com", "revision": "foo"},
                {"url": "https://github.com"},
            ],
            is_list=True,
        )
        self.assertEqual(
            value,
            [
                V1GitType(revision="foo"),
                V1GitType(revision="foo", url="https://github.com"),
                V1GitType(url="https://github.com"),
            ],
        )
        # Extra fields not part of the git schema are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_error_key_1",
                value=dict(revision="foo", connection="foo", init=True),
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_error_key_1", value=dict(revision="foo", init=True)
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_error_key_1", value=dict(revision="foo", connection="foo")
            )
        # Non-dict values and malformed lists are rejected.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_error_key_1", value="foo")
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_error_key_2", value=1)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_error_key_3", value=False)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_error_key_4", value=["1", "foo"])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_list_key_1", value=["123", {"key3": True}])
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_list_error_key_1", value=["123", {"key3": True}], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_list_error_key_2", value=[{"key3": True}, 12.3], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_list_error_key_3", value=[{"key3": True}, None], is_list=True
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_list_error_key_4",
                value=[{"key3": True}, "123", False],
                is_list=True,
            )
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(
                key="dict_key_1",
                value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
                is_list=True,
            )
        # Missing keys raise unless is_optional is set.
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_non_existing_key", value=None)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_non_existing_key", value=NO_VALUE_FOUND)
        with self.assertRaises(PolyaxonSchemaError):
            parser.get_git_init(key="dict_non_existing_key", value=None, is_list=True)
        self.assertEqual(
            parser.get_git_init(
                key="dict_non_existing_key", value=None, is_optional=True
            ),
            None,
        )
def test_get_tensorboard_init(self):
value = parser.get_tensorboard_init(key="dict_key_1", value={"port": 8000})
self.assertEqual(value, V1TensorboardType(port=8000))
value = parser.get_tensorboard_init(
key="dict_key_1",
value={"port": 8000},
)
self.assertEqual(value, V1TensorboardType(port=8000))
value = parser.get_tensorboard_init(
key="dict_key_1", value={"port": 8000, "uuids": ["uuid1", "uuid2"]}
)
self.assertEqual(value, V1TensorboardType(port=8000, uuids=["uuid1", "uuid2"]))
value = parser.get_tensorboard_init(
key="dict_key_1", value='{"port": 8000, "uuids": ["uuid1","uuid2"]}'
)
self.assertEqual(value, V1TensorboardType(port=8000, uuids=["uuid1", "uuid2"]))
value = parser.get_tensorboard_init(
key="dict_key_1", value='{"useNames": false}'
)
self.assertEqual(value, V1TensorboardType(use_names=False))
value = parser.get_tensorboard_init(
key="dict_list_key_1",
value=[
{"useNames": False},
{"uuids": ["uuid1", "uuid2"]},
{
"port": 8000,
"uuids": ["uuid1", "uuid2"],
"plugins": ["plug1", "plug2"],
},
],
is_list=True,
)
self.assertEqual(
value,
[
V1TensorboardType(use_names=False),
V1TensorboardType(uuids=["uuid1", "uuid2"]),
V1TensorboardType(
port=8000, uuids=["uuid1", "uuid2"], plugins=["plug1", "plug2"]
),
],
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_error_key_1",
value=dict(port="foo"),
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_error_key_1", value=dict(port=8000, init=True)
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_error_key_1", value=dict(port=8000, connection="foo")
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(key="dict_error_key_1", value="foo")
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(key="dict_error_key_2", value=1)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(key="dict_error_key_3", value=False)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(key="dict_error_key_4", value=["1", "foo"])
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_list_key_1", value=["123", {"key3": True}]
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_list_error_key_1", value=["123", {"key3": True}], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_list_error_key_2", value=[{"key3": True}, 12.3], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_list_error_key_3", value=[{"key3": True}, None], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_list_error_key_4",
value=[{"key3": True}, "123", False],
is_list=True,
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_key_1",
value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
is_list=True,
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(key="dict_non_existing_key", value=None)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_non_existing_key", value=NO_VALUE_FOUND
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_tensorboard_init(
key="dict_non_existing_key", value=None, is_list=True
)
self.assertEqual(
parser.get_tensorboard_init(
key="dict_non_existing_key", value=None, is_optional=True
),
None,
)
def test_get_image_init(self):
value = parser.get_image_init(key="dict_key_1", value="foo")
self.assertEqual(value, "foo")
value = parser.get_image_init(key="dict_key_1", value={"name": "foo"})
self.assertEqual(value, "foo")
value = parser.get_image_init(key="dict_key_1", value="foo:bar")
self.assertEqual(value, "foo:bar")
value = parser.get_image_init(
key="dict_key_1", value={"name": "foo:bar", "connection": "foo"}
)
self.assertEqual(value, "foo:bar")
value = parser.get_image_init(
key="dict_key_1", value="https://registry.com/foo:bar"
)
self.assertEqual(value, "https://registry.com/foo:bar")
value = parser.get_image_init(
key="dict_key_1", value='{"name": "https://registry.com/foo:bar"}'
)
self.assertEqual(value, "https://registry.com/foo:bar")
value = parser.get_image_init(
key="dict_list_key_1",
value=[
{"name": "https://registry.com/foo:bar"},
{"name": "test", "connection": "registry"},
"foo:bar",
],
is_list=True,
)
self.assertEqual(
value,
[
"https://registry.com/foo:bar",
"test",
"foo:bar",
],
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(key="dict_error_key_2", value=1)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(key="dict_error_key_3", value=False)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(key="dict_error_key_4", value=["1", "foo"])
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(key="dict_list_key_1", value=["123", {"key3": True}])
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(
key="dict_list_error_key_1", value=["123", {"key3": True}], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(
key="dict_list_error_key_2", value=[{"key3": True}, 12.3], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(
key="dict_list_error_key_3", value=[{"key3": True}, None], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(
key="dict_list_error_key_4",
value=[{"key3": True}, "123", False],
is_list=True,
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(
key="dict_key_1",
value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
is_list=True,
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(key="dict_non_existing_key", value=None)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(key="dict_non_existing_key", value=NO_VALUE_FOUND)
with self.assertRaises(PolyaxonSchemaError):
parser.get_image_init(key="dict_non_existing_key", value=None, is_list=True)
self.assertEqual(
parser.get_image_init(
key="dict_non_existing_key", value=None, is_optional=True
),
None,
)
def test_get_artifacts_init(self):
value = parser.get_artifacts_init(
key="dict_key_1", value={"files": ["foo", "bar"]}
)
self.assertEqual(value, V1ArtifactsType(files=["foo", "bar"]))
value = parser.get_artifacts_init(
key="dict_key_1", value={"files": [["from-foo", "to-foo"], "bar"]}
)
self.assertEqual(value, V1ArtifactsType(files=[["from-foo", "to-foo"], "bar"]))
value = parser.get_artifacts_init(
key="dict_key_1", value='{"dirs": ["foo", "bar"]}'
)
self.assertEqual(value, V1ArtifactsType(dirs=["foo", "bar"]))
value = parser.get_artifacts_init(
key="dict_key_1", value='{"dirs": [["from-foo", "to-foo"], "bar"]}'
)
self.assertEqual(value, V1ArtifactsType(dirs=[["from-foo", "to-foo"], "bar"]))
value = parser.get_artifacts_init(
key="dict_list_key_1",
value=[
{
"dirs": [["from-foo", "to-foo"], "bar"],
"files": [["from-foo", "to-foo"], "bar"],
},
{"files": [["from-foo", "to-foo"], "bar"]},
],
is_list=True,
)
self.assertEqual(
value,
[
V1ArtifactsType(
dirs=[["from-foo", "to-foo"], "bar"],
files=[["from-foo", "to-foo"], "bar"],
),
V1ArtifactsType(files=[["from-foo", "to-foo"], "bar"]),
],
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_key_1", value={"connection": "foo", "init": True}
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_key_1", value={"init": True, "dirs": ["foo", "bar"]}
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_key_1", value={"connection": "foo", "files": ["foo", "bar"]}
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_error_key_1", value={"paths": ["foo", "bar"]}
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(key="dict_error_key_1", value="foo")
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(key="dict_error_key_2", value=1)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(key="dict_error_key_3", value=False)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(key="dict_error_key_4", value=["1", "foo"])
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_list_key_1", value=["123", {"key3": True}]
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_list_error_key_1", value=["123", {"key3": True}], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_list_error_key_2", value=[{"key3": True}, 12.3], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_list_error_key_3", value=[{"key3": True}, None], is_list=True
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_list_error_key_4",
value=[{"key3": True}, "123", False],
is_list=True,
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_key_1",
value={"key1": "foo", "key2": 2, "key3": False, "key4": "1"},
is_list=True,
)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(key="dict_non_existing_key", value=None)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(key="dict_non_existing_key", value=NO_VALUE_FOUND)
with self.assertRaises(PolyaxonSchemaError):
parser.get_artifacts_init(
key="dict_non_existing_key", value=None, is_list=True
)
self.assertEqual(
parser.get_artifacts_init(
key="dict_non_existing_key", value=None, is_optional=True
),
None,
)
| 36.205834 | 88 | 0.57117 | 7,285 | 65,786 | 4.9035 | 0.035141 | 0.088433 | 0.117575 | 0.22927 | 0.945608 | 0.929455 | 0.908572 | 0.889228 | 0.867533 | 0.851016 | 0 | 0.030721 | 0.299851 | 65,786 | 1,816 | 89 | 36.225771 | 0.744833 | 0.010337 | 0 | 0.56156 | 0 | 0.002736 | 0.15987 | 0.047754 | 0 | 0 | 0 | 0 | 0.24829 | 1 | 0.014364 | false | 0.031464 | 0.006156 | 0 | 0.021204 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
67d2b5cc125397bb12d32e1fe2ebbdc94173bde9 | 155 | py | Python | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/margay/calculators/calc_ber.py | lmnotran/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 69 | 2021-12-16T01:34:09.000Z | 2022-03-31T08:27:39.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/margay/calculators/calc_ber.py | lmnotran/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 6 | 2022-01-12T18:22:08.000Z | 2022-03-25T10:19:27.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/margay/calculators/calc_ber.py | lmnotran/gecko_sdk | 2e82050dc8823c9fe0e8908c1b2666fb83056230 | [
"Zlib"
] | 21 | 2021-12-20T09:05:45.000Z | 2022-03-28T02:52:28.000Z | from pyradioconfig.parts.ocelot.calculators.calc_ber import CALC_Ber_Ocelot
class CALC_Ber_Margay(CALC_Ber_Ocelot):
#Inherit all from Ocelot
pass | 25.833333 | 75 | 0.819355 | 23 | 155 | 5.217391 | 0.565217 | 0.233333 | 0.216667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 155 | 6 | 76 | 25.833333 | 0.888889 | 0.148387 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
67f83298ea441f835abeb4ec5fc1cb75e1a2deaf | 144 | py | Python | api/tacticalrmm/logs/admin.py | rfost52/tacticalrmm | 44dd59fa3f530e7cf6730ef44606fa869c8cd222 | [
"MIT"
] | 2 | 2021-04-29T13:34:07.000Z | 2021-04-29T13:34:11.000Z | api/tacticalrmm/logs/admin.py | rfost52/tacticalrmm | 44dd59fa3f530e7cf6730ef44606fa869c8cd222 | [
"MIT"
] | null | null | null | api/tacticalrmm/logs/admin.py | rfost52/tacticalrmm | 44dd59fa3f530e7cf6730ef44606fa869c8cd222 | [
"MIT"
] | 2 | 2021-05-01T22:40:47.000Z | 2022-03-07T16:16:14.000Z | from django.contrib import admin
from .models import AuditLog, PendingAction
admin.site.register(PendingAction)
admin.site.register(AuditLog)
| 20.571429 | 43 | 0.833333 | 18 | 144 | 6.666667 | 0.555556 | 0.3 | 0.366667 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090278 | 144 | 6 | 44 | 24 | 0.916031 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
e1dc4fb862e37f5faf5d99766a3470e16a5bae46 | 8,824 | py | Python | tools/distrib/python/grpcio_tools/protoc_lib_deps.py | casperisfine/grpc | 9facfe2b684bbaaf9e75e2d285e7bffceeefe2b1 | [
"Apache-2.0"
] | 2 | 2020-03-27T17:01:34.000Z | 2020-03-27T19:38:58.000Z | tools/distrib/python/grpcio_tools/protoc_lib_deps.py | OneCodeMonkey/grpc | 03fc68bb5a10c2604e299a8089776462efbfc8c7 | [
"Apache-2.0"
] | 62 | 2020-02-27T00:53:36.000Z | 2021-02-05T06:10:53.000Z | tools/distrib/python/grpcio_tools/protoc_lib_deps.py | OneCodeMonkey/grpc | 03fc68bb5a10c2604e299a8089776462efbfc8c7 | [
"Apache-2.0"
] | 1 | 2022-02-22T16:32:03.000Z | 2022-02-22T16:32:03.000Z |
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# AUTO-GENERATED BY make_grpcio_tools.py!
CC_FILES=['google/protobuf/compiler/zip_writer.cc', 'google/protobuf/compiler/subprocess.cc', 'google/protobuf/compiler/ruby/ruby_generator.cc', 'google/protobuf/compiler/python/python_generator.cc', 'google/protobuf/compiler/plugin.pb.cc', 'google/protobuf/compiler/plugin.cc', 'google/protobuf/compiler/php/php_generator.cc', 'google/protobuf/compiler/objectivec/objectivec_primitive_field.cc', 'google/protobuf/compiler/objectivec/objectivec_oneof.cc', 'google/protobuf/compiler/objectivec/objectivec_message_field.cc', 'google/protobuf/compiler/objectivec/objectivec_message.cc', 'google/protobuf/compiler/objectivec/objectivec_map_field.cc', 'google/protobuf/compiler/objectivec/objectivec_helpers.cc', 'google/protobuf/compiler/objectivec/objectivec_generator.cc', 'google/protobuf/compiler/objectivec/objectivec_file.cc', 'google/protobuf/compiler/objectivec/objectivec_field.cc', 'google/protobuf/compiler/objectivec/objectivec_extension.cc', 'google/protobuf/compiler/objectivec/objectivec_enum_field.cc', 'google/protobuf/compiler/objectivec/objectivec_enum.cc', 'google/protobuf/compiler/js/well_known_types_embed.cc', 'google/protobuf/compiler/js/js_generator.cc', 'google/protobuf/compiler/java/java_string_field_lite.cc', 'google/protobuf/compiler/java/java_string_field.cc', 'google/protobuf/compiler/java/java_shared_code_generator.cc', 'google/protobuf/compiler/java/java_service.cc', 'google/protobuf/compiler/java/java_primitive_field_lite.cc', 'google/protobuf/compiler/java/java_primitive_field.cc', 'google/protobuf/compiler/java/java_name_resolver.cc', 'google/protobuf/compiler/java/java_message_lite.cc', 'google/protobuf/compiler/java/java_message_field_lite.cc', 'google/protobuf/compiler/java/java_message_field.cc', 'google/protobuf/compiler/java/java_message_builder_lite.cc', 'google/protobuf/compiler/java/java_message_builder.cc', 'google/protobuf/compiler/java/java_message.cc', 'google/protobuf/compiler/java/java_map_field_lite.cc', 
'google/protobuf/compiler/java/java_map_field.cc', 'google/protobuf/compiler/java/java_helpers.cc', 'google/protobuf/compiler/java/java_generator_factory.cc', 'google/protobuf/compiler/java/java_generator.cc', 'google/protobuf/compiler/java/java_file.cc', 'google/protobuf/compiler/java/java_field.cc', 'google/protobuf/compiler/java/java_extension_lite.cc', 'google/protobuf/compiler/java/java_extension.cc', 'google/protobuf/compiler/java/java_enum_lite.cc', 'google/protobuf/compiler/java/java_enum_field_lite.cc', 'google/protobuf/compiler/java/java_enum_field.cc', 'google/protobuf/compiler/java/java_enum.cc', 'google/protobuf/compiler/java/java_doc_comment.cc', 'google/protobuf/compiler/java/java_context.cc', 'google/protobuf/compiler/csharp/csharp_wrapper_field.cc', 'google/protobuf/compiler/csharp/csharp_source_generator_base.cc', 'google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc', 'google/protobuf/compiler/csharp/csharp_repeated_message_field.cc', 'google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc', 'google/protobuf/compiler/csharp/csharp_reflection_class.cc', 'google/protobuf/compiler/csharp/csharp_primitive_field.cc', 'google/protobuf/compiler/csharp/csharp_message_field.cc', 'google/protobuf/compiler/csharp/csharp_message.cc', 'google/protobuf/compiler/csharp/csharp_map_field.cc', 'google/protobuf/compiler/csharp/csharp_helpers.cc', 'google/protobuf/compiler/csharp/csharp_generator.cc', 'google/protobuf/compiler/csharp/csharp_field_base.cc', 'google/protobuf/compiler/csharp/csharp_enum_field.cc', 'google/protobuf/compiler/csharp/csharp_enum.cc', 'google/protobuf/compiler/csharp/csharp_doc_comment.cc', 'google/protobuf/compiler/cpp/cpp_string_field.cc', 'google/protobuf/compiler/cpp/cpp_service.cc', 'google/protobuf/compiler/cpp/cpp_primitive_field.cc', 'google/protobuf/compiler/cpp/cpp_padding_optimizer.cc', 'google/protobuf/compiler/cpp/cpp_message_field.cc', 'google/protobuf/compiler/cpp/cpp_message.cc', 
'google/protobuf/compiler/cpp/cpp_map_field.cc', 'google/protobuf/compiler/cpp/cpp_helpers.cc', 'google/protobuf/compiler/cpp/cpp_generator.cc', 'google/protobuf/compiler/cpp/cpp_file.cc', 'google/protobuf/compiler/cpp/cpp_field.cc', 'google/protobuf/compiler/cpp/cpp_extension.cc', 'google/protobuf/compiler/cpp/cpp_enum_field.cc', 'google/protobuf/compiler/cpp/cpp_enum.cc', 'google/protobuf/compiler/command_line_interface.cc', 'google/protobuf/compiler/code_generator.cc', 'google/protobuf/wrappers.pb.cc', 'google/protobuf/wire_format.cc', 'google/protobuf/util/type_resolver_util.cc', 'google/protobuf/util/time_util.cc', 'google/protobuf/util/message_differencer.cc', 'google/protobuf/util/json_util.cc', 'google/protobuf/util/internal/utility.cc', 'google/protobuf/util/internal/type_info_test_helper.cc', 'google/protobuf/util/internal/type_info.cc', 'google/protobuf/util/internal/protostream_objectwriter.cc', 'google/protobuf/util/internal/protostream_objectsource.cc', 'google/protobuf/util/internal/proto_writer.cc', 'google/protobuf/util/internal/object_writer.cc', 'google/protobuf/util/internal/json_stream_parser.cc', 'google/protobuf/util/internal/json_objectwriter.cc', 'google/protobuf/util/internal/json_escaping.cc', 'google/protobuf/util/internal/field_mask_utility.cc', 'google/protobuf/util/internal/error_listener.cc', 'google/protobuf/util/internal/default_value_objectwriter.cc', 'google/protobuf/util/internal/datapiece.cc', 'google/protobuf/util/field_mask_util.cc', 'google/protobuf/util/field_comparator.cc', 'google/protobuf/util/delimited_message_util.cc', 'google/protobuf/unknown_field_set.cc', 'google/protobuf/type.pb.cc', 'google/protobuf/timestamp.pb.cc', 'google/protobuf/text_format.cc', 'google/protobuf/stubs/substitute.cc', 'google/protobuf/struct.pb.cc', 'google/protobuf/source_context.pb.cc', 'google/protobuf/service.cc', 'google/protobuf/reflection_ops.cc', 'google/protobuf/message.cc', 'google/protobuf/map_field.cc', 
'google/protobuf/io/tokenizer.cc', 'google/protobuf/io/printer.cc', 'google/protobuf/io/gzip_stream.cc', 'google/protobuf/generated_message_table_driven.cc', 'google/protobuf/generated_message_reflection.cc', 'google/protobuf/field_mask.pb.cc', 'google/protobuf/extension_set_heavy.cc', 'google/protobuf/empty.pb.cc', 'google/protobuf/dynamic_message.cc', 'google/protobuf/duration.pb.cc', 'google/protobuf/descriptor_database.cc', 'google/protobuf/descriptor.pb.cc', 'google/protobuf/descriptor.cc', 'google/protobuf/compiler/parser.cc', 'google/protobuf/compiler/importer.cc', 'google/protobuf/api.pb.cc', 'google/protobuf/any.pb.cc', 'google/protobuf/any.cc', 'google/protobuf/wire_format_lite.cc', 'google/protobuf/stubs/time.cc', 'google/protobuf/stubs/strutil.cc', 'google/protobuf/stubs/structurally_valid.cc', 'google/protobuf/stubs/stringprintf.cc', 'google/protobuf/stubs/stringpiece.cc', 'google/protobuf/stubs/statusor.cc', 'google/protobuf/stubs/status.cc', 'google/protobuf/stubs/int128.cc', 'google/protobuf/stubs/common.cc', 'google/protobuf/stubs/bytestream.cc', 'google/protobuf/repeated_field.cc', 'google/protobuf/parse_context.cc', 'google/protobuf/message_lite.cc', 'google/protobuf/io/zero_copy_stream_impl_lite.cc', 'google/protobuf/io/zero_copy_stream_impl.cc', 'google/protobuf/io/zero_copy_stream.cc', 'google/protobuf/io/strtod.cc', 'google/protobuf/io/io_win32.cc', 'google/protobuf/io/coded_stream.cc', 'google/protobuf/implicit_weak_message.cc', 'google/protobuf/generated_message_util.cc', 'google/protobuf/generated_message_table_driven_lite.cc', 'google/protobuf/generated_enum_util.cc', 'google/protobuf/extension_set.cc', 'google/protobuf/arena.cc', 'google/protobuf/any_lite.cc']
PROTO_FILES=['google/protobuf/wrappers.proto', 'google/protobuf/type.proto', 'google/protobuf/timestamp.proto', 'google/protobuf/struct.proto', 'google/protobuf/source_context.proto', 'google/protobuf/field_mask.proto', 'google/protobuf/empty.proto', 'google/protobuf/duration.proto', 'google/protobuf/descriptor.proto', 'google/protobuf/compiler/plugin.proto', 'google/protobuf/api.proto', 'google/protobuf/any.proto']
CC_INCLUDE='third_party/protobuf/src'
PROTO_INCLUDE='third_party/protobuf/src'
PROTOBUF_SUBMODULE_VERSION="d0bfd5221182da1a7cc280f3337b5e41a89539cf"
| 367.666667 | 7,631 | 0.81879 | 1,240 | 8,824 | 5.65 | 0.160484 | 0.343705 | 0.363117 | 0.280902 | 0.637025 | 0.507565 | 0.365258 | 0.134313 | 0.037967 | 0 | 0 | 0.004449 | 0.031958 | 8,824 | 23 | 7,632 | 383.652174 | 0.815734 | 0.066863 | 0 | 0 | 0 | 0 | 0.903991 | 0.903991 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0.2 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e1fee29cf0b43f4030927d4aa22f408017fee51a | 377,928 | py | Python | src/genie/libs/parser/iosxr/tests/test_show_isis.py | alsyz/genieparser | e80a219851aa074482f9cccee1cb9fb42216e225 | [
"Apache-2.0"
] | 1 | 2021-10-01T05:41:06.000Z | 2021-10-01T05:41:06.000Z | src/genie/libs/parser/iosxr/tests/test_show_isis.py | alsyz/genieparser | e80a219851aa074482f9cccee1cb9fb42216e225 | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxr/tests/test_show_isis.py | alsyz/genieparser | e80a219851aa074482f9cccee1cb9fb42216e225 | [
"Apache-2.0"
] | null | null | null | # Python
import unittest
from unittest.mock import Mock
# Metaparser
from genie.metaparser.util.exceptions import SchemaEmptyParserError, SchemaMissingKeyError
# iosxr show_isis
from genie.libs.parser.iosxr.show_isis import (
ShowIsis,
ShowIsisLspLog,
ShowIsisSpfLog,
ShowIsisProtocol,
ShowIsisHostname,
ShowIsisInterface,
ShowIsisAdjacency,
ShowIsisNeighbors,
ShowIsisStatistics,
ShowIsisPrivateAll,
ShowIsisSpfLogDetail,
ShowIsisDatabaseDetail,
ShowIsisSegmentRoutingLabelTable,
ShowIsisFastRerouteSummary
)
# ==================================================
# Unit test for 'show isis fast-reroute summary'
# ==================================================
class TestShowIsisFastRerouteSummary(unittest.TestCase):
    ''' Unit test for 'show isis fast-reroute summary' '''

    # Show full diffs on assertEqual failures (consistent with the other
    # test classes in this module).
    maxDiff = None

    empty_output = {'execute.return_value': ''}

    golden_parsed_output = {
        'instance':{
            'SR':{
                'topology':{
                    'IPv4 Unicast':{
                        'level':{
                            1:{
                                'all_paths_protected':{
                                    'critical_priority' : 0,
                                    'high_priority' : 0,
                                    'medium_priority' : 0,
                                    'low_priority' : 0,
                                    'total' : 0,
                                },
                                'some_paths_protected':{
                                    'critical_priority' : 0,
                                    'high_priority' : 0,
                                    'medium_priority' : 0,
                                    'low_priority' : 0,
                                    'total' : 0,
                                },
                                'unprotected':{
                                    'critical_priority' : 0,
                                    'high_priority' : 0,
                                    'medium_priority' : 4,
                                    'low_priority' : 6,
                                    'total' : 10,
                                },
                                'protection_coverage':{
                                    'critical_priority' : '0.00%',
                                    'high_priority' : '0.00%',
                                    'medium_priority' : '0.00%',
                                    'low_priority' : '0.00%',
                                    'total' : '0.00%',
                                },
                            },
                            2:{
                                'all_paths_protected':{
                                    'critical_priority' : 0,
                                    'high_priority' : 0,
                                    'medium_priority' : 0,
                                    'low_priority' : 0,
                                    'total' : 0,
                                },
                                'some_paths_protected':{
                                    'critical_priority' : 0,
                                    'high_priority' : 0,
                                    'medium_priority' : 0,
                                    'low_priority' : 0,
                                    'total' : 0,
                                },
                                'unprotected':{
                                    'critical_priority' : 0,
                                    'high_priority' : 0,
                                    'medium_priority' : 1,
                                    'low_priority' : 0,
                                    'total' : 1,
                                },
                                'protection_coverage':{
                                    'critical_priority' : '0.00%',
                                    'high_priority' : '0.00%',
                                    'medium_priority' : '0.00%',
                                    'low_priority' : '0.00%',
                                    'total' : '0.00%',
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    golden_output = {'execute.return_value': '''
        RP/0/RP0/CPU0:R3#show isis fast-reroute summary
        Thu Aug 29 15:31:09.046 UTC

        IS-IS SR IPv4 Unicast FRR summary

                             Critical   High       Medium     Low        Total   
                             Priority   Priority   Priority   Priority          
        Prefixes reachable in L1
         All paths protected 0          0          0          0          0      
         Some paths protected 0          0         0          0          0      
        Unprotected          0          0          4          6          10     
        Protection coverage 0.00%      0.00%      0.00%      0.00%      0.00%
        Prefixes reachable in L2
         All paths protected 0          0          0          0          0      
         Some paths protected 0          0         0          0          0      
        Unprotected          0          0          1          0          1      
        Protection coverage 0.00%      0.00%      0.00%      0.00%      0.00%
    '''}

    def test_show_isis_fast_reroute_summary_empty(self):
        '''Empty device output must raise SchemaEmptyParserError.'''
        self.device = Mock(**self.empty_output)
        obj = ShowIsisFastRerouteSummary(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            # Return value is irrelevant here; parse() is expected to raise.
            obj.parse()

    def test_show_isis_fast_reroute_summary_golden(self):
        '''Golden device output must parse into the expected structure.'''
        self.device = Mock(**self.golden_output)
        obj = ShowIsisFastRerouteSummary(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output)
# ==================================================
# Unit test for 'show isis adjacency'
# ==================================================
class TestShowIsisAdjacency(unittest.TestCase):
    '''Unit test for "show isis adjacency".'''

    empty_output = {'execute.return_value': ''}

    # Show full diffs when the large expected dicts mismatch.
    maxDiff = None

    golden_parsed_output1 = {
        'isis': {
            'p': {
                'vrf': {
                    'default': {
                        'level': {
                            'Level-1': {
                                'interfaces': {
                                    'PO0/1/0/1': {
                                        'system_id': {
                                            '12a4': {
                                                'interface': 'Port-channel0/1/0/1',
                                                'snpa': '*PtoP*',
                                                'state': 'Up',
                                                'hold': '23',
                                                'changed': '00:00:06',
                                                'nsf': 'Capable',
                                                'bfd': 'Init'}}},
                                    'Gi0/6/0/2': {
                                        'system_id': {
                                            '12a4': {
                                                'interface': 'GigabitEthernet0/6/0/2',
                                                'snpa': '0004.28ff.868a',
                                                'state': 'Up',
                                                'hold': '56',
                                                'changed': '00:04:01',
                                                'nsf': 'Capable',
                                                'bfd': 'Up'}}}},
                                'total_adjacency_count': 2},
                            'Level-2': {
                                'interfaces': {
                                    'PO0/1/0/1': {
                                        'system_id': {
                                            '12a4': {
                                                'interface': 'Port-channel0/1/0/1',
                                                'snpa': '*PtoP*',
                                                'state': 'Up',
                                                'hold': '23',
                                                'changed': '00:00:06',
                                                'nsf': 'Capable',
                                                'bfd': 'None'}}},
                                    'Gi0/6/0/2': {
                                        'system_id': {
                                            '12a4': {
                                                'interface': 'GigabitEthernet0/6/0/2',
                                                'snpa': '0004.28ff.868a',
                                                'state': 'Up',
                                                'hold': '26',
                                                'changed': '00:00:13',
                                                'nsf': 'Capable',
                                                'bfd': 'Init'}}}},
                                'total_adjacency_count': 2}}}}}}}

    golden_output1 = {'execute.return_value': '''
          IS-IS p Level-1 adjacencies:
          System Id      Interface        SNPA           State Hold Changed  NSF      BFD
          12a4           PO0/1/0/1        *PtoP*         Up    23   00:00:06 Capable  Init
          12a4           Gi0/6/0/2        0004.28ff.868a Up    56   00:04:01 Capable  Up
          
          Total adjacency count: 2
          
          IS-IS p Level-2 adjacencies:
          System Id      Interface        SNPA           State Hold Changed  NSF      BFD
          12a4           PO0/1/0/1        *PtoP*         Up    23   00:00:06 Capable  None
          12a4           Gi0/6/0/2        0004.28ff.868a Up    26   00:00:13 Capable  Init
          
          Total adjacency count: 2
    '''}

    golden_parsed_output2 = {
        'isis': {
            'test': {
                'vrf': {
                    'default': {
                        'level': {
                            'Level-1': {
                                'interfaces': {
                                    'Gi0/0/0/0.115': {
                                        'system_id': {
                                            'R1_xe': {
                                                'interface': 'GigabitEthernet0/0/0/0.115',
                                                'snpa': 'fa16.3eff.4f49',
                                                'state': 'Up',
                                                'hold': '23',
                                                'changed': '22:30:27',
                                                'nsf': 'Yes',
                                                'ipv4_bfd': 'None',
                                                'ipv6_bfd': 'None'}}},
                                    'Gi0/0/0/1.115': {
                                        'system_id': {
                                            'R3_nx': {
                                                'interface': 'GigabitEthernet0/0/0/1.115',
                                                'snpa': '5e00.40ff.0209',
                                                'state': 'Up',
                                                'hold': '20',
                                                'changed': '22:30:27',
                                                'nsf': 'Yes',
                                                'ipv4_bfd': 'None',
                                                'ipv6_bfd': 'None'}}}},
                                'total_adjacency_count': 2},
                            'Level-2': {
                                'interfaces': {
                                    'Gi0/0/0/0.115': {
                                        'system_id': {
                                            'R1_xe': {
                                                'interface': 'GigabitEthernet0/0/0/0.115',
                                                'snpa': 'fa16.3eff.4f49',
                                                'state': 'Up',
                                                'hold': '26',
                                                'changed': '22:30:26',
                                                'nsf': 'Yes',
                                                'ipv4_bfd': 'None',
                                                'ipv6_bfd': 'None'}}},
                                    'Gi0/0/0/1.115': {
                                        'system_id': {
                                            'R3_nx': {
                                                'interface': 'GigabitEthernet0/0/0/1.115',
                                                'snpa': '5e00.40ff.0209',
                                                'state': 'Up',
                                                'hold': '23',
                                                'changed': '22:30:27',
                                                'nsf': 'Yes',
                                                'ipv4_bfd': 'None',
                                                'ipv6_bfd': 'None'}}}},
                                'total_adjacency_count': 2}}}}},
            'test1': {
                'vrf': {
                    'default': {
                        'level': {
                            'Level-1': {},
                            'Level-2': {}}}}}}}

    golden_output2 = {'execute.return_value': '''
        +++ R2_xr: executing command 'show isis adjacency' +++
        show isis adjacency
        Wed Apr 17 16:25:06.870 UTC

        IS-IS test Level-1 adjacencies:
        System Id      Interface        SNPA           State Hold     Changed  NSF      IPv4     IPv6
                                                                                        BFD      BFD
        R1_xe          Gi0/0/0/0.115    fa16.3eff.4f49 Up    23       22:30:27 Yes      None     None
        R3_nx          Gi0/0/0/1.115    5e00.40ff.0209 Up    20       22:30:27 Yes      None     None
        
        Total adjacency count: 2
        
        IS-IS test Level-2 adjacencies:
        System Id      Interface        SNPA           State Hold     Changed  NSF      IPv4     IPv6
                                                                                        BFD      BFD
        R1_xe          Gi0/0/0/0.115    fa16.3eff.4f49 Up    26       22:30:26 Yes      None     None
        R3_nx          Gi0/0/0/1.115    5e00.40ff.0209 Up    23       22:30:27 Yes      None     None
        
        Total adjacency count: 2
        
        IS-IS test1 Level-1 adjacencies:
        System Id      Interface        SNPA           State Hold     Changed  NSF      IPv4     IPv6
                                                                                        BFD      BFD

        IS-IS test1 Level-2 adjacencies:
        System Id      Interface        SNPA           State Hold     Changed  NSF      IPv4     IPv6
                                                                                        BFD      BFD
    '''}

    def test_show_isis_adjacency_empty(self):
        '''Empty device output must raise SchemaEmptyParserError.'''
        self.device = Mock(**self.empty_output)
        obj = ShowIsisAdjacency(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            # Return value is irrelevant; parse() is expected to raise.
            obj.parse()

    def test_show_isis_adjacency_golden1(self):
        '''Single-instance adjacency output parses into the expected dict.'''
        self.device = Mock(**self.golden_output1)
        obj = ShowIsisAdjacency(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output1)

    def test_show_isis_adjacency_golden2(self):
        '''Multi-instance output (including an empty instance) parses correctly.'''
        self.device = Mock(**self.golden_output2)
        obj = ShowIsisAdjacency(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output2)
# ====================================
# Unit test for 'show isis neighbors'
# ====================================
class TestShowIsisNeighbors(unittest.TestCase):
    '''Unit test for "show isis neighbors".'''

    # Show full diffs when the large expected dicts mismatch.
    maxDiff = None

    empty_output = {'execute.return_value': ''}

    golden_parsed_output1 = {
        'isis': {
            'test': {
                'vrf': {
                    'default': {
                        'interfaces': {
                            'GigabitEthernet0/0/0/0.115': {
                                'neighbors': {
                                    'R1_xe': {
                                        'snpa': 'fa16.3eff.4f49',
                                        'state': 'Up',
                                        'holdtime': '24',
                                        'type': 'L1L2',
                                        'ietf_nsf': 'Capable'}}},
                            'GigabitEthernet0/0/0/1.115': {
                                'neighbors': {
                                    'R3_nx': {
                                        'snpa': '5e00.40ff.0209',
                                        'state': 'Up',
                                        'holdtime': '25',
                                        'type': 'L1L2',
                                        'ietf_nsf': 'Capable'}}}},
                        'total_neighbor_count': 2}}}}}

    golden_output1 = {'execute.return_value': '''
        +++ R2_xr: executing command 'show isis neighbors' +++
        show isis neighbors
        Wed Apr 17 16:21:30.075 UTC

        IS-IS test neighbors:
        System Id      Interface        SNPA           State Holdtime Type IETF-NSF
        R1_xe          Gi0/0/0/0.115    fa16.3eff.4f49 Up    24       L1L2 Capable
        R3_nx          Gi0/0/0/1.115    5e00.40ff.0209 Up    25       L1L2 Capable
        
        Total neighbor count: 2
    '''}

    golden_parsed_output2 = {
        'isis': {
            'test': {
                'vrf': {
                    'default': {
                        'interfaces': {
                            'GigabitEthernet0/0/0/0.115': {
                                'neighbors': {
                                    'R1_xe': {
                                        'snpa': 'fa16.3eff.4f49',
                                        'state': 'Up',
                                        'holdtime': '22',
                                        'type': 'L1L2',
                                        'ietf_nsf': 'Capable'}}},
                            'GigabitEthernet0/0/0/1.115': {
                                'neighbors': {
                                    'R3_nx': {
                                        'snpa': '5e00.40ff.0209',
                                        'state': 'Up',
                                        'holdtime': '22',
                                        'type': 'L1L2',
                                        'ietf_nsf': 'Capable'}}}},
                        'total_neighbor_count': 2}}},
            'test1': {
                'vrf': {
                    'default': {}}}}}

    golden_output2 = {'execute.return_value': '''
        show isis neighbors
        Thu Apr 18 11:00:22.192 UTC

        IS-IS test neighbors:
        System Id      Interface        SNPA           State Holdtime Type IETF-NSF
        R1_xe          Gi0/0/0/0.115    fa16.3eff.4f49 Up    22       L1L2 Capable
        R3_nx          Gi0/0/0/1.115    5e00.40ff.0209 Up    22       L1L2 Capable
        
        Total neighbor count: 2
        
        IS-IS test1 neighbors:
        System Id      Interface        SNPA           State Holdtime Type IETF-NSF
    '''}

    def test_show_isis_neighbors_empty(self):
        '''Empty device output must raise SchemaEmptyParserError.'''
        self.device = Mock(**self.empty_output)
        obj = ShowIsisNeighbors(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            # Return value is irrelevant; parse() is expected to raise.
            obj.parse()

    def test_show_isis_neighbors_golden1(self):
        '''Single-instance neighbor output parses into the expected dict.'''
        self.device = Mock(**self.golden_output1)
        obj = ShowIsisNeighbors(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output1)

    def test_show_isis_neighbors_golden2(self):
        '''Multi-instance output (including an empty instance) parses correctly.'''
        self.device = Mock(**self.golden_output2)
        obj = ShowIsisNeighbors(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output2)
# ======================================================
# Unit test for 'show isis segment-routing label table'
# ======================================================
class TestShowIsisSegmentRoutingLabelTable(unittest.TestCase):
    '''Unit test for "show isis segment-routing label table"'''

    # Show full dict diffs on assertion failures.
    maxDiff = None

    # Simulated device with no output; parser must raise, not return {}.
    empty_output = {'execute.return_value': ''}

    # Expected parsed structure: each SR label maps to either an interface
    # name (Loopback0) or an IP prefix.
    golden_parsed_output1 = {
        'instance': {
            'SR': {
                'label': {
                    16001: {
                        'prefix_interface': 'Loopback0'},
                    16002: {
                        'prefix_interface': '10.2.2.2/32'},
                    16003: {
                        'prefix_interface': '10.3.3.3/32'}
                }
            }
        }
    }

    # Raw device output matching golden_parsed_output1.
    golden_output1 = {'execute.return_value': '''
RP/0/RP0/CPU0:iosxrv9000-1#show isis segment-routing label table
Mon Sep 30 13:22:32.921 EDT
IS-IS SR IS Label Table
Label Prefix/Interface
---------- ----------------
16001 Loopback0
16002 10.2.2.2/32
16003 10.3.3.3/32
'''}

    def test_show_isis_segment_routing_label_table_empty(self):
        # Empty output must raise SchemaEmptyParserError.
        self.device = Mock(**self.empty_output)
        obj = ShowIsisSegmentRoutingLabelTable(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_isis_segment_routing_label_table_golden1(self):
        # Golden output parses into the expected label table.
        self.device = Mock(**self.golden_output1)
        obj = ShowIsisSegmentRoutingLabelTable(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output1)
class TestShowIsis(unittest.TestCase):
    ''' Unitest for commands:
        * show isis -> ShowIsis
    '''

    # Show full dict diffs on assertion failures.
    maxDiff = None

    # Simulated device with no output; parser must raise, not return {}.
    empty_output = {'execute.return_value': ''}

    # Scenario 1: level-1-2 router, two topologies (IPv4/IPv6 Unicast),
    # no redistribution, SRLB/SRGB not allocated.
    golden_parsed_output_1 = {
        "instance": {
            "test": {
                "process_id": "test",
                "instance": "0",
                "vrf": {
                    "default": {
                        "system_id": "3333.33ff.6666",
                        "is_levels": "level-1-2",
                        "manual_area_address": ["49.0002"],
                        "routing_area_address": ["49.0002", "49.0001"],
                        "non_stop_forwarding": "Disabled",
                        "most_recent_startup_mode": "Cold Restart",
                        "te_connection_status": "Down",
                        "topology": {
                            "IPv4 Unicast": {
                                'vrf': {
                                    'default': {
                                        "level": {
                                            1: {
                                                "generate_style": "Wide",
                                                "accept_style": "Wide",
                                                "metric": 10,
                                                "ispf_status": "Disabled",
                                            },
                                            2: {
                                                "generate_style": "Wide",
                                                "accept_style": "Wide",
                                                "metric": 10,
                                                "ispf_status": "Disabled",
                                            },
                                        },
                                        "protocols_redistributed": False,
                                        "distance": 115,
                                        "adv_passive_only": False,
                                    },
                                },
                            },
                            "IPv6 Unicast": {
                                'vrf': {
                                    'default': {
                                        "level": {
                                            1: {
                                                "metric": 10,
                                                "ispf_status": "Disabled"},
                                            2: {
                                                "metric": 10,
                                                "ispf_status": "Disabled"},
                                        },
                                        "protocols_redistributed": False,
                                        "distance": 115,
                                        "adv_passive_only": False,
                                    }
                                }
                            },
                        },
                        "interfaces": {
                            "Loopback0": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/0": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/1": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/2": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/3": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                        },
                    }
                },
            }
        }
    }

    # Raw device output matching golden_parsed_output_1.
    golden_output_1 = {'execute.return_value': '''
IS-IS Router: test
System Id: 3333.33ff.6666
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
49.0002
Routing for area address(es):
49.0002
49.0001
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
IPv4 Unicast
Level-1
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
IPv6 Unicast
Level-1
Metric: 10
ISPF status: Disabled
Level-2
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
Loopback0 is running actively (active in configuration)
GigabitEthernet0/0/0/0 is running actively (active in configuration)
GigabitEthernet0/0/0/1 is running actively (active in configuration)
GigabitEthernet0/0/0/2 is running actively (active in configuration)
GigabitEthernet0/0/0/3 is running actively (active in configuration)
'''}

    # Scenario 2: level-2-only router with redistribution (Connected, Static,
    # three OSPF processes), allocated SRLB/SRGB ranges, and suppressed /
    # passive / disabled interface states.
    golden_parsed_output_2 = {
        'instance': {
            'Cisco': {
                'process_id': 'Cisco',
                'instance': '0',
                'vrf': {
                    'default': {
                        'system_id': '1781.81ff.43c7',
                        'is_levels': 'level-2-only',
                        'manual_area_address': ['49.0000'],
                        'routing_area_address': ['49.0000'],
                        'non_stop_forwarding': 'Disabled',
                        'most_recent_startup_mode': 'Cold Restart',
                        'te_connection_status': 'Up',
                        'topology': {
                            'IPv4 Unicast': {
                                'vrf': {
                                    'default': {
                                        'level': {
                                            2: {
                                                'generate_style': 'Wide',
                                                'accept_style': 'Wide',
                                                'metric': 100000,
                                                'ispf_status': 'Disabled',
                                            },
                                        },
                                        'protocols_redistributed': True,
                                        'redistributing': ['Connected', 'Static', 'OSPF process 65001', 'OSPF process 65002', 'OSPF process 65003'],
                                        'distance': 115,
                                        'adv_passive_only': True,
                                    },
                                },
                            },
                        },
                        'srlb': {
                            'start': 15000,
                            'end': 15999,
                        },
                        'srgb': {
                            'start': 16000,
                            'end': 81534,
                        },
                        'interfaces': {
                            'Bundle-Ether1': {
                                'running_state': 'running suppressed',
                                'configuration_state': 'active in configuration',
                            },
                            'Bundle-Ether2': {
                                'running_state': 'running suppressed',
                                'configuration_state': 'active in configuration',
                            },
                            'Loopback0': {
                                'running_state': 'running passively',
                                'configuration_state': 'passive in configuration',
                            },
                            'TenGigE0/0/1/2': {
                                'running_state': 'running suppressed',
                                'configuration_state': 'active in configuration',
                            },
                            'TenGigE0/0/1/3': {
                                'running_state': 'disabled',
                                'configuration_state': 'active in configuration',
                            },
                            'TenGigE0/5/0/1': {
                                'running_state': 'disabled',
                                'configuration_state': 'active in configuration',
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw device output matching golden_parsed_output_2.
    golden_output_2 = {'execute.return_value': '''
+++ genie-Router: executing command 'show isis' +++
show isis
Mon Oct 7 16:22:11.993 EDT
IS-IS Router: Cisco
System Id: 1781.81ff.43c7
Instance Id: 0
IS Levels: level-2-only
Manual area address(es):
49.0000
Routing for area address(es):
49.0000
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Up
Topologies supported by IS-IS:
IPv4 Unicast
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 100000
ISPF status: Disabled
Redistributing:
Connected
Static
OSPF process 65001
OSPF process 65002
OSPF process 65003
Distance: 115
Advertise Passive Interface Prefixes Only: Yes
SRLB allocated: 15000 - 15999
SRGB allocated: 16000 - 81534
Interfaces supported by IS-IS:
Bundle-Ether1 is running suppressed (active in configuration)
Bundle-Ether2 is running suppressed (active in configuration)
Loopback0 is running passively (passive in configuration)
TenGigE0/0/1/2 is running suppressed (active in configuration)
TenGigE0/0/1/3 is disabled (active in configuration)
TenGigE0/5/0/1 is disabled (active in configuration)
RP/0/RSP0/CPU0:genie-Router#
'''}

    # Scenario 3: level-1-only router, two topologies, no redistribution.
    golden_parsed_output_3 = {
        'instance': {
            'test': {
                'process_id': 'test',
                'instance': '0',
                'vrf': {
                    'default': {
                        'system_id': '4444.44ff.8888',
                        'is_levels': 'level-1',
                        'manual_area_address': ['49.0002'],
                        'routing_area_address': ['49.0002'],
                        'non_stop_forwarding': 'Disabled',
                        'most_recent_startup_mode': 'Cold Restart',
                        'te_connection_status': 'Down',
                        'topology': {
                            'IPv4 Unicast': {
                                'vrf': {
                                    'default': {
                                        'level': {
                                            1: {
                                                'generate_style': 'Wide',
                                                'accept_style': 'Wide',
                                                'metric': 10,
                                                'ispf_status': 'Disabled',
                                            },
                                        },
                                        'protocols_redistributed': False,
                                        'distance': 115,
                                        'adv_passive_only': False,
                                    },
                                },
                            },
                            'IPv6 Unicast': {
                                'vrf': {
                                    'default': {
                                        'level': {
                                            1: {
                                                'metric': 10,
                                                'ispf_status': 'Disabled',
                                            },
                                        },
                                        'protocols_redistributed': False,
                                        'distance': 115,
                                        'adv_passive_only': False,
                                    },
                                },
                            },
                        },
                        'interfaces': {
                            'Loopback0': {
                                'running_state': 'running actively',
                                'configuration_state': 'active in configuration',
                            },
                            'GigabitEthernet0/0/0/0': {
                                'running_state': 'running actively',
                                'configuration_state': 'active in configuration',
                            },
                            'GigabitEthernet0/0/0/1': {
                                'running_state': 'running actively',
                                'configuration_state': 'active in configuration',
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw device output matching golden_parsed_output_3.
    golden_output_3 = {'execute.return_value': '''
show isis
Mon Nov 25 22:23:10.670 UTC
IS-IS Router: test
System Id: 4444.44ff.8888
Instance Id: 0
IS Levels: level-1
Manual area address(es):
49.0002
Routing for area address(es):
49.0002
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
IPv4 Unicast
Level-1
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
IPv6 Unicast
Level-1
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
Loopback0 is running actively (active in configuration)
GigabitEthernet0/0/0/0 is running actively (active in configuration)
GigabitEthernet0/0/0/1 is running actively (active in configuration)
'''}

    def test_show_isis_empty(self):
        # Empty output must raise SchemaEmptyParserError.
        self.device = Mock(**self.empty_output)
        obj = ShowIsis(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            parsed_output = obj.parse()

    def test_show_isis_1(self):
        # level-1-2 router scenario.
        self.device = Mock(**self.golden_output_1)
        obj = ShowIsis(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)

    def test_show_isis_2(self):
        # level-2-only router with redistribution and SRLB/SRGB.
        self.device = Mock(**self.golden_output_2)
        obj = ShowIsis(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_2)

    def test_show_isis_3(self):
        # level-1-only router scenario.
        self.device = Mock(**self.golden_output_3)
        obj = ShowIsis(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_3)
class TestShowIsisSpfLog(unittest.TestCase):
    ''' Unit Tests for command/parser
        * show isis spf-log/ShowIsisSpfLog
    '''

    # Show full dict diffs on assertion failures.
    maxDiff = None

    # Simulated device with no output; parser must raise, not return {}.
    empty_output = {'execute.return_value': ''}

    # Scenario 1: one table with date-separator rows ("--- Mon Oct 7 2019 ---");
    # each entry's start_timestamp combines that date with the row's time.
    parsed_output_1 = {
        "instance": {
            "TEST": {
                "address_family": {
                    "IPv4 Unicast": {
                        "spf_log": {
                            1: {
                                "start_timestamp": "Mon Oct 7 2019 23:12:51.401",
                                "level": 2,
                                "type": "PPFRR",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 1,
                                "triggers": "PERPREFIXFRR",
                            },
                            2: {
                                "start_timestamp": "Mon Oct 7 2019 23:27:50.960",
                                "level": 2,
                                "type": "FSPF",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 1,
                                "triggers": "PERIODIC",
                            },
                            3: {
                                "start_timestamp": "Tue Oct 8 2019 00:00:17.514",
                                "level": 2,
                                "type": "PRC",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 6,
                                "first_trigger_lsp": "bla-host1.12-34",
                                "triggers": "PREFIXBAD",
                            },
                            4: {
                                "start_timestamp": "Tue Oct 8 2019 00:02:24.523",
                                "level": 2,
                                "type": "PRC",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 6,
                                "first_trigger_lsp": "bla-host2.13-34",
                                "triggers": "PREFIXGOOD",
                            },
                            5: {
                                "start_timestamp": "Tue Oct 8 2019 00:02:25.025",
                                "level": 2,
                                "type": "PPFRR",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 1,
                                "triggers": "PERPREFIXFRR",
                            },
                            6: {
                                "start_timestamp": "Tue Oct 8 2019 08:15:04.265",
                                "level": 2,
                                "type": "PRC",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 1,
                                "first_trigger_lsp": "bla-9.blahlab-cld.12-34",
                                "triggers": "PREFIXBAD",
                            },
                            7: {
                                "start_timestamp": "Tue Oct 8 2019 08:15:04.418",
                                "level": 2,
                                "type": "PRC",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 1,
                                "first_trigger_lsp": "bla-9.blahlab-cld.12-34",
                                "triggers": "PREFIXGOOD",
                            },
                            8: {
                                "start_timestamp": "Tue Oct 8 2019 08:17:55.366",
                                "level": 2,
                                "type": "PRC",
                                "time_ms": 0,
                                "total_nodes": 64,
                                "trigger_count": 1,
                                "first_trigger_lsp": "bla-9.blahlab-cld.12-34",
                                "triggers": "PREFIXBAD",
                            },
                        }
                    }
                }
            }
        }
    }

    # Raw device output matching parsed_output_1.
    golden_output_1 = {'execute.return_value': '''
#show isis spf-log
Tue Oct 8 17:37:35.029 EDT
IS-IS TEST Level 2 IPv4 Unicast Route Calculation Log
Time Total Trig.
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
------------ ----- ----- ----- ----- -------------------- -----------------------
--- Mon Oct 7 2019 ---
23:12:51.401 PPFRR 0 64 1 PERPREFIXFRR
23:27:50.960 FSPF 0 64 1 PERIODIC
--- Tue Oct 8 2019 ---
00:00:17.514 PRC 0 64 6 bla-host1.12-34 PREFIXBAD
00:02:24.523 PRC 0 64 6 bla-host2.13-34 PREFIXGOOD
00:02:25.025 PPFRR 0 64 1 PERPREFIXFRR
08:15:04.265 PRC 0 64 1 bla-9.blahlab-cld.12-34 PREFIXBAD
08:15:04.418 PRC 0 64 1 bla-9.blahlab-cld.12-34 PREFIXGOOD
08:17:55.922 PRC 0 64 1 bla-9.blahlab-cld.12-34 PREFIXBAD
'''}

    # Scenario 2: two tables for the same instance/level; entry numbering
    # continues across tables. Dates are taken from the documentation sample
    # ("Thurs" spelling comes from the device output, not a typo here).
    parsed_output_2 = {
        "instance": {
            "1": {
                "address_family": {
                    "IPv4 Unicast": {
                        "spf_log": {
                            1: {
                                "start_timestamp": "Thurs Aug 19 2004 12:00:50.787",
                                "level": 1,
                                "type": "FSPF",
                                "time_ms": 1,
                                "total_nodes": 1,
                                "trigger_count": 3,
                                "first_trigger_lsp": "ensoft-grs7.00-00",
                                "triggers": "LSPHEADER TLVCODE",
                            },
                            2: {
                                "start_timestamp": "Thurs Aug 19 2004 12:00:52.846",
                                "level": 1,
                                "type": "FSPF",
                                "time_ms": 1,
                                "total_nodes": 1,
                                "trigger_count": 1,
                                "first_trigger_lsp": "ensoft-grs7.00-00",
                                "triggers": "LSPHEADER",
                            },
                            3: {
                                "start_timestamp": "Thurs Aug 19 2004 12:00:56.049",
                                "level": 1,
                                "type": "FSPF",
                                "time_ms": 1,
                                "total_nodes": 1,
                                "trigger_count": 1,
                                "first_trigger_lsp": "ensoft-grs7.00-00",
                                "triggers": "TLVCODE",
                            },
                            4: {
                                "start_timestamp": "Thurs Aug 19 2004 12:01:02.620",
                                "level": 1,
                                "type": "FSPF",
                                "time_ms": 1,
                                "total_nodes": 1,
                                "trigger_count": 2,
                                "first_trigger_lsp": "ensoft-grs7.00-00",
                                "triggers": "NEWADJ LINKTLV",
                            },
                            5: {
                                "start_timestamp": "Mon Aug 19 2004 12:00:50.790",
                                "level": 1,
                                "type": "FSPF",
                                "time_ms": 0,
                                "total_nodes": 1,
                                "trigger_count": 4,
                                "first_trigger_lsp": "ensoft-grs7.00-00",
                                "triggers": "LSPHEADER TLVCODE",
                            },
                            6: {
                                "start_timestamp": "Mon Aug 19 2004 12:00:54.043",
                                "level": 1,
                                "type": "FSPF",
                                "time_ms": 1,
                                "total_nodes": 1,
                                "trigger_count": 2,
                                "first_trigger_lsp": "ensoft-grs7.00-00",
                                "triggers": "NEWADJ LSPHEADER",
                            },
                            7: {
                                "start_timestamp": "Mon Aug 19 2004 12:00:55.922",
                                "level": 1,
                                "type": "FSPF",
                                "time_ms": 1,
                                "total_nodes": 2,
                                "trigger_count": 1,
                                "first_trigger_lsp": "ensoft-grs7.00-00",
                                "triggers": "NEWLSPO",
                            },
                        }
                    }
                }
            }
        }
    }

    # From ncs5k/ncs6k/asr9k documentation
    golden_output_2 = {'execute.return_value': '''
# show isis spf-log
IS-IS 1 Level 1 IPv4 Unicast Route Calculation Log
Time Total Trig
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
----------- ---- ---- ----- ----- ----- ------- --- --------
--- Thurs Aug 19 2004 ---
12:00:50.787 FSPF 1 1 3 ensoft-grs7.00-00 LSPHEADER TLVCODE
12:00:52.846 FSPF 1 1 1 ensoft-grs7.00-00 LSPHEADER
12:00:56.049 FSPF 1 1 1 ensoft-grs7.00-00 TLVCODE
12:01:02.620 FSPF 1 1 2 ensoft-grs7.00-00 NEWADJ LINKTLV
IS-IS 1 Level 1 IPv4 Unicast Route Calculation Log
Time Total Trig
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
----------- ---- ---- ----- ----- ----- ------- --- --------
--- Mon Aug 19 2004 ---
12:00:50.790 FSPF 0 1 4 ensoft-grs7.00-00 LSPHEADER TLVCODE
12:00:54.043 FSPF 1 1 2 ensoft-grs7.00-00 NEWADJ LSPHEADER
12:00:55.922 FSPF 1 2 1 ensoft-grs7.00-00 NEWLSPO
'''}

    def test_empty_output(self):
        # Empty output must raise SchemaEmptyParserError.
        device = Mock(**self.empty_output)
        obj = ShowIsisSpfLog(device=device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_output_1(self):
        # Single-table log with date separators.
        device = Mock(**self.golden_output_1)
        obj = ShowIsisSpfLog(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.parsed_output_1)

    def test_golden_output_2(self):
        # Two consecutive tables; entry numbering continues across them.
        device = Mock(**self.golden_output_2)
        obj = ShowIsisSpfLog(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.parsed_output_2)
class TestShowIsisSpfLogDetail(unittest.TestCase):
    ''' Unit tests for commands/parsers
        * show isis spf-log detail/ShowIsisSpfLogDetail
    '''

    # Show full dict diffs on assertion failures.
    maxDiff = None

    # Simulated device with no output; parser must raise, not return {}.
    empty_output = {'execute.return_value': ''}

    # Scenario 1: single detailed entry including per-priority
    # Results / Prefixes (Items) / Prefixes (Routes) breakdowns.
    parsed_output_1 = {
        'instance': {
            'isp': {
                'address_family': {
                    'IPv4 Unicast': {
                        'spf_log': {
                            1: {
                                'type': 'FSPF',
                                'time_ms': 1,
                                'level': 1,
                                'total_nodes': 1,
                                'trigger_count': 1,
                                'first_trigger_lsp': '12a5.00-00',
                                'triggers': 'NEWLSP0',
                                'start_timestamp': 'Mon Aug 16 2004 19:25:35.140',
                                'delay': {
                                    'since_first_trigger_ms': 51,
                                },
                                'spt_calculation': {
                                    'cpu_time_ms': 0,
                                    'real_time_ms': 0,
                                },
                                'prefix_update': {
                                    'cpu_time_ms': 1,
                                    'real_time_ms': 1,
                                },
                                'new_lsp_arrivals': 0,
                                'next_wait_interval_ms': 200,
                                'results': {
                                    'nodes': {
                                        'reach': 1,
                                        'unreach': 0,
                                        'total': 1,
                                    },
                                    'prefixes': {
                                        'items': {
                                            'critical_priority': {
                                                'reach': 0,
                                                'unreach': 0,
                                                'total': 0,
                                            },
                                            'high_priority': {
                                                'reach': 0,
                                                'unreach': 0,
                                                'total': 0,
                                            },
                                            'medium_priority': {
                                                'reach': 0,
                                                'unreach': 0,
                                                'total': 0,
                                            },
                                            'low_priority': {
                                                'reach': 0,
                                                'unreach': 0,
                                                'total': 0,
                                            },
                                            'all_priority': {
                                                'reach': 0,
                                                'unreach': 0,
                                                'total': 0,
                                            },
                                        },
                                        'routes': {
                                            'critical_priority': {
                                                'reach': 0,
                                                'total': 0,
                                            },
                                            'high_priority': {
                                                'reach': 0,
                                                'total': 0,
                                            },
                                            'medium_priority': {
                                                'reach': 0,
                                                'total': 0,
                                            },
                                            'low_priority': {
                                                'reach': 0,
                                                'total': 0,
                                            },
                                            'all_priority': {
                                                'reach': 0,
                                                'total': 0,
                                            },
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    # Raw device output matching parsed_output_1.
    golden_output_1 = {'execute.return_value': '''
# show isis spf-log detail
ISIS isp Level 1 IPv4 Unicast Route Calculation Log
Time Total Trig
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
Mon Aug 16 2004
19:25:35.140 FSPF 1 1 1 12a5.00-00 NEWLSP0
Delay: 51ms (since first trigger)
SPT Calculation
CPU Time: 0ms
Real Time: 0ms
Prefix Updates
CPU Time: 1ms
Real Time: 1ms
New LSP Arrivals: 0
Next Wait Interval: 200ms
Results
Reach Unreach Total
Nodes: 1 0 1
Prefixes (Items)
Critical Priority: 0 0 0
High Priority: 0 0 0
Medium Priority 0 0 0
Low Priority 0 0 0
All Priorities 0 0 0
Prefixes (Routes)
Critical Priority: 0 - 0
High Priority: 0 - 0
Medium Priority 0 - 0
Low Priority: 0 - 0
All Priorities 0 - 0
'''}

    # Scenario 2: newer output format with SR uloop / Interrupted /
    # RIB Batches / Timings sections.
    golden_output_2 = {'execute.return_value': '''
show isis spf-log detail
Mon Oct 21 10:41:33.540 EDT
IS-IS Genie Level 2 IPv4 Unicast Route Calculation Log
Time Total Trig.
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
------------ ----- ----- ----- ----- -------------------- -----------------------
--- Sun Oct 20 2019 ---
15:53:18.505 PPFRR 5 71 1 PERPREFIXFRR
Delay: 0ms (since first trigger)
New LSP Arrivals: 0
SR uloop: No
Next Wait Interval: 0ms
Interrupted: No
RIB Batches: 0
Timings (ms): +--Total--+
Real CPU
SPT Calculation: 5 5
Route Update: 0 0
----- -----
Full Calculation: 5 5
16:08:18.055 FSPF 0 71 1 PERIODIC
Delay: 50ms (since first trigger)
899545ms (since end of last calculation)
New LSP Arrivals: 0
SR uloop: No
Next Wait Interval: 150ms
RIB Batches: 0 (0 critical, 0 high, 0 medium, 0 low)
Timings (ms): +--Total--+
Real CPU
SPT Calculation: 0 0
Route Update: 0 0
----- -----
Full Calculation: 0 0
16:08:18.555 PPFRR 5 71 1 PERPREFIXFRR
Delay: 0ms (since first trigger)
500ms (since end of last calculation)
New LSP Arrivals: 0
SR uloop: No
Next Wait Interval: 0ms
Interrupted: No
RIB Batches: 0
Timings (ms): +--Total--+
Real CPU
SPT Calculation: 5 5
Route Update: 0 0
----- -----
Full Calculation: 5 5
'''}

    # Expected parse of golden_output_2.
    # NOTE(review): the 'start_timestamp' values below ('Timestamp Type
    # 15:53:18.505', 'Real CPU 16:08:18.055', ...) contain column-header
    # text instead of a date prefix — this looks like header lines leaking
    # into the timestamp capture; confirm whether the parser (and these
    # expectations) are intentional.
    parsed_output_2 = {
        'instance': {
            'Genie': {
                'address_family': {
                    'IPv4 Unicast': {
                        'spf_log': {
                            1: {
                                'type': 'PPFRR',
                                'time_ms': 5,
                                'level': 2,
                                'total_nodes': 71,
                                'trigger_count': 1,
                                'triggers': 'PERPREFIXFRR',
                                'start_timestamp': 'Timestamp Type 15:53:18.505',
                                'delay': {
                                    'since_first_trigger_ms': 0,
                                },
                                'new_lsp_arrivals': 0,
                                'sr_uloop': 'No',
                                'next_wait_interval_ms': 0,
                                'interrupted': 'No',
                                'rib_batches': {
                                    'total': '0',
                                },
                                'spt_calculation': {
                                    'cpu_time_ms': 5,
                                    'real_time_ms': 5,
                                },
                                'prefix_update': {
                                    'cpu_time_ms': 0,
                                    'real_time_ms': 0,
                                },
                                'full_calculation': {
                                    'cpu_time_ms': 5,
                                    'real_time_ms': 5,
                                },
                            },
                            2: {
                                'type': 'FSPF',
                                'time_ms': 0,
                                'level': 2,
                                'total_nodes': 71,
                                'trigger_count': 1,
                                'triggers': 'PERIODIC',
                                'start_timestamp': 'Real CPU 16:08:18.055',
                                'delay': {
                                    'since_first_trigger_ms': 50,
                                    'since_end_of_last_calculation': 899545,
                                },
                                'new_lsp_arrivals': 0,
                                'sr_uloop': 'No',
                                'next_wait_interval_ms': 150,
                                'rib_batches': {
                                    'total': '0',
                                    'critical': '0',
                                    'high': '0',
                                    'medium': '0',
                                    'low': '0',
                                },
                                'spt_calculation': {
                                    'cpu_time_ms': 0,
                                    'real_time_ms': 0,
                                },
                                'prefix_update': {
                                    'cpu_time_ms': 0,
                                    'real_time_ms': 0,
                                },
                                'full_calculation': {
                                    'cpu_time_ms': 0,
                                    'real_time_ms': 0,
                                },
                            },
                            3: {
                                'type': 'PPFRR',
                                'time_ms': 5,
                                'level': 2,
                                'total_nodes': 71,
                                'trigger_count': 1,
                                'triggers': 'PERPREFIXFRR',
                                'start_timestamp': 'Real CPU 16:08:18.555',
                                'delay': {
                                    'since_first_trigger_ms': 0,
                                    'since_end_of_last_calculation': 500,
                                },
                                'new_lsp_arrivals': 0,
                                'sr_uloop': 'No',
                                'next_wait_interval_ms': 0,
                                'interrupted': 'No',
                                'rib_batches': {
                                    'total': '0',
                                },
                                'spt_calculation': {
                                    'cpu_time_ms': 5,
                                    'real_time_ms': 5,
                                },
                                'prefix_update': {
                                    'cpu_time_ms': 0,
                                    'real_time_ms': 0,
                                },
                                'full_calculation': {
                                    'cpu_time_ms': 5,
                                    'real_time_ms': 5,
                                },
                            },
                        },
                    },
                },
            },
        },
    }

    def test_empty_output(self):
        # Empty output must raise SchemaEmptyParserError.
        device = Mock(**self.empty_output)
        obj = ShowIsisSpfLogDetail(device=device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_output_1(self):
        # Classic detailed output with per-priority results tables.
        device = Mock(**self.golden_output_1)
        obj = ShowIsisSpfLogDetail(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.parsed_output_1)

    def test_golden_output_2(self):
        # Newer detailed output with SR uloop / RIB batches / timings.
        device = Mock(**self.golden_output_2)
        obj = ShowIsisSpfLogDetail(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.parsed_output_2)
class TestIsisHostname(unittest.TestCase):
    ''' Unit tests for commands:
        * show isis hostname / ShowIsisHostname
        * show isis instance {instance} hostname / ShowIsisHostname
    '''

    # Show full dict diffs on assertion failures.
    maxDiff = None

    # Simulated device with no output; parser must raise, not return {}.
    empty_output = {'execute.return_value': ''}

    # Scenario 1: level-2-only hostnames; the '*'-marked row is the local
    # router ('local_router': True).
    golden_parsed_output_1 = {
        'isis': {
            'TEST1': {
                'vrf': {
                    'default': {
                        'level': {
                            2: {
                                'system_id': {
                                    '5286.44ff.91b9': {
                                        'dynamic_hostname': 'host-1.bla-site3'},
                                    '9839.23ff.9c50': {
                                        'dynamic_hostname': 'host3-bla'},
                                    '3549.63ff.9ab5': {
                                        'dynamic_hostname': 'abc-3.bla-site4'},
                                    '0670.70ff.b1b1': {
                                        'dynamic_hostname': 'host2-abc'},
                                    '9853.99ff.fb21': {
                                        'dynamic_hostname': 'abc2-xyz',
                                        'local_router': True}}}}}}}}}

    # Raw device output matching golden_parsed_output_1.
    golden_output_1 = {'execute.return_value': '''
show isis hostname
Thu Oct 3 10:53:16.534 EDT
IS-IS TEST1 hostnames
Level System ID Dynamic Hostname
2 5286.44ff.91b9 host-1.bla-site3
2 9839.23ff.9c50 host3-bla
2 3549.63ff.9ab5 abc-3.bla-site4
2 0670.70ff.b1b1 host2-abc
2 * 9853.99ff.fb21 abc2-xyz
'''}

    # Scenario 2: mixed levels; a "1,2" row is expected to appear under
    # both level 1 and level 2 in the parsed structure.
    golden_parsed_output_2 = {
        "isis": {
            "test": {
                "vrf": {
                    "default": {
                        "level": {
                            2: {
                                "system_id": {
                                    "2222.22ff.4444": {
                                        "dynamic_hostname": "R2"},
                                    "8888.88ff.1111": {
                                        "dynamic_hostname": "R8"},
                                    "7777.77ff.eeee": {
                                        "dynamic_hostname": "R7"},
                                    "3333.33ff.6666": {
                                        "dynamic_hostname": "R3",
                                        "local_router": True,
                                    },
                                    "5555.55ff.aaaa": {
                                        "dynamic_hostname": "R5"},
                                    "9999.99ff.3333": {
                                        "dynamic_hostname": "R9"},
                                }
                            },
                            1: {
                                "system_id": {
                                    "4444.44ff.8888": {
                                        "dynamic_hostname": "R4"},
                                    "6666.66ff.cccc": {
                                        "dynamic_hostname": "R6"},
                                    "7777.77ff.eeee": {
                                        "dynamic_hostname": "R7"},
                                    "3333.33ff.6666": {
                                        "dynamic_hostname": "R3",
                                        "local_router": True,
                                    },
                                    "5555.55ff.aaaa": {
                                        "dynamic_hostname": "R5"},
                                }
                            },
                        }
                    }
                }
            }
        }
    }

    # Raw device output matching golden_parsed_output_2.
    golden_output_2 = {'execute.return_value': '''
show isis hostname
IS-IS test hostnames
Level System ID Dynamic Hostname
2 2222.22ff.4444 R2
1 4444.44ff.8888 R4
1 6666.66ff.cccc R6
2 8888.88ff.1111 R8
1,2 7777.77ff.eeee R7
1,2 * 3333.33ff.6666 R3
1,2 5555.55ff.aaaa R5
2 9999.99ff.3333 R9
'''}

    def test_empty_output(self):
        # Empty output must raise SchemaEmptyParserError.
        self.device = Mock(**self.empty_output)
        obj = ShowIsisHostname(device=self.device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_output_1(self):
        # Level-2-only hostname table.
        self.device = Mock(**self.golden_output_1)
        obj = ShowIsisHostname(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)

    def test_golden_output_2(self):
        # Mixed-level hostname table with '1,2' rows.
        self.device = Mock(**self.golden_output_2)
        obj = ShowIsisHostname(device=self.device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_2)
class TestShowIsisStatistics(unittest.TestCase):
''' Unit tests for commands/parsers
* show isis statistics/ShowIsisStatistics
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
parsed_output_1 = {
"isis": {
"test": {
"psnp_cache": {
"hits": 21,
"tries": 118},
"csnp_cache": {
"hits": 1398,
"tries": 1501,
"updates": 204},
"lsp": {
"checksum_errors_received": 0,
"dropped": 0},
"snp": {
"dropped": 0},
"upd": {
"max_queue_size": 3,
"queue_size": 0},
"transmit_time": {
"hello": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 66473,
"rate_per_sec": 15,
},
"csnp": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 45979,
"rate_per_sec": 2,
},
"psnp": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 4113,
"rate_per_sec": 0,
},
"lsp": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 14392,
"rate_per_sec": 0,
},
},
"process_time": {
"hello": {
"average_process_time_sec": 0,
"average_process_time_nsec": 51163,
"rate_per_sec": 9,
},
"csnp": {
"average_process_time_sec": 0,
"average_process_time_nsec": 26914,
"rate_per_sec": 1,
},
"psnp": {
"average_process_time_sec": 0,
"average_process_time_nsec": 39758,
"rate_per_sec": 0,
},
"lsp": {
"average_process_time_sec": 0,
"average_process_time_nsec": 52706,
"rate_per_sec": 0,
},
},
"level": {
1: {
"lsp": {
"new": 11,
"refresh": 15},
"address_family": {
"IPv4 Unicast": {
"total_spf_calculation": 18,
"full_spf_calculation": 16,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 2,
"periodic_spf_calculation": 3,
},
"IPv6 Unicast": {
"total_spf_calculation": 19,
"full_spf_calculation": 17,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 2,
"periodic_spf_calculation": 3,
},
},
},
2: {
"lsp": {
"new": 13,
"refresh": 11},
"address_family": {
"IPv4 Unicast": {
"total_spf_calculation": 23,
"full_spf_calculation": 15,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 8,
"periodic_spf_calculation": 4,
},
"IPv6 Unicast": {
"total_spf_calculation": 22,
"full_spf_calculation": 14,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 8,
"periodic_spf_calculation": 4,
},
},
},
},
"interface": {
"Loopback0": {
"level": {
1: {
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 51,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 46,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/0": {
"level": {
1: {
"hello": {
"received": 594,
"sent": 593},
"dr": {
"elections": 1},
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 51,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"hello": {
"received": 1779,
"sent": 594},
"dr": {
"elections": 1},
"lsps_sourced": {
"sent": 63,
"received": 7,
"flooding_duplicates": 0,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 595,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/1": {
"level": {
1: {
"hello": {
"received": 1294,
"sent": 604},
"dr": {
"elections": 5},
"lsps_sourced": {
"sent": 47,
"received": 15,
"flooding_duplicates": 8,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 339,
"received": 0},
"psnp": {
"sent": 0,
"received": 1},
},
2: {
"hello": {
"received": 724,
"sent": 281},
"dr": {
"elections": 5},
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 42,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/2": {
"level": {
1: {
"hello": {
"received": 1739,
"sent": 572},
"dr": {
"elections": 3},
"lsps_sourced": {
"sent": 51,
"received": 31,
"flooding_duplicates": 0,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 567,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"hello": {
"received": 597,
"sent": 0},
"dr": {
"elections": 1},
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 46,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/3": {
"level": {
1: {
"hello": {
"received": 598,
"sent": 1115},
"dr": {
"elections": 3},
"lsps_sourced": {
"sent": 38,
"received": 26,
"flooding_duplicates": 5,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 370},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"hello": {
"received": 596,
"sent": 1113},
"dr": {
"elections": 3},
"lsps_sourced": {
"sent": 18,
"received": 39,
"flooding_duplicates": 3,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 370},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
},
}
}
}
golden_output_1 = {'execute.return_value': '''
IS-IS test statistics:
Fast PSNP cache (hits/tries): 21/118
Fast CSNP cache (hits/tries): 1398/1501
Fast CSNP cache updates: 204
LSP checksum errors received: 0
LSP Dropped: 0
SNP Dropped: 0
UPD Max Queue size: 3
UPD Queue size: 0
Average transmit times and rate:
Hello: 0 s, 66473 ns, 15/s
CSNP: 0 s, 45979 ns, 2/s
PSNP: 0 s, 4113 ns, 0/s
LSP: 0 s, 14392 ns, 0/s
Average process times and rate:
Hello: 0 s, 51163 ns, 9/s
CSNP: 0 s, 26914 ns, 1/s
PSNP: 0 s, 39758 ns, 0/s
LSP: 0 s, 52706 ns, 0/s
Level-1:
LSPs sourced (new/refresh): 11/15
IPv4 Unicast
Total SPF calculations : 18
Full SPF calculations : 16
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 2
Periodic SPF calculations : 3
IPv6 Unicast
Total SPF calculations : 19
Full SPF calculations : 17
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 2
Periodic SPF calculations : 3
Level-2:
LSPs sourced (new/refresh): 13/11
IPv4 Unicast
Total SPF calculations : 23
Full SPF calculations : 15
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 8
Periodic SPF calculations : 4
IPv6 Unicast
Total SPF calculations : 22
Full SPF calculations : 14
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 8
Periodic SPF calculations : 4
Interface Loopback0:
Level-1 LSPs (sent/rcvd) : 0/0
Level-1 CSNPs (sent/rcvd) : 0/0
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 51
Level-1 LSPs Arrival Time Throttled : 0
Level-2 LSPs (sent/rcvd) : 0/0
Level-2 CSNPs (sent/rcvd) : 0/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 46
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/0:
Level-1 Hellos (sent/rcvd): 594/593
Level-1 DR Elections : 1
Level-1 LSPs (sent/rcvd) : 0/0
Level-1 CSNPs (sent/rcvd) : 0/0
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 51
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 1779/594
Level-2 DR Elections : 1
Level-2 LSPs (sent/rcvd) : 63/7
Level-2 CSNPs (sent/rcvd) : 595/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 0
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/1:
Level-1 Hellos (sent/rcvd): 1294/604
Level-1 DR Elections : 5
Level-1 LSPs (sent/rcvd) : 47/15
Level-1 CSNPs (sent/rcvd) : 339/0
Level-1 PSNPs (sent/rcvd) : 0/1
Level-1 LSP Flooding Duplicates : 8
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 724/281
Level-2 DR Elections : 5
Level-2 LSPs (sent/rcvd) : 0/0
Level-2 CSNPs (sent/rcvd) : 0/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 42
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/2:
Level-1 Hellos (sent/rcvd): 1739/572
Level-1 DR Elections : 3
Level-1 LSPs (sent/rcvd) : 51/31
Level-1 CSNPs (sent/rcvd) : 567/0
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 0
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 597/0
Level-2 DR Elections : 1
Level-2 LSPs (sent/rcvd) : 0/0
Level-2 CSNPs (sent/rcvd) : 0/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 46
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/3:
Level-1 Hellos (sent/rcvd): 598/1115
Level-1 DR Elections : 3
Level-1 LSPs (sent/rcvd) : 38/26
Level-1 CSNPs (sent/rcvd) : 0/370
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 5
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 596/1113
Level-2 DR Elections : 3
Level-2 LSPs (sent/rcvd) : 18/39
Level-2 CSNPs (sent/rcvd) : 0/370
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 3
Level-2 LSPs Arrival Time Throttled : 0
'''}
# Expected parse of golden_output_2 (below): 'show isis statistics' for the
# "COEUR" IS-IS instance — cache counters, per-interface level-1 LSP/CSNP/PSNP
# counters, SPF calculation totals, and process/transmit timing averages.
parsed_output_2 = {
    'isis': {
        'COEUR': {
            # Fast CSNP cache (hits/tries) and update count.
            'csnp_cache': {
                'hits': 0,
                'tries': 49,
                'updates': 66,
            },
            'interface': {
                'Bundle-Ether10': {
                    'level': {
                        1: {
                            'csnp': {
                                'received': 24,
                                'sent': 24,
                            },
                            'lsps_sourced': {
                                'arrival_time_throttled': 0,
                                'flooding_duplicates': 162,
                                'received': 57776373,
                                'sent': 2218410,
                            },
                            'psnp': {
                                'received': 1576294,
                                'sent': 33297781,
                            },
                        },
                    },
                },
                'Bundle-Ether11': {
                    'level': {
                        1: {
                            'csnp': {
                                'received': 36,
                                'sent': 25,
                            },
                            'lsps_sourced': {
                                'arrival_time_throttled': 0,
                                'flooding_duplicates': 15,
                                'received': 57701052,
                                'sent': 2724240,
                            },
                            'psnp': {
                                'received': 1761310,
                                'sent': 33274400,
                            },
                        },
                    },
                },
                # Loopbacks show no per-level counters in the device output.
                'Loopback0': {
                },
                'Loopback6': {
                },
            },
            'level': {
                1: {
                    'address_family': {
                        'IPv4 Unicast': {
                            'full_spf_calculation': 331056,
                            'ispf_calculation': 0,
                            'next_hop_calculation': 4,
                            'partial_route_calculation': 891257,
                            'periodic_spf_calculation': 39298,
                            'total_spf_calculation': 1222317,
                        },
                        'IPv6 Unicast': {
                            'full_spf_calculation': 177541,
                            'ispf_calculation': 0,
                            'next_hop_calculation': 4,
                            'partial_route_calculation': 57170,
                            'periodic_spf_calculation': 43596,
                            'total_spf_calculation': 234715,
                        },
                    },
                    # "LSPs sourced (new/refresh)" counters.
                    'lsp': {
                        'new': 9140,
                        'refresh': 117187,
                    },
                },
            },
            'lsp': {
                'checksum_errors_received': 0,
                'dropped': 0,
            },
            # "Average process times and rate" per PDU type.
            'process_time': {
                'csnp': {
                    'average_process_time_nsec': 1249805,
                    'average_process_time_sec': 0,
                    'rate_per_sec': 0,
                },
                'hello': {
                    'average_process_time_nsec': 999833,
                    'average_process_time_sec': 0,
                    'rate_per_sec': 0,
                },
                'lsp': {
                    'average_process_time_nsec': 999840,
                    'average_process_time_sec': 0,
                    'rate_per_sec': 0,
                },
                'psnp': {
                    'average_process_time_nsec': 999835,
                    'average_process_time_sec': 0,
                    'rate_per_sec': 0,
                },
            },
            'psnp_cache': {
                'hits': 57508538,
                'tries': 115477425,
            },
            'snp': {
                'dropped': 0,
            },
            # "Average transmit times and rate" per PDU type.
            'transmit_time': {
                'csnp': {
                    'average_transmit_time_nsec': 0,
                    'average_transmit_time_sec': 0,
                    'rate_per_sec': 0,
                },
                'hello': {
                    'average_transmit_time_nsec': 999840,
                    'average_transmit_time_sec': 0,
                    'rate_per_sec': 0,
                },
                'lsp': {
                    'average_transmit_time_nsec': 999840,
                    'average_transmit_time_sec': 0,
                    'rate_per_sec': 0,
                },
                'psnp': {
                    'average_transmit_time_nsec': 999836,
                    'average_transmit_time_sec': 0,
                    'rate_per_sec': 0,
                },
            },
            'upd': {
                'max_queue_size': 20,
            },
        },
    },
}
# Raw 'sh isis stat' device output mocked for test_golden_output_2.
# String content is runtime test data and must stay byte-identical.
golden_output_2 = {'execute.return_value': '''
RP/0/RSP0/CPU0:XXXX#sh isis stat
Fri Sep 25 18:17:07.477 FRANCE
IS-IS COEUR statistics:
Fast PSNP cache (hits/tries): 57508538/115477425
Fast CSNP cache (hits/tries): 0/49
Fast CSNP cache updates: 66
LSP checksum errors received: 0
LSP Dropped: 0
SNP Dropped: 0
UPD Max Queue size: 20
Average transmit times and rate:
Hello: 0 s, 999840 ns, 0/s
CSNP: 0 s, 0 ns, 0/s
PSNP: 0 s, 999836 ns, 0/s
LSP: 0 s, 999840 ns, 0/s
Average process times and rate:
Hello: 0 s, 999833 ns, 0/s
CSNP: 0 s, 1249805 ns, 0/s
PSNP: 0 s, 999835 ns, 0/s
LSP: 0 s, 999840 ns, 0/s
Level-1:
LSPs sourced (new/refresh): 9140/117187
IPv4 Unicast
Total SPF calculations : 1222317
Full SPF calculations : 331056
ISPF calculations : 0
Next Hop Calculations : 4
Partial Route Calculations : 891257
Periodic SPF calculations : 39298
IPv6 Unicast
Total SPF calculations : 234715
Full SPF calculations : 177541
ISPF calculations : 0
Next Hop Calculations : 4
Partial Route Calculations : 57170
Periodic SPF calculations : 43596
Interface Bundle-Ether10:
PTP Hellos (sent/rcvd) : 26609054/5327323
LSP Retransmissions : 11
Level-1 LSPs (sent/rcvd) : 2218410/57776373
Level-1 CSNPs (sent/rcvd) : 24/24
Level-1 PSNPs (sent/rcvd) : 33297781/1576294
Level-1 LSP Flooding Duplicates : 162
Level-1 LSPs Arrival Time Throttled : 0
Interface Bundle-Ether11:
PTP Hellos (sent/rcvd) : 26608031/5327345
LSP Retransmissions : 15
Level-1 LSPs (sent/rcvd) : 2724240/57701052
Level-1 CSNPs (sent/rcvd) : 25/36
Level-1 PSNPs (sent/rcvd) : 33274400/1761310
Level-1 LSP Flooding Duplicates : 15
Level-1 LSPs Arrival Time Throttled : 0
Interface Loopback0:
Interface Loopback6:
RP/0/RSP0/CPU0:XXXX#
'''}
def test_empty_output(self):
    """Empty device output must raise SchemaEmptyParserError."""
    self.device = Mock(**self.empty_output)
    parser = ShowIsisStatistics(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_golden_output_1(self):
    """Golden output 1 must parse into parsed_output_1."""
    self.device = Mock(**self.golden_output_1)
    parser = ShowIsisStatistics(device=self.device)
    self.assertEqual(parser.parse(), self.parsed_output_1)
def test_golden_output_2(self):
    """Golden output 2 must parse into parsed_output_2."""
    self.device = Mock(**self.golden_output_2)
    parser = ShowIsisStatistics(device=self.device)
    self.assertEqual(parser.parse(), self.parsed_output_2)
class TestShowIsisProtocol(unittest.TestCase):
    ''' Unit tests for command/parser
        * show isis protocol / ShowIsisProtocol
    '''
    # Show full dict diffs on assertion failure.
    maxDiff = None

    empty_output = {'execute.return_value': ''}

    # Expected parse of golden_output_1: single instance "TEST" in the
    # default VRF, level-2-only, one IPv4 Unicast topology, SRLB/SRGB ranges
    # and per-interface running/configuration state.
    golden_parsed_output_1 = {
        "instance": {
            "TEST": {
                "process_id": "TEST",
                "instance": "0",
                "vrf": {
                    "default": {
                        "system_id": "0123.45ff.f077",
                        "is_levels": "level-2-only",
                        "manual_area_address": ["90.0000"],
                        "routing_area_address": ["90.0000"],
                        "non_stop_forwarding": "Disabled",
                        "most_recent_startup_mode": "Cold Restart",
                        "te_connection_status": "Up",
                        "topology": {
                            "IPv4 Unicast": {
                                'vrf': {
                                    'default': {
                                        "level": {
                                            2: {
                                                "generate_style": "Wide",
                                                "accept_style": "Wide",
                                                "metric": 100000,
                                                "ispf_status": "Disabled",
                                            }
                                        },
                                        "protocols_redistributed": False,
                                        "distance": 115,
                                        "adv_passive_only": True,
                                    }
                                }
                            }
                        },
                        "srlb": {
                            "start": 15000,
                            "end": 15999},
                        "srgb": {
                            "start": 16000,
                            "end": 81534},
                        "interfaces": {
                            "GigabitEthernet0/0/0/1": {
                                "running_state": "running suppressed",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/2": {
                                "running_state": "running suppressed",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/3": {
                                "running_state": "running suppressed",
                                "configuration_state": "active in configuration",
                            },
                            "Loopback0": {
                                "running_state": "running passively",
                                "configuration_state": "passive in configuration",
                            },
                            "GigabitEthernet0/0/0/4": {
                                "running_state": "running suppressed",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/5": {
                                "running_state": "running suppressed",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/6": {
                                "running_state": "disabled",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/7": {
                                "running_state": "disabled",
                                "configuration_state": "active in configuration",
                            },
                        },
                    }
                },
            }
        }
    }

    # Raw device output for golden_parsed_output_1 (byte-identical test data).
    golden_output_1 = {'execute.return_value': '''
#show isis protocol
Wed Oct 9 13:07:59.452 EDT
IS-IS Router: TEST
System Id: 0123.45ff.f077
Instance Id: 0
IS Levels: level-2-only
Manual area address(es):
90.0000
Routing for area address(es):
90.0000
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Up
Topologies supported by IS-IS:
IPv4 Unicast
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 100000
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: Yes
SRLB allocated: 15000 - 15999
SRGB allocated: 16000 - 81534
Interfaces supported by IS-IS:
GigabitEthernet0/0/0/1 is running suppressed (active in configuration)
GigabitEthernet0/0/0/2 is running suppressed (active in configuration)
GigabitEthernet0/0/0/3 is running suppressed (active in configuration)
Loopback0 is running passively (passive in configuration)
GigabitEthernet0/0/0/4 is running suppressed (active in configuration)
GigabitEthernet0/0/0/5 is running suppressed (active in configuration)
GigabitEthernet0/0/0/6 is disabled (active in configuration)
GigabitEthernet0/0/0/7 is disabled (active in configuration)
'''}

    # Expected parse of golden_output_2: three instances -- "test" (default
    # VRF), "test1" (VRF1), and "test2" (VRF1, system ID not configured /
    # protocol disabled, hence no topology or interface data).
    golden_parsed_output_2 = {
        "instance": {
            "test": {
                "process_id": "test",
                "instance": "0",
                "vrf": {
                    "default": {
                        "system_id": "2222.22ff.4444",
                        "is_levels": "level-1-2",
                        "manual_area_address": ["49.0001"],
                        "routing_area_address": ["49.0001"],
                        "non_stop_forwarding": "Disabled",
                        "most_recent_startup_mode": "Cold Restart",
                        "te_connection_status": "Down",
                        "topology": {
                            "IPv4 Unicast": {
                                "vrf": {
                                    "default": {
                                        "level": {
                                            1: {
                                                "generate_style": "Wide",
                                                "accept_style": "Wide",
                                                "metric": 10,
                                                "ispf_status": "Disabled",
                                            },
                                            2: {
                                                "generate_style": "Wide",
                                                "accept_style": "Wide",
                                                "metric": 10,
                                                "ispf_status": "Disabled",
                                            },
                                        },
                                        "protocols_redistributed": False,
                                        "distance": 115,
                                        "adv_passive_only": False,
                                    }
                                }
                            },
                            "IPv6 Unicast": {
                                "vrf": {
                                    "default": {
                                        "level": {
                                            1: {
                                                "metric": 10,
                                                "ispf_status": "Disabled"},
                                            2: {
                                                "metric": 10,
                                                "ispf_status": "Disabled"},
                                        },
                                        "protocols_redistributed": False,
                                        "distance": 115,
                                        "adv_passive_only": False,
                                    }
                                }
                            },
                        },
                        "interfaces": {
                            "Loopback0": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/0.115": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/1.115": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                        },
                    }
                },
            },
            "test1": {
                "process_id": "test1",
                "instance": "0",
                "vrf": {
                    "VRF1": {
                        "system_id": "2222.22ff.4444",
                        "is_levels": "level-1-2",
                        "manual_area_address": ["49.0001"],
                        "routing_area_address": ["49.0001"],
                        "non_stop_forwarding": "Disabled",
                        "most_recent_startup_mode": "Cold Restart",
                        "te_connection_status": "Down",
                        "topology": {
                            "IPv4 Unicast": {
                                "vrf": {
                                    "VRF1": {
                                        "level": {
                                            1: {
                                                "generate_style": "Wide",
                                                "accept_style": "Wide",
                                                "metric": 10,
                                                "ispf_status": "Disabled",
                                            },
                                            2: {
                                                "generate_style": "Wide",
                                                "accept_style": "Wide",
                                                "metric": 10,
                                                "ispf_status": "Disabled",
                                            },
                                        },
                                        "protocols_redistributed": False,
                                        "distance": 115,
                                        "adv_passive_only": False,
                                    }
                                }
                            },
                            "IPv6 Unicast": {
                                "vrf": {
                                    "VRF1": {
                                        "level": {
                                            1: {
                                                "metric": 10,
                                                "ispf_status": "Disabled"},
                                            2: {
                                                "metric": 10,
                                                "ispf_status": "Disabled"},
                                        },
                                        "protocols_redistributed": False,
                                        "distance": 115,
                                        "adv_passive_only": False,
                                    }
                                }
                            },
                        },
                        "interfaces": {
                            "Loopback300": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/0.415": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                            "GigabitEthernet0/0/0/1.415": {
                                "running_state": "running actively",
                                "configuration_state": "active in configuration",
                            },
                        },
                    }
                },
            },
            "test2": {
                "process_id": "test2",
                "instance": "0",
                "vrf": {
                    "VRF1": {
                        "system_id": "0000.0000.0000",
                        "is_levels": "level-1-2",
                        "non_stop_forwarding": "Disabled",
                        "most_recent_startup_mode": "Cold Restart",
                        "te_connection_status": "Down",
                    }
                },
            },
        }
    }

    # Raw device output for golden_parsed_output_2 (byte-identical test data).
    golden_output_2 = {'execute.return_value': '''
# show isis protocol
IS-IS Router: test
System Id: 2222.22ff.4444
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
49.0001
Routing for area address(es):
49.0001
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
IPv4 Unicast
Level-1
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
IPv6 Unicast
Level-1
Metric: 10
ISPF status: Disabled
Level-2
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
Loopback0 is running actively (active in configuration)
GigabitEthernet0/0/0/0.115 is running actively (active in configuration)
GigabitEthernet0/0/0/1.115 is running actively (active in configuration)
IS-IS Router: test1
VRF context: VRF1
System Id: 2222.22ff.4444
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
49.0001
Routing for area address(es):
49.0001
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
IPv4 Unicast VRF VRF1
Level-1
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
IPv6 Unicast VRF VRF1
Level-1
Metric: 10
ISPF status: Disabled
Level-2
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
Loopback300 is running actively (active in configuration)
GigabitEthernet0/0/0/0.415 is running actively (active in configuration)
GigabitEthernet0/0/0/1.415 is running actively (active in configuration)
IS-IS Router: test2
VRF context: VRF1
System Id: 0000.0000.0000 (Not configured, protocol disabled)
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
Routing for area address(es):
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
none
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
'''}

    def test_empty_output(self):
        # Empty CLI output must raise SchemaEmptyParserError.
        device = Mock(**self.empty_output)
        obj = ShowIsisProtocol(device=device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_parsed_output_1(self):
        # Single-instance, default-VRF output.
        device = Mock(**self.golden_output_1)
        obj = ShowIsisProtocol(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)

    def test_golden_parsed_output_2(self):
        # Multi-instance output including VRF contexts and a disabled process.
        device = Mock(**self.golden_output_2)
        obj = ShowIsisProtocol(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_2)
class TestShowIsisLspLog(unittest.TestCase):
    ''' UT for commands/parsers:
        * show isis lsp-log / ShowIsisLspLog
    '''
    # Show full dict diffs on assertion failure.
    maxDiff = None

    empty_output = {'execute.return_value': ''}

    # Expected parse of golden_output_1: IOS-XR style log with "--- <date> ---"
    # separators; entries are numbered sequentially across the whole log.
    golden_parsed_output_1 = {
        "instance": {
            "TEST": {
                "lsp_log": {
                    1: {
                        "count": 1,
                        "level": 2,
                        "triggers": "IPEXT",
                        "received_timestamp": "Thu Sep 26 2019 09:39:16.648",
                    },
                    2: {
                        "count": 1,
                        "level": 2,
                        "triggers": "IPEXT",
                        "received_timestamp": "Thu Sep 26 2019 10:29:02.303",
                    },
                    3: {
                        "count": 1,
                        "level": 2,
                        "triggers": "IPEXT",
                        "received_timestamp": "Mon Sep 30 2019 00:00:17.274",
                    },
                    4: {
                        "count": 1,
                        "level": 2,
                        "triggers": "IPEXT",
                        "received_timestamp": "Mon Sep 30 2019 00:02:25.263",
                    },
                    5: {
                        "count": 2,
                        "level": 2,
                        # Short form "BE2" expands to the full interface name.
                        "interface": "Bundle-Ether2",
                        "triggers": "DELADJ",
                        "received_timestamp": "Fri Oct 4 2019 16:10:11.734",
                    },
                    6: {
                        "count": 2,
                        "level": 2,
                        "interface": "Bundle-Ether2",
                        "triggers": "ADJSIDADD",
                        "received_timestamp": "Fri Oct 4 2019 16:17:45.821",
                    },
                }
            }
        }
    }

    # Raw device output for golden_parsed_output_1 (byte-identical test data).
    golden_output_1 = {'execute.return_value': '''
#show isis lsp-log
Tue Oct 8 17:38:16.254 EDT
IS-IS TEST Level 2 LSP log
When Count Interface Triggers
--- Thu Sep 26 2019 ---
09:39:16.648 1 IPEXT
10:29:02.303 1 IPEXT
--- Mon Sep 30 2019 ---
00:00:17.274 1 IPEXT
00:02:25.263 1 IPEXT
--- Fri Oct 4 2019 ---
16:10:11.734 2 BE2 DELADJ
16:17:45.821 2 BE2 ADJSIDADD
'''}

    # Expected parse of golden_output_2: relative timestamps, level-1 and
    # level-2 sections of the same "isp" instance numbered consecutively.
    golden_parsed_output_2 = {
        "instance": {
            "isp": {
                "lsp_log": {
                    1: {
                        "count": 1,
                        "level": 1,
                        "received_timestamp": "00:02:36"},
                    2: {
                        "count": 1,
                        "level": 1,
                        "triggers": "LSPREGEN",
                        "received_timestamp": "00:02:31",
                    },
                    3: {
                        "count": 1,
                        "level": 1,
                        "interface": "Port-channel4/1",
                        "triggers": "NEWADJ",
                        "received_timestamp": "00:02:24",
                    },
                    4: {
                        "count": 1,
                        "level": 1,
                        "interface": "GigabitEthernet5/0",
                        "triggers": "DIS",
                        "received_timestamp": "00:02:23",
                    },
                    5: {
                        "count": 1,
                        "level": 1,
                        "interface": "Loopback0",
                        "triggers": "IPUP",
                        "received_timestamp": "00:01:12",
                    },
                    6: {
                        "count": 1,
                        "level": 2,
                        "received_timestamp": "00:02:36"},
                    7: {
                        "count": 1,
                        "level": 2,
                        "triggers": "LSPREGEN",
                        "received_timestamp": "00:02:30",
                    },
                    8: {
                        "count": 1,
                        "level": 2,
                        "interface": "GigabitEthernet5/0",
                        "triggers": "DIS",
                        "received_timestamp": "00:02:23",
                    },
                    9: {
                        "count": 1,
                        "level": 2,
                        "interface": "Loopback0",
                        "triggers": "IPUP",
                        "received_timestamp": "00:01:12",
                    },
                }
            }
        }
    }

    # From asr9k docs
    golden_output_2 = {'execute.return_value': '''
# show isis lsp-log
ISIS isp Level 1 LSP log
When Count Interface Triggers
00:02:36 1
00:02:31 1 LSPREGEN
00:02:24 1 PO4/1 NEWADJ
00:02:23 1 Gi5/0 DIS
00:01:12 1 Lo0 IPUP
ISIS isp Level 2 LSP log
When Count Interface Triggers
00:02:36 1
00:02:30 1 LSPREGEN
00:02:23 1 Gi5/0 DIS
00:01:12 1 Lo0 IPUP
'''}

    # Expected parse of golden_output_3: the log carries no instance name, so
    # the parser keys the instance by the empty string.
    golden_parsed_output_3 = {
        "instance": {
            "": {
                "lsp_log": {
                    1: {
                        "count": 3,
                        "level": 1,
                        "triggers": "CONFIG NEWADJ DIS",
                        "received_timestamp": "07:05:18",
                    },
                    2: {
                        "count": 2,
                        "level": 1,
                        "interface": "Ethernet0",
                        "triggers": "NEWADJ DIS",
                        "received_timestamp": "07:05:13",
                    },
                    3: {
                        "count": 2,
                        "level": 2,
                        "triggers": "CONFIG NEWADJ",
                        "received_timestamp": "07:05:24",
                    },
                    4: {
                        "count": 1,
                        "level": 2,
                        "interface": "Ethernet0",
                        "triggers": "NEWADJ",
                        "received_timestamp": "07:05:23",
                    },
                    5: {
                        "count": 3,
                        "level": 2,
                        "interface": "Loopback0",
                        "triggers": "CONFIG DELADJ",
                        "received_timestamp": "07:01:39",
                    },
                }
            }
        }
    }

    # From ncs6k docs
    golden_output_3 = {'execute.return_value': '''
Router# show isis lsp-log
Level 1 LSP log
When Count Interface Triggers
07:05:18 3 CONFIG NEWADJ DIS
07:05:13 2 Ethernet0 NEWADJ DIS
Level 2 LSP log
When Count Interface Triggers
07:05:24 2 CONFIG NEWADJ
07:05:23 1 Ethernet0 NEWADJ
07:01:39 3 Loopback0 CONFIG DELADJ
'''}

    # Expected parse of golden_output_4 (same layout as output 2, different
    # trigger mix; "PO4/1"/"Lo0" expand to full interface names).
    golden_parsed_output_4 = {
        "instance": {
            "isp": {
                "lsp_log": {
                    1: {
                        "count": 1,
                        "level": 1,
                        "received_timestamp": "00:02:36"},
                    2: {
                        "count": 1,
                        "level": 1,
                        "triggers": "LSPREGEN",
                        "received_timestamp": "00:02:31",
                    },
                    3: {
                        "count": 1,
                        "level": 1,
                        "interface": "Port-channel4/1",
                        "triggers": "DELADJ",
                        "received_timestamp": "00:02:26",
                    },
                    4: {
                        "count": 1,
                        "level": 1,
                        "interface": "Port-channel4/1",
                        "triggers": "NEWADJ",
                        "received_timestamp": "00:02:24",
                    },
                    5: {
                        "count": 1,
                        "level": 2,
                        "received_timestamp": "00:02:36"},
                    6: {
                        "count": 1,
                        "level": 2,
                        "triggers": "LSPREGEN",
                        "received_timestamp": "00:02:30",
                    },
                    7: {
                        "count": 1,
                        "level": 2,
                        "interface": "Port-channel4/1",
                        "triggers": "DELADJ",
                        "received_timestamp": "00:02:26",
                    },
                    8: {
                        "count": 1,
                        "level": 2,
                        "interface": "Loopback0",
                        "triggers": "IPUP",
                        "received_timestamp": "00:01:12",
                    },
                }
            }
        }
    }

    # from ncs5k docs
    golden_output_4 = {'execute.return_value': '''
#show isis lsp-log
ISIS isp Level 1 LSP log
When Count Interface Triggers
00:02:36 1
00:02:31 1 LSPREGEN
00:02:26 1 PO4/1 DELADJ
00:02:24 1 PO4/1 NEWADJ
ISIS isp Level 2 LSP log
When Count Interface Triggers
00:02:36 1
00:02:30 1 LSPREGEN
00:02:26 1 PO4/1 DELADJ
00:01:12 1 Lo0 IPUP
'''}

    def test_empty_output(self):
        # Empty CLI output must raise SchemaEmptyParserError.
        device = Mock(**self.empty_output)
        obj = ShowIsisLspLog(device=device)
        with self.assertRaises(SchemaEmptyParserError):
            obj.parse()

    def test_golden_output_1(self):
        device = Mock(**self.golden_output_1)
        obj = ShowIsisLspLog(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_1)

    def test_golden_output_2(self):
        device = Mock(**self.golden_output_2)
        obj = ShowIsisLspLog(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_2)

    def test_golden_output_3(self):
        device = Mock(**self.golden_output_3)
        obj = ShowIsisLspLog(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_3)

    def test_golden_output_4(self):
        device = Mock(**self.golden_output_4)
        obj = ShowIsisLspLog(device=device)
        parsed_output = obj.parse()
        self.assertEqual(parsed_output, self.golden_parsed_output_4)
class TestShowIsisInterface(unittest.TestCase):
''' Unit test for commands:
* show isis interface -> ShowIsisInterface
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
parsed_output_1 = {
"instance": {
"test": {
"interface": {
"Loopback0": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 0,
"circuit_type": "level-1-2",
"media_type": "Loop",
"circuit_number": 0,
"level": {
1: {
"adjacency_count": 0,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 0,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {"protocol_state": "Up", "mtu": 1500},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["0.0.0.0"],
"global_prefix": ["10.36.3.0/24"],
},
"IPv6": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["::"],
"global_prefix": ["2001:db8:3:3:3::3/128"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 10,
},
},
"GigabitEthernet0/0/0/0": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 7,
"level": {
1: {
"adjacency_count": 0,
"lan_id": "R3.07",
"priority": {"local": "64", "dis": "none (no DIS elected)"},
"next_lan_iih_sec": 5,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 1,
"lan_id": "R3.07",
"priority": {"local": "64", "dis": "64"},
"next_lan_iih_sec": 3,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3eff.52be",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["10.2.3.3"],
"global_prefix": ["10.2.3.0/24"],
},
"IPv6": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["fe80::f816:3eff:feff:52be"],
"global_prefix": ["2001:db8:10:2::/64"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
"GigabitEthernet0/0/0/1": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 5,
"level": {
1: {
"adjacency_count": 1,
"lan_id": "R3.05",
"priority": {"local": "64", "dis": "64"},
"next_lan_iih_sec": 2,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 0,
"lan_id": "R3.05",
"priority": {"local": "64", "dis": "none (no DIS elected)"},
"next_lan_iih_sec": 6,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3eff.86bf",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["10.3.6.3"],
"global_prefix": ["10.3.6.0/24"],
},
"IPv6": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["fe80::f816:3eff:feff:86bf"],
"global_prefix": ["2001:db8:10:3::/64"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
"GigabitEthernet0/0/0/2": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 3,
"level": {
1: {
"adjacency_count": 1,
"lan_id": "R3.03",
"priority": {"local": "64", "dis": "64"},
"next_lan_iih_sec": 1,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 0,
"lan_id": "R3.03",
"priority": {"local": "64", "dis": "none (no DIS elected)"},
"next_lan_iih_sec": 6,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3eff.d6b3",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["10.3.4.3"],
"global_prefix": ["10.3.4.0/24"],
},
"IPv6": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["fe80::f816:3eff:feff:d6b3"],
"global_prefix": [
"None (No global addresses are configured)"
],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
"GigabitEthernet0/0/0/3": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 1,
"level": {
1: {
"adjacency_count": 1,
"lan_id": "R5.01",
"priority": {"local": "64", "dis": "64"},
"next_lan_iih_sec": 3,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 1,
"lan_id": "R5.01",
"priority": {"local": "64", "dis": "64"},
"next_lan_iih_sec": 2,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3eff.f442",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["10.3.5.3"],
"global_prefix": ["10.3.5.0/24"],
},
"IPv6": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["fe80::f816:3eff:feff:f442"],
"global_prefix": [
"None (No global addresses are configured)"
],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
"tunnel-te105": {
"state": "Enabled",
"adjacency_formation": "Disabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"rsi_srlg": "Registered",
"bandwidth": 0,
"circuit_type": "level-2-only",
"media_type": "P2P",
"circuit_number": 0,
"clns_io": {
"protocol_state": "Down (IMD did not notify that node exists)",
"mtu": -1,
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Disabled",
"prefix_advertisement": "Running",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "1/1/10/10 (PRI/BKP/SRTE/SRAT)",
"ldp_sync": {"level": {1: "Enabled", 2: "Enabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
}
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["10.3.5.3"],
"global_prefix": ["None (Interface is unnumbered)"],
}
},
},
}
}
}
}
# Raw 'show isis interface' CLI output fed to the parser via the mocked
# device (Mock(**fixture) sets device.execute.return_value to this string).
# NOTE(review): despite the name, this holds the *raw* device output, not a
# parsed dict — the parsed expectation lives in parsed_output_1 above.
golden_parsed_output_1 = {'execute.return_value': '''
IS-IS test Interfaces
Loopback0 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 0
Circuit Type: level-1-2
Media Type: Loop
Circuit Number: 0
Level-1
Adjacency Count: 0
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 0
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1500
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 0.0.0.0
Global Prefix(es): 10.36.3.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): ::
Global Prefix(es): 2001:db8:3:3:3::3/128
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 10 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/0 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 7
Level-1
Adjacency Count: 0
LAN ID: R3.07
Priority (Local/DIS): 64/none (no DIS elected)
Next LAN IIH in: 5 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 1
LAN ID: R3.07
Priority (Local/DIS): 64/64
Next LAN IIH in: 3 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3eff.52be
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.2.3.3
Global Prefix(es): 10.2.3.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:feff:52be
Global Prefix(es): 2001:db8:10:2::/64
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/1 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 5
Level-1
Adjacency Count: 1
LAN ID: R3.05
Priority (Local/DIS): 64/64
Next LAN IIH in: 2 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 0
LAN ID: R3.05
Priority (Local/DIS): 64/none (no DIS elected)
Next LAN IIH in: 6 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3eff.86bf
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.3.6.3
Global Prefix(es): 10.3.6.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:feff:86bf
Global Prefix(es): 2001:db8:10:3::/64
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/2 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 3
Level-1
Adjacency Count: 1
LAN ID: R3.03
Priority (Local/DIS): 64/64
Next LAN IIH in: 1 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 0
LAN ID: R3.03
Priority (Local/DIS): 64/none (no DIS elected)
Next LAN IIH in: 6 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3eff.d6b3
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.3.4.3
Global Prefix(es): 10.3.4.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:feff:d6b3
Global Prefix(es): None (No global addresses are configured)
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/3 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 1
Level-1
Adjacency Count: 1
LAN ID: R5.01
Priority (Local/DIS): 64/64
Next LAN IIH in: 3 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 1
LAN ID: R5.01
Priority (Local/DIS): 64/64
Next LAN IIH in: 2 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3eff.f442
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.3.5.3
Global Prefix(es): 10.3.5.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:feff:f442
Global Prefix(es): None (No global addresses are configured)
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
tunnel-te105 Enabled
Adjacency Formation: Disabled (CLNS I/O unavailable/down on the intf)
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
RSI SRLG: Registered
Bandwidth: 0
Circuit Type: level-2-only (Interface circuit type is level-1-2)
Media Type: P2P
Circuit Number: 0
CLNS I/O
Protocol State: Down (IMD did not notify that node exists)
MTU: Invalid (MTU invalid or too small)
IPv4 Unicast Topology: Enabled
Adjacency Formation: Disabled (CLNS I/O service unavailable on intf)
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/1/10/10 (PRI/BKP/SRTE/SRAT)
MPLS LDP Sync (L1/L2): Enabled/Enabled
LDPv4 Sync Status: Achieved
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.3.5.3
Global Prefix(es): None (Interface is unnumbered)
'''}
# Expected structure produced by ShowIsisInterface for golden_parsed_output_2
# below (instance "Genie": a Bundle-Ether with FRR/LFA detail, a disabled
# TenGigE, and an enabled TenGigE).
parsed_output_2 = {
"instance": {
"Genie": {
"interface": {
# P2P bundle interface; includes per-level FRR direct/remote/TI LFA detail
"Bundle-Ether2": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Disabled (Suppressed in IS-IS cfg)",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"rsi_srlg": "Registered",
"bandwidth": 100000000,
"circuit_type": "level-2-only",
"media_type": "P2P",
"circuit_number": 0,
"extended_circuit_number": 113,
"next_p2p_iih_in": 4,
"lsp_rexmit_queue_size": 1,
"level": {
2: {
"adjacency_count": 1,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
}
},
"clns_io": {
"protocol_state": "Up",
"mtu": 9199,
"snpa": "008a.96ff.1790",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Disabled (Intf suppressed in IS-IS cfg)",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "3/3/12/0 (PRI/BKP/SRTE/SRAT)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Enabled",
"type": "per-prefix",
"direct_lfa": {"state": "Enabled"},
"remote_lfa": {
"state": "Not Enabled",
"tie_breaker": "Default",
"line_card_disjoint": "30",
"lowest_backup_metric": "20",
"node_protecting": "40",
"primary_path": "10",
},
"ti_lfa": {
"state": "Enabled",
"tie_breaker": "Default",
"link_protecting": "Enabled",
"line_card_disjoint": "0",
"node_protecting": "100",
"srlg_disjoint": "0",
},
},
2: {
"state": "Enabled",
"type": "per-prefix",
"direct_lfa": {"state": "Enabled"},
"remote_lfa": {
"state": "Not Enabled",
"tie_breaker": "Default",
"line_card_disjoint": "30",
"lowest_backup_metric": "20",
"node_protecting": "40",
"primary_path": "10",
},
"ti_lfa": {
"state": "Enabled",
"tie_breaker": "Default",
"link_protecting": "Enabled",
"line_card_disjoint": "0",
"node_protecting": "100",
"srlg_disjoint": "0",
},
},
}
},
}
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["172.18.0.1"],
"global_prefix": ["Unknown (Intf suppressed in IS-IS cfg)"],
}
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
"underlying_interface": {"HundredGigE0/0/0/1": {"index": "0x55"}},
},
# Interface with no topologies configured: only its state is reported
"TenGigE0/0/0/0/0": {"state": "Disabled"},
"TenGigE0/0/0/4/0": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Disabled (Suppressed in IS-IS cfg)",
"ipv4_bfd": True,
"ipv6_bfd": False,
"bfd_min_interval": 250,
"bfd_multiplier": 3,
"rsi_srlg": "Registered",
"bandwidth": 10000000,
"circuit_type": "level-2-only",
"media_type": "P2P",
"circuit_number": 0,
"extended_circuit_number": 27,
"next_p2p_iih_in": 5,
"lsp_rexmit_queue_size": 0,
"level": {
2: {
"adjacency_count": 1,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
}
},
"clns_io": {
"protocol_state": "Up",
"mtu": 9199,
"snpa": "008a.96ff.131b",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Disabled (Intf suppressed in IS-IS cfg)",
"metric": {"level": {1: 10, 2: 10}},
"weight": {"level": {1: 0, 2: 0}},
"mpls": {
"mpls_max_label_stack": "3/3/12/0 (PRI/BKP/SRTE/SRAT)",
"ldp_sync": {"level": {1: "Disabled", 2: "Disabled"}},
},
"frr": {
"level": {
1: {"state": "Not Enabled", "type": "None"},
2: {"state": "Not Enabled", "type": "None"},
}
},
}
},
"address_family": {
"IPv4": {
"state": "Enabled",
"protocol_state": "Up",
"forwarding_address": ["172.16.2.133"],
"global_prefix": ["Unknown (Intf suppressed in IS-IS cfg)"],
}
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
}
}
}
}
# Raw 'show isis interface' CLI output for the second golden test (mocked as
# device.execute's return value). Includes the command echo, timestamp, and
# trailing prompt lines the parser must skip.
# NOTE(review): despite the name, this holds *raw* output; the parsed
# expectation is parsed_output_2 above.
golden_parsed_output_2 = {'execute.return_value': '''
+++ genie-Router: executing command 'show isis interface' +++
show isis interface
Mon Oct 21 10:46:56.224 EDT
IS-IS Genie Interfaces
Bundle-Ether2 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Disabled (Suppressed in IS-IS cfg)
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
RSI SRLG: Registered
Bandwidth: 100000000
Circuit Type: level-2-only (Interface circuit type is level-1-2)
Media Type: P2P
Circuit Number: 0
Extended Circuit Number: 113
Next P2P IIH in: 4 s
LSP Rexmit Queue Size: 1
Level-2
Adjacency Count: 1
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 9199
SNPA: 008a.96ff.1790
Layer-2 MCast Groups Membership:
All ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Disabled (Intf suppressed in IS-IS cfg)
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 3/3/12/0 (PRI/BKP/SRTE/SRAT)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Enabled L2 Enabled
FRR Type: per-prefix per-prefix
Direct LFA: Enabled Enabled
Remote LFA: Not Enabled Not Enabled
Tie Breaker Default Default
Line-card disjoint 30 30
Lowest backup metric 20 20
Node protecting 40 40
Primary path 10 10
TI LFA: Enabled Enabled
Tie Breaker Default Default
Link Protecting Enabled Enabled
Line-card disjoint 0 0
Node protecting 100 100
SRLG disjoint 0 0
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 172.18.0.1
Global Prefix(es): Unknown (Intf suppressed in IS-IS cfg)
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
Underlying Interface List
IfName: Hu0/0/0/1 IfIndex: 0x55
TenGigE0/0/0/0/0 Disabled (No topologies cfg on the intf)
TenGigE0/0/0/4/0 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Disabled (Suppressed in IS-IS cfg)
IPv4 BFD: Enabled
IPv6 BFD: Disabled
BFD Min Interval: 250
BFD Multiplier: 3
RSI SRLG: Registered
Bandwidth: 10000000
Circuit Type: level-2-only (Interface circuit type is level-1-2)
Media Type: P2P
Circuit Number: 0
Extended Circuit Number: 27
Next P2P IIH in: 5 s
LSP Rexmit Queue Size: 0
Level-2
Adjacency Count: 1
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 9199
SNPA: 008a.96ff.131b
Layer-2 MCast Groups Membership:
All ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Disabled (Intf suppressed in IS-IS cfg)
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 3/3/12/0 (PRI/BKP/SRTE/SRAT)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 172.16.2.133
Global Prefix(es): Unknown (Intf suppressed in IS-IS cfg)
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
RP/0/RP0/CPU0:genie-Router#
'''}
def test_empty_output(self):
    """An empty CLI response must raise SchemaEmptyParserError."""
    self.device = Mock(**self.empty_output)
    parser = ShowIsisInterface(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_golden_output_1(self):
    """First golden device output parses into the expected structure."""
    self.device = Mock(**self.golden_parsed_output_1)
    parser = ShowIsisInterface(device=self.device)
    self.assertEqual(parser.parse(), self.parsed_output_1)
def test_golden_output_2(self):
    """Second golden device output parses into the expected structure."""
    self.device = Mock(**self.golden_parsed_output_2)
    parser = ShowIsisInterface(device=self.device)
    self.assertEqual(parser.parse(), self.parsed_output_2)
class TestShowIsisDatabaseDetail(unittest.TestCase):
''' Unit tests for commands/parser:
* show isis database detail / ShowIsisDatabaseDetail
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output_1 = {
"instance": {
"test": {
"level": {
1: {
"lspid": {
"R3.00-00": {
"lsp": {
"seq_num": "0x0000000d",
"checksum": "0x0476",
"local_router": True,
"holdtime": 578,
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"ip_address": "10.36.3.3",
"extended_ipv4_reachability": {
"10.36.3.0/24": {
"ip_prefix": "10.36.3.0",
"prefix_length": "24",
"metric": 10,
},
"10.2.3.0/24": {
"ip_prefix": "10.2.3.0",
"prefix_length": "24",
"metric": 10,
},
},
"hostname": "R3",
"ipv6_address": "2001:db8:3:3:3::3",
"mt_ipv6_reachability": {
"2001:db8:3:3:3::3/128": {
"ip_prefix": "2001:db8:3:3:3::3",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:2::/64": {
"ip_prefix": "2001:db8:10:2::",
"prefix_length": "64",
"metric": 10,
},
},
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
},
"extended_is_neighbor": {
"R3.03": {"metric": 10},
"R5.01": {"metric": 10},
},
"mt_is_neighbor": {
"R3.03": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
},
"R3.03-00": {
"lsp": {
"seq_num": "0x00000007",
"checksum": "0x8145",
"local_router": False,
"holdtime": 988,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R3.00": {
"metric": 0},
"R4.00": {
"metric": 0},
},
},
"R3.05-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x7981",
"local_router": False,
"holdtime": 600,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R3.00": {
"metric": 0},
"R6.00": {
"metric": 0},
},
},
"R4.00-00": {
"lsp": {
"seq_num": "0x0000000c",
"checksum": "0x5c39",
"local_router": False,
"holdtime": 1115,
"received": 1200,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"extended_is_neighbor": {
"R3.03": {
"metric": 10},
"R4.01": {
"metric": 10},
},
"nlpid": ["0xcc", "0x8e"],
"ip_address": "10.64.4.4",
"extended_ipv4_reachability": {
"10.64.4.4/32": {
"ip_prefix": "10.64.4.4",
"prefix_length": "32",
"metric": 10,
},
"10.3.4.0/24": {
"ip_prefix": "10.3.4.0",
"prefix_length": "24",
"metric": 10,
},
},
"hostname": "R4",
"mt_is_neighbor": {
"R3.03": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R4.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ipv6_address": "2001:db8:4:4:4::4",
"mt_ipv6_reachability": {
"2001:db8:4:4:4::4/128": {
"ip_prefix": "2001:db8:4:4:4::4",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:3::/64": {
"ip_prefix": "2001:db8:10:3::",
"prefix_length": "64",
"metric": 10,
},
},
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
},
"R4.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0xf9a0",
"local_router": False,
"holdtime": 616,
"received": 1200,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R4.00": {
"metric": 0},
"R5.00": {
"metric": 0},
},
},
"R5.00-00": {
"lsp": {
"seq_num": "0x00000009",
"checksum": "0x09f9",
"local_router": False,
"holdtime": 980,
"received": 1199,
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R5",
"extended_is_neighbor": {
"R5.01": {
"metric": 10},
"R4.01": {
"metric": 10},
},
"mt_is_neighbor": {
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R4.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ip_address": "10.100.5.5",
"extended_ipv4_reachability": {
"10.100.5.5/32": {
"ip_prefix": "10.100.5.5",
"prefix_length": "32",
"metric": 10,
},
"10.3.5.0/24": {
"ip_prefix": "10.3.5.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:5:5:5::5",
"mt_ipv6_reachability": {
"2001:db8:5:5:5::5/128": {
"ip_prefix": "2001:db8:5:5:5::5",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:3::/64": {
"ip_prefix": "2001:db8:10:3::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R5.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x4ac5",
"local_router": False,
"holdtime": 521,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R3.00": {
"metric": 0},
},
},
"R5.03-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x3c38",
"local_router": False,
"holdtime": 1023,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
"R6.00-00": {
"lsp": {
"seq_num": "0x00000008",
"checksum": "0x1869",
"local_router": False,
"holdtime": 923,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"router_id": "10.144.6.6",
"ip_address": "10.144.6.6",
"mt_entries": {
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"Standard (IPv4 Unicast)": {},
},
"hostname": "R6",
"mt_is_neighbor": {
"R7.02": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
"R3.05": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
},
"extended_is_neighbor": {
"R7.02": {
"metric": 40},
"R3.05": {
"metric": 40},
},
"extended_ipv4_reachability": {
"10.144.6.0/24": {
"ip_prefix": "10.144.6.0",
"prefix_length": "24",
"metric": 1,
},
"10.6.7.0/24": {
"ip_prefix": "10.6.7.0",
"prefix_length": "24",
"metric": 40,
},
"10.3.6.0/24": {
"ip_prefix": "10.3.6.0",
"prefix_length": "24",
"metric": 40,
},
},
"mt_ipv6_reachability": {
"2001:db8:6:6:6::6/128": {
"ip_prefix": "2001:db8:6:6:6::6",
"prefix_length": "128",
"metric": 1,
},
"2001:db8:10:6::/64": {
"ip_prefix": "2001:db8:10:6::",
"prefix_length": "64",
"metric": 40,
},
},
},
"R7.00-00": {
"lsp": {
"seq_num": "0x00000008",
"checksum": "0xaba8",
"local_router": False,
"holdtime": 965,
"received": 1198,
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"router_id": "10.196.7.7",
"ip_address": "10.196.7.7",
"mt_entries": {
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"Standard (IPv4 Unicast)": {},
},
"hostname": "R7",
"mt_is_neighbor": {
"R7.02": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
"R5.03": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
},
"extended_is_neighbor": {
"R7.02": {
"metric": 40},
"R5.03": {
"metric": 40},
},
'ip_interarea': {
'10.7.8.0/24': {
'address_family': {
'ipv4 unicast': {
'metric': 40,
},
},
},
'2001:db8:10:7::/64': {
'address_family': {
'IPv6 Unicast': {
'metric': 40,
},
},
},
},
"extended_ipv4_reachability": {
"10.196.7.7/32": {
"ip_prefix": "10.196.7.7",
"prefix_length": "32",
"metric": 1,
},
"10.7.9.0/24": {
"ip_prefix": "10.7.9.0",
"prefix_length": "24",
"metric": 40,
},
},
"mt_ipv6_reachability": {
"2001:db8:7:7:7::7/128": {
"ip_prefix": "2001:db8:7:7:7::7",
"prefix_length": "128",
"metric": 1,
}
},
},
"R7.02-00": {
"lsp": {
"seq_num": "0x00000005",
"checksum": "0x8c3d",
"local_router": False,
"holdtime": 884,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R6.00": {"metric": 0},
"R7.00": {"metric": 0},
},
},
},
"total_lsp_count": 11,
"local_lsp_count": 1,
},
2: {
"lspid": {
"R2.00-00": {
"lsp": {
"seq_num": "0x00000009",
"checksum": "0x5188",
"local_router": False,
"holdtime": 1082,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0001",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R2",
"extended_is_neighbor": {
"R3.07": {
"metric": 10}},
"mt_is_neighbor": {
"R3.07": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"}
},
"ip_address": "10.16.2.2",
"extended_ipv4_reachability": {
"10.16.2.2/32": {
"ip_prefix": "10.16.2.2",
"prefix_length": "32",
"metric": 10,
},
"10.1.2.0/24": {
"ip_prefix": "10.1.2.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:2:2:2::2",
"mt_ipv6_reachability": {
"2001:db8:2:2:2::2/128": {
"ip_prefix": "2001:db8:2:2:2::2",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:1::/64": {
"ip_prefix": "2001:db8:10:1::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R3.00-00": {
"lsp": {
"seq_num": "0x00000011",
"checksum": "0x4c4c",
"local_router": True,
"holdtime": 979,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"extended_is_neighbor": {
"R3.07": {
"metric": 10},
"R5.01": {
"metric": 10},
},
"nlpid": ["0xcc", "0x8e"],
"ip_address": "10.36.3.3",
"extended_ipv4_reachability": {
"10.36.3.0/24": {
"ip_prefix": "10.36.3.0",
"prefix_length": "24",
"metric": 10,
},
"10.2.3.0/24": {
"ip_prefix": "10.2.3.0",
"prefix_length": "24",
"metric": 10,
},
},
"hostname": "R3",
"mt_is_neighbor": {
"R3.07": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ipv6_address": "2001:db8:3:3:3::3",
"mt_ipv6_reachability": {
"2001:db8:3:3:3::3/128": {
"ip_prefix": "2001:db8:3:3:3::3",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:2::/64": {
"ip_prefix": "2001:db8:10:2::",
"prefix_length": "64",
"metric": 10,
},
},
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
},
"R3.07-00": {
"lsp": {
"seq_num": "0x00000007",
"checksum": "0x652a",
"local_router": False,
"holdtime": 604,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R3.00": {
"metric": 0},
"R2.00": {
"metric": 0},
},
},
"R5.00-00": {
"lsp": {
"seq_num": "0x0000000b",
"checksum": "0x93bc",
"local_router": False,
"holdtime": 903,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R5",
"extended_is_neighbor": {
"R5.01": {
"metric": 10},
"R5.03": {
"metric": 10},
},
"mt_is_neighbor": {
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R5.03": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ip_address": "10.100.5.5",
"extended_ipv4_reachability": {
"10.100.5.5/32": {
"ip_prefix": "10.100.5.5",
"prefix_length": "32",
"metric": 10,
},
"10.3.5.0/24": {
"ip_prefix": "10.3.5.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:5:5:5::5",
"mt_ipv6_reachability": {
"2001:db8:5:5:5::5/128": {
"ip_prefix": "2001:db8:5:5:5::5",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:3::/64": {
"ip_prefix": "2001:db8:10:3::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R5.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x6236",
"local_router": False,
"holdtime": 426,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R3.00": {
"metric": 0},
},
},
"R5.03-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x54a8",
"local_router": False,
"holdtime": 965,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
"R7.00-00": {
"lsp": {
"seq_num": "0x00000009",
"checksum": "0x7d78",
"local_router": False,
"holdtime": 766,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"router_id": "10.196.7.7",
"ip_address": "10.196.7.7",
"mt_entries": {
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"Standard (IPv4 Unicast)": {},
},
"hostname": "R7",
"mt_is_neighbor": {
"R9.01": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
"R8.01": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
},
"extended_is_neighbor": {
"R9.01": {
"metric": 40},
"R8.01": {
"metric": 40},
},
"extended_ipv4_reachability": {
"10.6.7.0/24": {
"ip_prefix": "10.6.7.0",
"prefix_length": "24",
"metric": 40,
},
"10.196.7.7/32": {
"ip_prefix": "10.196.7.7",
"prefix_length": "32",
"metric": 1,
},
},
"mt_ipv6_reachability": {
"2001:db8:10:6::/64": {
"ip_prefix": "2001:db8:10:6::",
"prefix_length": "64",
"metric": 40,
},
"2001:db8:7:7:7::7/128": {
"ip_prefix": "2001:db8:7:7:7::7",
"prefix_length": "128",
"metric": 1,
},
},
},
"R8.00-00": {
"lsp": {
"seq_num": "0x00000005",
"checksum": "0x1309",
"local_router": False,
"holdtime": 453,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0003",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R8",
"extended_is_neighbor": {
"R8.01": {
"metric": 10}},
"mt_is_neighbor": {
"R8.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"}
},
"ip_address": "10.1.8.8",
"extended_ipv4_reachability": {
"10.1.8.8/32": {
"ip_prefix": "10.1.8.8",
"prefix_length": "32",
"metric": 10,
},
"10.7.8.0/24": {
"ip_prefix": "10.7.8.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:8:8:8::8",
"mt_ipv6_reachability": {
"2001:db8:8:8:8::8/128": {
"ip_prefix": "2001:db8:8:8:8::8",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:7::/64": {
"ip_prefix": "2001:db8:10:7::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R8.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x9503",
"local_router": False,
"holdtime": 1143,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R8.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
"R9.00-00": {
"lsp": {
"seq_num": "0x00000006",
"checksum": "0xfd4e",
"local_router": False,
"holdtime": 800,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0004",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R9",
"extended_is_neighbor": {
"R9.01": {
"metric": 10}},
"mt_is_neighbor": {
"R9.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"}
},
"ip_address": "10.69.9.9",
"extended_ipv4_reachability": {
"10.69.9.9/32": {
"ip_prefix": "10.69.9.9",
"prefix_length": "32",
"metric": 10,
},
"10.7.9.0/24": {
"ip_prefix": "10.7.9.0",
"prefix_length": "24",
"metric": 10,
},
"10.9.10.0/24": {
"ip_prefix": "10.9.10.0",
"prefix_length": "24",
"metric": 10,
},
"10.10.10.10/32": {
"ip_prefix": "10.10.10.10",
"prefix_length": "32",
"metric": 20,
},
},
"ipv6_address": "2001:db8:9:9:9::9",
"mt_ipv6_reachability": {
"2001:db8:9:9:9::9/128": {
"ip_prefix": "2001:db8:9:9:9::9",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:7::/64": {
"ip_prefix": "2001:db8:10:7::",
"prefix_length": "64",
"metric": 10,
},
},
"ipv6_reachability": {
"2001:2:2:2::2/128": {
"ip_prefix": "2001:2:2:2::2",
"prefix_length": "128",
"metric": "10",
}
},
},
"R9.01-00": {
"lsp": {
"seq_num": "0x00000003",
"checksum": "0xfdce",
"local_router": False,
"holdtime": 706,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R9.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
},
"total_lsp_count": 11,
"local_lsp_count": 1,
},
}
}
}
}
golden_output_1 = {'execute.return_value': '''
RP/0/RP0/CPU0:R3#show isis database detail
Wed Jan 30 22:07:52.759 UTC
IS-IS test (Level-1) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime/Rcvd ATT/P/OL
R3.00-00 * 0x0000000d 0x0476 578 /* 1/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
IP Address: 10.36.3.3
Metric: 10 IP-Extended 10.36.3.0/24
Metric: 10 IP-Extended 10.2.3.0/24
Hostname: R3
IPv6 Address: 2001:db8:3:3:3::3
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:3:3:3::3/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:2::/64
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 1/0/0
Metric: 10 IS-Extended R3.03
Metric: 10 IS-Extended R5.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.03
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
R3.03-00 0x00000007 0x8145 988 /* 0/0/0
Metric: 0 IS-Extended R3.00
Metric: 0 IS-Extended R4.00
R3.05-00 0x00000004 0x7981 600 /* 0/0/0
Metric: 0 IS-Extended R3.00
Metric: 0 IS-Extended R6.00
R4.00-00 0x0000000c 0x5c39 1115 /1200 0/0/0
Area Address: 49.0002
Metric: 10 IS-Extended R3.03
Metric: 10 IS-Extended R4.01
NLPID: 0xcc
NLPID: 0x8e
IP Address: 10.64.4.4
Metric: 10 IP-Extended 10.64.4.4/32
Metric: 10 IP-Extended 10.3.4.0/24
Hostname: R4
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.03
Metric: 10 MT (IPv6 Unicast) IS-Extended R4.01
IPv6 Address: 2001:db8:4:4:4::4
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:4:4:4::4/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:3::/64
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
R4.01-00 0x00000004 0xf9a0 616 /1200 0/0/0
Metric: 0 IS-Extended R4.00
Metric: 0 IS-Extended R5.00
R5.00-00 0x00000009 0x09f9 980 /1199 1/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 1/0/0
Hostname: R5
Metric: 10 IS-Extended R5.01
Metric: 10 IS-Extended R4.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R4.01
IP Address: 10.100.5.5
Metric: 10 IP-Extended 10.100.5.5/32
Metric: 10 IP-Extended 10.3.5.0/24
IPv6 Address: 2001:db8:5:5:5::5
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:5:5:5::5/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:3::/64
R5.01-00 0x00000004 0x4ac5 521 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R3.00
R5.03-00 0x00000004 0x3c38 1023 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R7.00
R6.00-00 0x00000008 0x1869 923 /1199 0/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
Router ID: 10.144.6.6
IP Address: 10.144.6.6
MT: IPv6 Unicast 0/0/0
MT: Standard (IPv4 Unicast)
Hostname: R6
Metric: 40 MT (IPv6 Unicast) IS-Extended R7.02
Metric: 40 MT (IPv6 Unicast) IS-Extended R3.05
Metric: 40 IS-Extended R7.02
Metric: 40 IS-Extended R3.05
Metric: 1 IP-Extended 10.144.6.0/24
Metric: 40 IP-Extended 10.6.7.0/24
Metric: 40 IP-Extended 10.3.6.0/24
Metric: 1 MT (IPv6 Unicast) IPv6 2001:db8:6:6:6::6/128
Metric: 40 MT (IPv6 Unicast) IPv6 2001:db8:10:6::/64
R7.00-00 0x00000008 0xaba8 965 /1198 1/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
Router ID: 10.196.7.7
IP Address: 10.196.7.7
MT: IPv6 Unicast 0/0/0
MT: Standard (IPv4 Unicast)
Hostname: R7
Metric: 40 MT (IPv6 Unicast) IS-Extended R7.02
Metric: 40 MT (IPv6 Unicast) IS-Extended R5.03
Metric: 40 IS-Extended R7.02
Metric: 40 IS-Extended R5.03
Metric: 40 IP-Extended-Interarea 10.7.8.0/24
Metric: 1 IP-Extended 10.196.7.7/32
Metric: 40 IP-Extended 10.7.9.0/24
Metric: 40 MT (IPv6 Unicast) IPv6-Interarea 2001:db8:10:7::/64
Metric: 1 MT (IPv6 Unicast) IPv6 2001:db8:7:7:7::7/128
R7.02-00 0x00000005 0x8c3d 884 /1198 0/0/0
Metric: 0 IS-Extended R6.00
Metric: 0 IS-Extended R7.00
Total Level-1 LSP count: 11 Local Level-1 LSP count: 1
IS-IS test (Level-2) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime/Rcvd ATT/P/OL
R2.00-00 0x00000009 0x5188 1082 /1199 0/0/0
Area Address: 49.0001
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R2
Metric: 10 IS-Extended R3.07
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.07
IP Address: 10.16.2.2
Metric: 10 IP-Extended 10.16.2.2/32
Metric: 10 IP-Extended 10.1.2.0/24
IPv6 Address: 2001:db8:2:2:2::2
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:2:2:2::2/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:1::/64
R3.00-00 * 0x00000011 0x4c4c 979 /* 0/0/0
Area Address: 49.0002
Metric: 10 IS-Extended R3.07
Metric: 10 IS-Extended R5.01
NLPID: 0xcc
NLPID: 0x8e
IP Address: 10.36.3.3
Metric: 10 IP-Extended 10.36.3.0/24
Metric: 10 IP-Extended 10.2.3.0/24
Hostname: R3
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.07
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
IPv6 Address: 2001:db8:3:3:3::3
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:3:3:3::3/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:2::/64
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
R3.07-00 0x00000007 0x652a 604 /* 0/0/0
Metric: 0 IS-Extended R3.00
Metric: 0 IS-Extended R2.00
R5.00-00 0x0000000b 0x93bc 903 /1199 0/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R5
Metric: 10 IS-Extended R5.01
Metric: 10 IS-Extended R5.03
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.03
IP Address: 10.100.5.5
Metric: 10 IP-Extended 10.100.5.5/32
Metric: 10 IP-Extended 10.3.5.0/24
IPv6 Address: 2001:db8:5:5:5::5
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:5:5:5::5/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:3::/64
R5.01-00 0x00000004 0x6236 426 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R3.00
R5.03-00 0x00000004 0x54a8 965 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R7.00
R7.00-00 0x00000009 0x7d78 766 /1198 0/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
Router ID: 10.196.7.7
IP Address: 10.196.7.7
MT: IPv6 Unicast 0/0/0
MT: Standard (IPv4 Unicast)
Hostname: R7
Metric: 40 MT (IPv6 Unicast) IS-Extended R9.01
Metric: 40 MT (IPv6 Unicast) IS-Extended R8.01
Metric: 40 IS-Extended R9.01
Metric: 40 IS-Extended R8.01
Metric: 40 IP-Extended 10.6.7.0/24
Metric: 1 IP-Extended 10.196.7.7/32
Metric: 40 MT (IPv6 Unicast) IPv6 2001:db8:10:6::/64
Metric: 1 MT (IPv6 Unicast) IPv6 2001:db8:7:7:7::7/128
R8.00-00 0x00000005 0x1309 453 /1198 0/0/0
Area Address: 49.0003
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R8
Metric: 10 IS-Extended R8.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R8.01
IP Address: 10.1.8.8
Metric: 10 IP-Extended 10.1.8.8/32
Metric: 10 IP-Extended 10.7.8.0/24
IPv6 Address: 2001:db8:8:8:8::8
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:8:8:8::8/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:7::/64
R8.01-00 0x00000004 0x9503 1143 /1198 0/0/0
Metric: 0 IS-Extended R8.00
Metric: 0 IS-Extended R7.00
R9.00-00 0x00000006 0xfd4e 800 /1198 0/0/0
Area Address: 49.0004
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R9
Metric: 10 IS-Extended R9.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R9.01
IP Address: 10.69.9.9
Metric: 10 IP-Extended 10.69.9.9/32
Metric: 10 IP-Extended 10.7.9.0/24
Metric: 10 IP-Extended 10.9.10.0/24
Metric: 20 IP-Extended 10.10.10.10/32
IPv6 Address: 2001:db8:9:9:9::9
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:9:9:9::9/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:7::/64
Metric: 10 IPv6 2001:2:2:2::2/128
R9.01-00 0x00000003 0xfdce 706 /1198 0/0/0
Metric: 0 IS-Extended R9.00
Metric: 0 IS-Extended R7.00
Total Level-2 LSP count: 11 Local Level-2 LSP count: 1
'''}
# Expected parse of golden_output_2 (asr9k "show isis database detail",
# instance "isp", narrow-metric "IP"/"IS" TLVs, no Rcvd column).
# NOTE: the original literal repeated the "172.16.166.0/24" key inside three
# ip_neighbor dict displays; in a dict literal the later duplicate silently
# replaces the earlier one, so those entries were dead text and have been
# removed.  The resulting dict value is unchanged.
golden_parsed_output_2 = {
    "instance": {
        "isp": {
            "level": {
                1: {
                    "lspid": {
                        "router-5.00-00": {
                            "lsp": {
                                "seq_num": "0x00000003",
                                "checksum": "0x8074460",
                                "local_router": False,
                                "holdtime": 457,
                                "attach_bit": 0,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            "area_address": "49",
                            "nlpid": ["0xcc"],
                            "hostname": "router-5",
                            "ip_address": "172.16.186.5",
                            "ip_neighbor": {
                                "172.16.115.0/24": {
                                    "ip_prefix": "172.16.115.0",
                                    "prefix_length": "24",
                                    "metric": 0,
                                },
                                # Device output lists this prefix twice; the
                                # parsed dict can only hold it once.
                                "172.16.166.0/24": {
                                    "ip_prefix": "172.16.166.0",
                                    "prefix_length": "24",
                                    "metric": 10,
                                },
                            },
                            "is_neighbor": {
                                "router-11.00": {
                                    "metric": 10},
                                "router-11.01": {
                                    "metric": 10},
                            },
                        },
                        "router-11.00-00": {
                            "lsp": {
                                "seq_num": "0x0000000b",
                                "checksum": "0x8074460",
                                "local_router": True,
                                "holdtime": 1161,
                                "attach_bit": 0,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            "area_address": "49",
                            "nlpid": ["0xcc"],
                            "hostname": "router-11",
                            "ip_address": "172.16.196.11",
                            "ip_neighbor": {
                                "172.16.76.0/24": {
                                    "ip_prefix": "172.16.76.0",
                                    "prefix_length": "24",
                                    "metric": 0,
                                },
                                "172.16.166.0/24": {
                                    "ip_prefix": "172.16.166.0",
                                    "prefix_length": "24",
                                    "metric": 10,
                                },
                            },
                            "is_neighbor": {
                                "router-11.01": {
                                    "metric": 10},
                                "router-5.00": {
                                    "metric": 10},
                            },
                        },
                        "router-11.01-00": {
                            "lsp": {
                                "seq_num": "0x00000001",
                                "checksum": "0x80770ec",
                                "local_router": True,
                                "holdtime": 457,
                                "attach_bit": 0,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            "is_neighbor": {
                                "router-11.00": {
                                    "metric": 0},
                                "router-5.00": {
                                    "metric": 0},
                            },
                        },
                    },
                    "total_lsp_count": 3,
                    "local_lsp_count": 2,
                },
                2: {
                    "lspid": {
                        "router-5.00-00": {
                            "lsp": {
                                "seq_num": "0x00000005",
                                "checksum": "0x807997c",
                                "local_router": False,
                                "holdtime": 457,
                                "attach_bit": 0,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            "area_address": "49",
                            "nlpid": ["0xcc"],
                            "hostname": "router-5",
                            "ip_address": "172.16.166.5",
                            "ip_neighbor": {
                                "172.16.115.0/24": {
                                    "ip_prefix": "172.16.115.0",
                                    "prefix_length": "24",
                                    "metric": 0,
                                },
                                "172.16.166.0/24": {
                                    "ip_prefix": "172.16.166.0",
                                    "prefix_length": "24",
                                    "metric": 10,
                                },
                                "172.16.94.0/24": {
                                    "ip_prefix": "172.16.94.0",
                                    "prefix_length": "24",
                                    "metric": 10,
                                },
                                "172.16.21.0/24": {
                                    "ip_prefix": "172.16.21.0",
                                    "prefix_length": "24",
                                    "metric": 10,
                                },
                            },
                            "is_neighbor": {
                                "router-11.00": {
                                    "metric": 10},
                                "router-11.01": {
                                    "metric": 10},
                            },
                        },
                        "router-11.00-00": {
                            "lsp": {
                                "seq_num": "0x0000000d",
                                "checksum": "0x807997c",
                                "local_router": True,
                                "holdtime": 1184,
                                "attach_bit": 0,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            "area_address": "49",
                            "nlpid": ["0xcc"],
                            "hostname": "router-11",
                            "ip_address": "172.28.111.111",
                            "ip_neighbor": {
                                "172.16.21.0/24": {
                                    "ip_prefix": "172.16.21.0",
                                    "prefix_length": "24",
                                    "metric": 0,
                                },
                                "172.16.166.0/24": {
                                    "ip_prefix": "172.16.166.0",
                                    "prefix_length": "24",
                                    "metric": 10,
                                },
                                "172.16.115.0/24": {
                                    "ip_prefix": "172.16.115.0",
                                    "prefix_length": "24",
                                    "metric": 10,
                                },
                            },
                            "is_neighbor": {
                                "router-11.01": {
                                    "metric": 10},
                                "router-5.00": {
                                    "metric": 10},
                            },
                        },
                        "router-gsr11.01-00": {
                            "lsp": {
                                "seq_num": "0x00000001",
                                "checksum": "0x80770ec",
                                "local_router": True,
                                "holdtime": 457,
                                "attach_bit": 0,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            "is_neighbor": {
                                "router-11.00": {
                                    "metric": 0},
                                "router-5.00": {
                                    "metric": 0},
                            },
                        },
                    },
                    "total_lsp_count": 3,
                    "local_lsp_count": 2,
                },
            }
        }
    }
}
# asr9k: raw device output backing golden_parsed_output_2 (narrow-metric IS/IP TLVs, no Rcvd column)
golden_output_2 = {'execute.return_value': '''
router# show isis database detail
IS-IS isp (Level-1) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime ATT/P/OL
router-5.00-00 0x00000003 0x8074460 457 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-5
IP Address: 172.16.186.5
Metric: 0 IP 172.16.115.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IS router-11.00
Metric: 10 IS router-11.01
router-11.00-00 * 0x0000000b 0x8074460 1161 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-11
IP Address: 172.16.196.11
Metric: 0 IP 172.16.76.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IS router-11.01
Metric: 10 IS router-5.00
router-11.01-00 * 0x00000001 0x80770ec 457 0/0/0
Metric: 0 IS router-11.00
Metric: 0 IS router-5.00
Total LSP count: 3 (L1: 3, L2 0, local L1: 2, local L2 0)
IS-IS isp (Level-2) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime ATT/P/OL
router-5.00-00 0x00000005 0x807997c 457 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-5
IP Address: 172.16.166.5
Metric: 0 IP 172.16.115.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.94.0/24
Metric: 10 IS router-11.00
Metric: 10 IS router-11.01
Metric: 10 IP 172.16.21.0/24
router-11.00-00 * 0x0000000d 0x807997c 1184 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-11
IP Address: 172.28.111.111
Metric: 0 IP 172.16.21.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IS router-11.01
Metric: 10 IS router-5.00
Metric: 10 IP 172.16.115.0/24
router-gsr11.01-00 * 0x00000001 0x80770ec 457 0/0/0
Metric: 0 IS router-11.00
Metric: 0 IS router-5.00
Total LSP count: 3 (L1: 0, L2 3, local L1: 0, local L2 2)
'''}
# Expected parse of golden_output_3 (ncs5k "show isis database detail").
# The top-level instance key is the empty string because this platform's
# header line ("IS-IS Level-1 Link State Database") carries no
# instance/process name.
golden_parsed_output_3 = {
    "instance": {
        "": {
            "level": {
                1: {
                    "lspid": {
                        "0000.0CFF.0C35.00-00": {
                            "lsp": {
                                "seq_num": "0x0000000C",
                                "checksum": "0x5696",
                                "local_router": False,
                                "holdtime": 325,
                                "attach_bit": 0,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            # The device output prints two "Area Address"
                            # lines for this LSP (47.0004.00FF.4D4E then
                            # 39.0001) but only one value is expected here —
                            # presumably the parser keeps the last one seen.
                            # NOTE(review): confirm against the parser.
                            "area_address": "39.0001",
                            "is_neighbor": {
                                "0000.0CFF.62E6.03": {
                                    "metric": 10}},
                            "es_neighbor": {
                                "0000.0CFF.0C35": {
                                    "metric": 0}},
                        },
                        "0000.0CFF.40AF.00-00": {
                            "lsp": {
                                "seq_num": "0x00000009",
                                "checksum": "0x8452",
                                "local_router": True,
                                "holdtime": 608,
                                "attach_bit": 1,
                                "p_bit": 0,
                                "overload_bit": 0,
                            },
                            "area_address": "47.0004.00FF.4D4E",
                            "topology": ["IPv4 (0x0)", "IPv6 (0x2)"],
                            "nlpid": ["0x8E"],
                            "ip_address": "172.16.21.49",
                            "is_neighbor": {
                                "0800.2BFF.3A01.01": {
                                    "metric": 10},
                                "0000.0CFF.62E6.03": {
                                    "metric": 10},
                                "cisco.03": {
                                    "metric": 10},
                            },
                            "es_neighbor": {
                                "0000.0CFF.40AF": {
                                    "metric": 0}},
                            "ipv6_address": "2001:0DB8::/32",
                            "ipv6_reachability": {
                                "2001:0DB8::/64": {
                                    "ip_prefix": "2001:0DB8::",
                                    "prefix_length": "64",
                                    # metric is a string here (unlike the int
                                    # metrics above), matching the expected
                                    # parser output for this TLV.
                                    "metric": "10",
                                }
                            },
                            "extended_is_neighbor": {
                                "cisco.03": {
                                    "metric": 5},
                                "cisco1.03": {
                                    "metric": 10},
                            },
                        },
                    }
                }
            }
        }
    }
}
# ncs5k: raw device output backing golden_parsed_output_3 (no instance name in the database header)
golden_output_3 = {'execute.return_value': '''
IS-IS Level-1 Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime ATT/P/OL
0000.0CFF.0C35.00-00 0x0000000C 0x5696 325 0/0/0
Area Address: 47.0004.00FF.4D4E
Area Address: 39.0001
Metric: 10 IS 0000.0CFF.62E6.03
Metric: 0 ES 0000.0CFF.0C35
0000.0CFF.40AF.00-00* 0x00000009 0x8452 608 1/0/0
Area Address: 47.0004.00FF.4D4E
Topology: IPv4 (0x0) IPv6 (0x2)
NLPID: 0xCC 0x8E
IP Address: 172.16.21.49
Metric: 10 IS 0800.2BFF.3A01.01
Metric: 10 IS 0000.0CFF.62E6.03
Metric: 0 ES 0000.0CFF.40AF
IPv6 Address: 2001:0DB8::/32
Metric: 10 IPv6 (MT-IPv6) 2001:0DB8::/64
Metric: 5 IS-Extended cisco.03
Metric: 10 IS-Extended cisco1.03
Metric: 10 IS (MT-IPv6) cisco.03
'''}
golden_parsed_output_4 = {
'instance': {
'Genie': {
'level': {
2: {
'lspid': {
'core1-genie.00-00': {
'lsp': {
'seq_num': '0x0000a302',
'checksum': '0x1a0e',
'local_router': False,
'holdtime': 58285,
'received': 65534,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
},
'area_address': '49.0000',
'nlpid': ['0xcc'],
'ip_address': '10.154.219.57',
'hostname': 'core1-genie',
'router_cap': '10.154.219.57 D:0 S:0',
'extended_ipv4_reachability': {
'10.154.219.57/32': {
'ip_prefix': '10.154.219.57',
'prefix_length': '32',
'metric': 0,
},
},
'extended_is_neighbor': {
'core2-genie.00': {
'metric': 50,
},
'tcore4-genie.00': {
'metric': 250,
},
'bl1-genie.00': {
'metric': 1000,
},
'bl2-genie.00': {
'metric': 1000,
},
},
},
'core2-genie.00-00': {
'lsp': {
'seq_num': '0x0000a15b',
'checksum': '0xfcfe',
'local_router': False,
'holdtime': 60939,
'received': 65534,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
},
'area_address': '49.0000',
'nlpid': ['0xcc'],
'ip_address': '10.154.219.58',
'hostname': 'core2-genie',
'router_cap': '10.154.219.58 D:0 S:0',
'extended_ipv4_reachability': {
'10.154.219.58/32': {
'ip_prefix': '10.154.219.58',
'prefix_length': '32',
'metric': 0,
},
},
'extended_is_neighbor': {
'core1-genie.00': {
'metric': 50,
},
'bl2-genie.00': {
'metric': 1000,
},
'bl1-genie.00': {
'metric': 1000,
},
'tcore3-genie.00': {
'metric': 250,
},
},
},
'dis17-genie_RE1.00-00': {
'lsp': {
'seq_num': '0x00000215',
'checksum': '0xf5f4',
'local_router': False,
'holdtime': 32551,
'received': 65535,
'attach_bit': 0,
'p_bit': 0,
'overload_bit': 0,
},
'area_address': '49.0000',
'tlv': 14,
'tlv_length': 2,
'nlpid': ['0xcc', '0x8e'],
'router_id': '10.154.219.102',
'ip_address': '10.154.219.102',
'hostname': 'dis17-genie_RE1',
'extended_is_neighbor': {
'tcore4-genie.00': {
'metric': 100,
},
'tcore3-genie.00': {
'metric': 100,
},
},
'extended_ipv4_reachability': {
'10.154.219.102/32': {
'ip_prefix': '10.154.219.102',
'prefix_length': '32',
'metric': 0,
},
},
'router_cap': '10.154.219.102 D:0 S:0',
},
},
},
},
},
},
}
golden_output_4 = {'execute.return_value': '''
show isis database detail
Mon Oct 22 10:40:56.529 EDT
IS-IS Genie (Level-2) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime/Rcvd ATT/P/OL
core1-genie.00-00 0x0000a302 0x1a0e 58285/65534 0/0/0
Area Address: 49.0000
NLPID: 0xcc
IP Address: 10.154.219.57
Hostname: core1-genie
Router Cap: 10.154.219.57 D:0 S:0
Metric: 0 IP-Extended 10.154.219.57/32
Metric: 50 IS-Extended core2-genie.00
Metric: 250 IS-Extended tcore4-genie.00
Metric: 1000 IS-Extended bl1-genie.00
Metric: 1000 IS-Extended bl2-genie.00
core2-genie.00-00 0x0000a15b 0xfcfe 60939/65534 0/0/0
Area Address: 49.0000
NLPID: 0xcc
IP Address: 10.154.219.58
Hostname: core2-genie
Router Cap: 10.154.219.58 D:0 S:0
Metric: 0 IP-Extended 10.154.219.58/32
Metric: 50 IS-Extended core1-genie.00
Metric: 1000 IS-Extended bl2-genie.00
Metric: 1000 IS-Extended bl1-genie.00
Metric: 250 IS-Extended tcore3-genie.00
dis17-genie_RE1.00-00 0x00000215 0xf5f4 32551/65535 0/0/0
Area Address: 49.0000
TLV 14: Length: 2
NLPID: 0xcc
NLPID: 0x8e
Router ID: 10.154.219.102
IP Address: 10.154.219.102
Hostname: dis17-genie_RE1
Metric: 100 IS-Extended tcore4-genie.00
Metric: 100 IS-Extended tcore3-genie.00
Metric: 0 IP-Extended 10.154.219.102/32
Router Cap: 10.154.219.102 D:0 S:0
'''}
def test_empty_output(self):
    """Empty device output must raise SchemaEmptyParserError."""
    self.device = Mock(**self.empty_output)
    parser = ShowIsisDatabaseDetail(device=self.device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_output_1(self):
    """Golden output 1 (IOS-XR, Level-1/Level-2 databases) parses as expected."""
    self.device = Mock(**self.golden_output_1)
    parser = ShowIsisDatabaseDetail(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output_1)
def test_output_2(self):
    """Golden output 2 (asr9k, narrow-metric TLVs) parses as expected."""
    self.device = Mock(**self.golden_output_2)
    parser = ShowIsisDatabaseDetail(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output_2)
def test_output_3(self):
    """Golden output 3 (ncs5k, no instance name) parses as expected."""
    self.device = Mock(**self.golden_output_3)
    parser = ShowIsisDatabaseDetail(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output_3)
def test_output_4(self):
    """Golden output 4 (instance "Genie", router-cap/TLV fields) parses as expected."""
    self.device = Mock(**self.golden_output_4)
    parser = ShowIsisDatabaseDetail(device=self.device)
    self.assertEqual(parser.parse(), self.golden_parsed_output_4)
class TestShowIsisPrivateAll(unittest.TestCase):
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'instance': {
'TEST': {
'cfg_refcount': 57,
'isis_is_level': 'level-2-only',
'ignore_cksum_errs': True,
'cfg_log_drops': False,
'nsf_cfg_purgetime': 90,
'nsf2_t1_delay': 1,
'nsf2_t1_max_num_exp': 10,
'nsf_cfg_interval': 300,
'address_family_table': {
'IPv4': {
'ref_count': 1,
},
'IPv6': {
'ref_count': 0,
},
},
'link_topology_table': {
'Standard (IPv4 Unicast)': {
'ref_count': 1,
'index': 0,
'is_running': True,
'list_linkage': {
'next': '0x0',
'previous': '0x44b3f24',
},
},
},
'topology_table': {
'IPv4 Unicast': {
'configuration': {
'check_adjacencies': '(not set)',
'attached_bit': '(not set)',
'max_paths': '(not set)',
'is_mcast_intact_set': False,
'mcast_intact': False,
'is_igp_intact_set': False,
'igp_intact': False,
'is_first_hop_source_set': False,
'first_hop_source': False,
},
'ref_count': 23,
'index': 0,
'ltopo_index': 0,
'list_linkage': {
'next': '0x0',
'previous': '0x44b51fc',
},
},
},
'area_configuration_table': {
'cross_level': {
'is_lsp_gen_interval_set': True,
'lsp_gen_interval': {
'initial_wait_msecs': 20,
'secondary_wait_msecs': 100,
'maximum_wait_msecs': 5000,
},
'is_lsp_arrivaltime_parameter_set': False,
'lsp_arrivaltime_parameter': {
'backoff_cfg': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 200,
'maximum_wait_msecs': 5000,
},
'max_count': 0,
'max_window_size_msec': 120001,
},
'is_lsp_checksum_interval_set': False,
'lsp_checksum_interval_secs': 0,
'is_lsp_refresh_interval_set': True,
'lsp_refresh_interval_secs': 35000,
'is_lsp_lifetime_set': True,
'lsp_lifetime_secs': 65535,
'is_lsp_mtu_set': False,
'lsp_mtu': 0,
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0xdecafbad',
'accept_password': '0xdecafbad',
},
'is_snp_authentication_options_set': False,
'snp_authentication_options': 0,
'is_overload_set': False,
'overload_mode': -1,
'overload_on_startup_secs': 0,
'per_topo': {
'IPv4 Unicast': {
'is_metric_style_set': True,
'generate_metric_mask': 2,
'accept_metric_mask': 2,
'summary_table': '0x15431f50',
'metric': 100000,
'is_spf_interval_set': True,
'spf_interval': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 150,
'maximum_wait_msecs': 5000,
},
'spf_periodic_interval_secs': '(not set)',
'ispf_state': '(not set)',
'max_redist_prefixes': '(not set)',
'topo_index': {
0: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
1: {
'is_spf_prefix_priority_acl_names_set': True,
'spf_prefix_priority_acl_names': '0x154b92c4',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
2: {
'is_spf_prefix_priority_acl_names_set': True,
'spf_prefix_priority_acl_names': '0x155a0e3c',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
3: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
},
},
},
},
'level-1': {
'is_lsp_gen_interval_set': False,
'lsp_gen_interval': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 200,
'maximum_wait_msecs': 5000,
},
'is_lsp_arrivaltime_parameter_set': False,
'lsp_arrivaltime_parameter': {
'backoff_cfg': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 200,
'maximum_wait_msecs': 5000,
},
'max_count': 0,
'max_window_size_msec': 120001,
},
'is_lsp_checksum_interval_set': False,
'lsp_checksum_interval_secs': 0,
'is_lsp_refresh_interval_set': False,
'lsp_refresh_interval_secs': 0,
'is_lsp_lifetime_set': False,
'lsp_lifetime_secs': 0,
'is_lsp_mtu_set': False,
'lsp_mtu': 0,
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0xdecafbad',
'accept_password': '0xdecafbad',
},
'is_snp_authentication_options_set': False,
'snp_authentication_options': 0,
'is_overload_set': False,
'overload_mode': 0,
'overload_on_startup_secs': 0,
'per_topo': {
'IPv4 Unicast': {
'is_metric_style_set': False,
'generate_metric_mask': 0,
'accept_metric_mask': 0,
'summary_table': '0x15431fac',
'metric': '(not set)',
'is_spf_interval_set': False,
'spf_interval': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 200,
'maximum_wait_msecs': 5000,
},
'spf_periodic_interval_secs': '(not set)',
'ispf_state': '(not set)',
'max_redist_prefixes': '(not set)',
'topo_index': {
0: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
1: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
2: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
3: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
},
},
},
},
'level-2': {
'is_lsp_gen_interval_set': False,
'lsp_gen_interval': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 200,
'maximum_wait_msecs': 5000,
},
'is_lsp_arrivaltime_parameter_set': False,
'lsp_arrivaltime_parameter': {
'backoff_cfg': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 200,
'maximum_wait_msecs': 5000,
},
'max_count': 0,
'max_window_size_msec': 120001,
},
'is_lsp_checksum_interval_set': False,
'lsp_checksum_interval_secs': 0,
'is_lsp_refresh_interval_set': False,
'lsp_refresh_interval_secs': 0,
'is_lsp_lifetime_set': False,
'lsp_lifetime_secs': 0,
'is_lsp_mtu_set': False,
'lsp_mtu': 0,
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0xdecafbad',
'accept_password': '0xdecafbad',
},
'is_snp_authentication_options_set': False,
'snp_authentication_options': 0,
'is_overload_set': False,
'overload_mode': 0,
'overload_on_startup_secs': 0,
'per_topo': {
'IPv4 Unicast': {
'is_metric_style_set': False,
'generate_metric_mask': 0,
'accept_metric_mask': 0,
'summary_table': '0x1539cef4',
'metric': '(not set)',
'is_spf_interval_set': False,
'spf_interval': {
'initial_wait_msecs': 50,
'secondary_wait_msecs': 200,
'maximum_wait_msecs': 5000,
},
'spf_periodic_interval_secs': '(not set)',
'ispf_state': '(not set)',
'max_redist_prefixes': '(not set)',
'topo_index': {
0: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
1: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
2: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
3: {
'is_spf_prefix_priority_acl_names_set': False,
'spf_prefix_priority_acl_names': '0x0',
'is_spf_prefix_priority_tags_set': False,
'spf_prefix_priority_tags': 0,
},
},
},
},
},
},
'area_tables': {
'level-2': {
'index': 1,
'idb_list': {
'sll_head': '0x151942e0',
'sll_tail': '0x15193fd4',
'sll_count': 8,
'sll_maximum': 0,
},
'list_linkage': {
'next': '0x0',
'previous': '0x44b2534',
},
'adj_db': '0x1540cee4',
'adj_log': '0x1539b844',
'uni_db_log': '0x15411024',
'upd_db': {
'lock': {
'rwlock': {
'active': 0,
'spare': '0x0',
'blockedwriters': 0,
'blockedreaders': 0,
'heavy': 0,
'lock': {
'count': -2147483648,
'owner': 0,
},
'owner': 4294967294,
},
'description': '0x15393cf0',
},
'tree': {
'root': '0x0',
'key_size': 8,
'size': 0,
'node_alloc_data': '0x15393cd0',
'node_alloc_fn': '0x42fd024',
'node_free_fn': '0x42fd08a',
'data_to_str_fn': '0x42fd094',
},
'tree_node_chunks': {
'name': '0x448764c',
'size': 28,
'flags': 1297,
'chunk': '0x1543146c',
'num_allocated_elements': 0,
},
'area': '0x15393bfc',
'log': '0x15432024',
'name': 'L2 Update DB',
},
'nsf_ietf_csnp_rcvd': False,
'overload_bit_on_startup_timer': '0x15017530',
'overload_bit_trigger_expired': True,
'upd_periodic_timer': '0x150174d0',
'checksum_ptimer': {
'tv_sec': 3657420,
'tv_nsec': 458761224,
},
'dec_db': {
'lock': {
'rwlock': {
'active': 0,
'spare': '0x0',
'blockedwriters': 0,
'blockedreaders': 0,
'heavy': 0,
'lock': {
'count': -2147483648,
'owner': 0,
},
'owner': 4294967294,
},
'description': '0x153942b0',
},
'tree': {
'root': '0x1539f9d4',
'key_size': 8,
'size': 82,
'node_alloc_data': '0x15394290',
'node_alloc_fn': '0x42fd024',
'node_free_fn': '0x42fd08a',
'data_to_str_fn': '0x42fd094',
},
'tree_node_chunks': {
'name': '0x448764c',
'size': 28,
'flags': 1297,
'chunk': '0x1539f844',
'num_allocated_elements': 82,
},
'area': '0x15393bfc',
'log': '0x15453024',
'name': 'L2 Decision DB',
},
'node_db': {
'node_created_fn': '0x424fd84',
'node_destroyed_fn': '0x424ffa6',
'node_ltopo_created_fn': '0x42500b6',
'node_ltopo_destroyed_fn': '0x42503ba',
'node_topo_created_fn': '0x4250536',
'node_topo_destroyed_fn': '0x42506b4',
'callback_context': '0x15393bfc',
'root_element': '0x151fb9bc',
'num_nodes': 64,
},
'stats': {
'ta_lsp_build': 850,
'ta_lsp_refresh': 219,
},
'trap_stats': {
'corr_lsps': 0,
'auth_type_fails': 0,
'auth_fails': 0,
'lsp_dbase_oloads': 4,
'man_addr_drop_from_areas': 0,
'attmpt_to_ex_max_seq_nums': 0,
'seq_num_skips': 1,
'own_lsp_purges': 3,
'id_field_len_mismatches': 0,
'lsp_errors': 0,
},
'per_ltopo': {
'Standard (IPv4 Unicast)': {
'area': '0x15393bfc',
'ltopo_index': 'Standard (IPv4 Unicast)',
'roca_event': {
'mutex': {
'mutex': {
'count': -2147483648,
'owner': 0,
},
'description': '0x1500ee28',
},
'timer': {
'timer': '0x150179bc',
'num_execution_events': 1,
'is_pending': False,
'is_executing': False,
'postponed_schedule_time': {
'tv_sec': 0,
'tv_nsec': 0,
},
'last_execution_time': {
'tv_sec': 3657197,
'tv_nsec': 824108467,
},
},
'log': '0x15474024',
'class': '<error>',
},
'spf_periodic_timer': '0x1501798c',
'paths': {
'classification': 0,
'is_sorted': False,
'array': '0x1540d45c',
'num_elements': 64,
},
'unreached': {
'classification': 0,
'is_sorted': False,
'array': '0x1540d4b4',
'num_elements': 0,
},
'firsthopchanged': {
'classification': 0,
'is_sorted': True,
'array': '0x1540d4e0',
'num_elements': 0,
},
'linkchanged': {
'classification': 2,
'is_sorted': True,
'array': '0x1540d66c',
'num_elements': 0,
},
'reachable_area_addresses': '0x1540d430',
'stats': {
'num_spfs': 5004,
'num_ispfs': 0,
'num_nhcs': 10,
'num_prcs': 1219,
'num_periodic_spfs': 3876,
},
},
},
'per_topo': {
'IPv4 Unicast': {
'area': '0x15393bfc',
'topo_index': 'IPv4 Unicast',
'te': {
'link_holddown_timer': '0x150181cc',
'purge_link_info_timer': '0x1501819c',
'log': '0x153a8d24',
'tunnel_table': '0x153ab844',
'info_from_te': '0x0',
'pce_info_from_te': '0x0',
'is_pce_ready': False,
},
'overloaded_count': 0,
'overload_bit_trigger_running': False,
'bgp_converged_notify_h': '0x0',
'added_first_hops': '0x0',
'deleted_first_hops': '0x0',
'postponed_added_first_hops': '0x0',
'postponed_deleted_first_hops': '0x0',
'prefixeschanged': '0x0',
'nodechanged': '0x0',
'prefix_priority_acl': {
'critical': '0x0',
'high': '0x15604868',
'medium': '0x156047dc',
'low': '0x0',
},
'num_redist_prefixes': 166,
'max_redist_prefixes_exceeded': False,
'max_redist_prefixes_alarm_on': False,
'has_prefix_policy_changed': False,
},
},
'per_af': {
'IPv4': {
'router_id': '0x15192388',
},
'IPv6': {
'router_id': '0x0',
},
},
},
},
'interfaces': {
'TenGigE0/0/1/3': {
'im_handle': '0x180',
'name': 'TenGigE0_0_1_3',
'ref_count': 2,
'index': 4,
'snmp_index': 21,
'chkpt': {
'objid': '0x0',
},
'cfg': {
'refcount': 7,
'is_p2p': True,
'enabled_mode': 'Active',
'circuit_type': 'level-1-2',
'ipv4_bfd_enabled': True,
'ipv6_bfd_enabled': False,
'bfd_interval': 250,
'bfd_multiplier': 3,
'topos': 'IPv4 Unicast',
'cross_levels': {
'per_topo': {
'IPv4 Unicast': {
'metric': 10,
'weight': '(not set)',
'ldp_sync_cfg': '(not set)',
'admin_tag': '(not set)',
'frr_type': '(not set)',
'is_lkgp_set': 0,
},
},
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0x0',
'accept_password': '0x0',
},
'hello_interval_msecs': '(not set)',
'hello_multiplier': '(not set)',
'csnp_interval_secs': '(not set)',
'lsp_pacing_interval_msecs': '(not set)',
'lsp_fast_flood_threshold': '(not set)',
'lsp_rexmit_interval_secs': '(not set)',
'min_lsp_rexmit_interval_msecs': '(not set)',
'dr_priority': '(not set)',
'is_hello_padding_set': False,
'hello_padding': 'Never',
},
'per_level': {
'Level-1': {
'per_topo': {
'IPv4 Unicast': {
'metric': '(not set)',
'weight': '(not set)',
'ldp_sync_cfg': '(not set)',
'admin_tag': '(not set)',
'frr_type': '(not set)',
'is_lkgp_set': 0,
},
},
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0x0',
'accept_password': '0x0',
},
'hello_interval_msecs': '(not set)',
'hello_multiplier': '(not set)',
'csnp_interval_secs': '(not set)',
'lsp_pacing_interval_msecs': '(not set)',
'lsp_fast_flood_threshold': '(not set)',
'lsp_rexmit_interval_secs': '(not set)',
'min_lsp_rexmit_interval_msecs': '(not set)',
'dr_priority': '(not set)',
'is_hello_padding_set': False,
'hello_padding': 'Never',
},
'Level-2': {
'per_topo': {
'IPv4 Unicast': {
'metric': '(not set)',
'weight': '(not set)',
'ldp_sync_cfg': '(not set)',
'admin_tag': '(not set)',
'frr_type': '(not set)',
'is_lkgp_set': 0,
},
},
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0x0',
'accept_password': '0x0',
},
'hello_interval_msecs': '(not set)',
'hello_multiplier': '(not set)',
'csnp_interval_secs': '(not set)',
'lsp_pacing_interval_msecs': '(not set)',
'lsp_fast_flood_threshold': '(not set)',
'lsp_rexmit_interval_secs': '(not set)',
'min_lsp_rexmit_interval_msecs': '(not set)',
'dr_priority': '(not set)',
'is_hello_padding_set': False,
'hello_padding': 'Never',
},
},
},
'per_topo': {
'IPv4 Unicast': {
'refcount': 2,
},
},
'topos_enabled_active': 'IPv4 Unicast',
'per_area': {
'Level-2': {
'area_linkage': '0x15194244',
'idb': '0x151916d8',
'area': '0x15393bfc',
'adj_filter': '0x0',
'csnp_control': {
'timer': '0x0',
'next_lsp_id': '0000.0000.0000.00-00',
'building_packets': False,
},
'psnp_timer': '0x0',
'nsf_ietf': {
'full_csnp_set_rcvd': False,
'csnp_set_rcvd': {
'list_head': '0x0',
'list_size': 0,
},
},
'adj_up_count': 0,
'lan_adj_up_count': 0,
'adj_list': '0x0',
'per_ltopo': {
'Standard (IPv4 Unicast)': {
'num_requested_adjs': 0,
'num_adjs': 0,
},
},
'tmrs_active': False,
'adj_filter_match_all': False,
'lsp_count': {
'in': 24185,
'out': 140529,
},
'csnp_count': {
'in': 17,
'out': 17,
},
'psnp_count': {
'in': 134275,
'out': 23143,
},
'lsp_flooding_dup_count': 3,
'lsp_drop_count': 0,
},
},
'media': {
'0x440cbe0': {
'caps_id': 30,
'media_class': 'LAN',
'encaps_overhead': 3,
},
},
'media_specific': {
'p2p': {
'hello_timer': '0x156bace8',
'last_hello': {
'tv_sec': 0,
'tv_nsec': 0,
},
'recent_hello_send_count': 0,
'adj_state': 2,
'do_ietf_3way': True,
'received_ietf_3way': False,
'neighbor_extended_circuit_number': 0,
'neighbor_system_id': '0000.0000.0000',
'mib_counters': {
'circuit_type': 0,
'adj_changes': 29,
'num_adj': 0,
'init_fails': 0,
'rej_adjs': 0,
'id_field_len_mismatches': 0,
'max_area_addr_mismatches': 0,
'auth_type_fails': 0,
'auth_fails': 0,
'lan_des_is_canges': 0,
'index': 0,
},
'init_csnp_wait': {
'tv_sec': 0,
'tv_nsec': 0,
},
'lsp_rexmit_queue': {
'sll_head': '0x0',
'sll_tail': '0x0',
'sll_count': 0,
'sll_maximum': 0,
},
'lsp_rexmit_timer': '0x157111ac',
'nsf_ietf': {
't1_timer': '0x156bacb8',
'num_t1_expiries': 0,
'first_t1_expiry_seen': False,
'rr_sent': False,
'ra_rcvd': False,
'all_ra_seen': False,
'ra_required_nbr_count': 0,
},
'stats': {
'iih_count': {
'in': 160726,
'out': 160689,
},
'iih_nomem': 0,
'lsp_retransmits': 72,
},
'p2p_over_lan': {
'mcast_state': {
'is_mcast_group_member': True,
'mcast_join_reason': 2,
},
'snpa_info': {
'im_attr_macaddr_notify_handle': '0x1514d188',
'snpa': '00c1.64ff.4ef2',
'is_snpa_ok': True,
},
},
},
},
'clns': {
'im_node': {
'exist_registered': True,
'node_exists': True,
'state_registered': True,
'node_up': False,
},
'mtu': 9199,
},
'per_af': {
'IPv4': {
'im_node': {
'exist_registered': True,
'node_exists': True,
'state_registered': True,
'node_up': False,
},
'local_address': '0.0.0.0',
'is_nexthop_addr_registered': True,
'is_global_prefix_registered': False,
'is_running_passive': False,
},
},
'nsf_waiting_for_running': False,
'nsf_ietf_waiting_for_sent_rr': False,
'is_media_ready': True,
'im_base_caps_exist_registered': True,
'tmrs_active': False,
'lsp_pacing_timer': '0x0',
'lsp_sent_last_id': '0000.0000.0000.00-00',
'lsp_sent_last_area': 1,
'lsp_send_b2b_limit': 10,
'lsp_send_b2b_limit_window_end': {
'tv_sec': 1407814,
'tv_nsec': 256518783,
},
'mesh_group': '0x0',
'lsp_send_requested': False,
'lsp_send_in_progress': False,
'mpls_ldp_sync': {
'im_attr_ldp_sync_info_notify_handle': 0,
'ldp_sync_info': False,
'is_ldp_sync_info_ok': 0,
},
'mpls_ldpv6_sync': {
'im_attr_ldp_sync_info_notify_handle': '0x0',
'ldp_sync_info': False,
'is_ldp_sync_info_ok': 0,
},
'stats': {
'ish_recv_count': 0,
'esh_recv_count': 0,
'unk_recv_count': 0,
},
'pri_label_stack_limit': 1,
'bkp_label_stack_limit': 3,
'srte_label_stack_limit': 10,
'srat_label_stack_limit': 10,
'bandwidth': 10000000,
'is_pme_delay_loss_set': False,
'pme_avg_delay': '(not set)',
'pme_min_delay': '(not set)',
'pme_max_delay': '(not set)',
'pme_delay_var': '(not set)',
'pme_loss': '(not set)',
'pme_total_bw': '(not set)',
'pme_rsvp_te_bw': '(not set)',
'rsvp_max_res_bw': '0 kbits/sec',
'rsvp_unres_prio_7': '0 kbits/sec',
},
'Loopback0': {
'im_handle': '0x8000160',
'name': 'Loopback0',
'ref_count': 3,
'index': 0,
'snmp_index': 46,
'chkpt': {
'objid': '0x0',
},
'cfg': {
'refcount': 4,
'is_p2p': False,
'enabled_mode': 'Passive',
'circuit_type': 'level-1-2',
'ipv4_bfd_enabled': False,
'ipv6_bfd_enabled': False,
'bfd_interval': 150,
'bfd_multiplier': 3,
'topos': 'IPv4 Unicast',
'cross_levels': {
'per_topo': {
'IPv4 Unicast': {
'metric': '(not set)',
'weight': '(not set)',
'ldp_sync_cfg': '(not set)',
'admin_tag': '(not set)',
'frr_type': '(not set)',
'is_lkgp_set': 0,
},
},
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0x0',
'accept_password': '0x0',
},
'hello_interval_msecs': '(not set)',
'hello_multiplier': '(not set)',
'csnp_interval_secs': '(not set)',
'lsp_pacing_interval_msecs': '(not set)',
'lsp_fast_flood_threshold': '(not set)',
'lsp_rexmit_interval_secs': '(not set)',
'min_lsp_rexmit_interval_msecs': '(not set)',
'dr_priority': '(not set)',
'is_hello_padding_set': False,
'hello_padding': 'Never',
},
'per_level': {
'Level-1': {
'per_topo': {
'IPv4 Unicast': {
'metric': '(not set)',
'weight': '(not set)',
'ldp_sync_cfg': '(not set)',
'admin_tag': '(not set)',
'frr_type': '(not set)',
'is_lkgp_set': 0,
},
},
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0x0',
'accept_password': '0x0',
},
'hello_interval_msecs': '(not set)',
'hello_multiplier': '(not set)',
'csnp_interval_secs': '(not set)',
'lsp_pacing_interval_msecs': '(not set)',
'lsp_fast_flood_threshold': '(not set)',
'lsp_rexmit_interval_secs': '(not set)',
'min_lsp_rexmit_interval_msecs': '(not set)',
'dr_priority': '(not set)',
'is_hello_padding_set': False,
'hello_padding': 'Never',
},
'Level-2': {
'per_topo': {
'IPv4 Unicast': {
'metric': '(not set)',
'weight': '(not set)',
'ldp_sync_cfg': '(not set)',
'admin_tag': '(not set)',
'frr_type': '(not set)',
'is_lkgp_set': 0,
},
},
'is_auth_cfg_ctx_set': False,
'auth_cfg_ctx': {
'alg': 'None',
'failure_mode': 'Drop',
'password': '0x0',
'accept_password': '0x0',
},
'hello_interval_msecs': '(not set)',
'hello_multiplier': '(not set)',
'csnp_interval_secs': '(not set)',
'lsp_pacing_interval_msecs': '(not set)',
'lsp_fast_flood_threshold': '(not set)',
'lsp_rexmit_interval_secs': '(not set)',
'min_lsp_rexmit_interval_msecs': '(not set)',
'dr_priority': '(not set)',
'is_hello_padding_set': False,
'hello_padding': 'Never',
},
},
},
'per_topo': {
'IPv4 Unicast': {
'refcount': 2,
},
},
'topos_enabled_passive': 'IPv4 Unicast',
'media': {
'0x440cc90': {
},
},
'clns': {
'im_node': {
'exist_registered': False,
'node_exists': False,
'state_registered': False,
'node_up': False,
},
'mtu': 0,
},
'per_af': {
'IPv4': {
'im_node': {
'exist_registered': True,
'node_exists': True,
'state_registered': True,
'node_up': True,
},
'local_address': '0.0.0.0',
'is_nexthop_addr_registered': False,
'is_global_prefix_registered': True,
'is_running_passive': True,
},
},
'nsf_waiting_for_running': False,
'nsf_ietf_waiting_for_sent_rr': False,
'is_media_ready': False,
'im_base_caps_exist_registered': True,
'tmrs_active': False,
'lsp_pacing_timer': '0x0',
'lsp_sent_last_id': '0000.0000.0000.00-00',
'lsp_sent_last_area': 0,
'lsp_send_b2b_limit': 0,
'lsp_send_b2b_limit_window_end': {
'tv_sec': 0,
'tv_nsec': 0,
},
'mesh_group': '0x0',
'lsp_send_requested': False,
'lsp_send_in_progress': False,
'mpls_ldp_sync': {
'im_attr_ldp_sync_info_notify_handle': 0,
'ldp_sync_info': False,
'is_ldp_sync_info_ok': 0,
},
'mpls_ldpv6_sync': {
'im_attr_ldp_sync_info_notify_handle': '0x0',
'ldp_sync_info': False,
'is_ldp_sync_info_ok': 0,
},
'stats': {
'ish_recv_count': 0,
'esh_recv_count': 0,
'unk_recv_count': 0,
},
'pri_label_stack_limit': '(not set)',
'bkp_label_stack_limit': '(not set)',
'srte_label_stack_limit': '(not set)',
'srat_label_stack_limit': '(not set)',
'bandwidth': '(not set)',
'is_pme_delay_loss_set': False,
'pme_avg_delay': '(not set)',
'pme_min_delay': '(not set)',
'pme_max_delay': '(not set)',
'pme_delay_var': '(not set)',
'pme_loss': '(not set)',
'pme_total_bw': '(not set)',
'pme_rsvp_te_bw': '(not set)',
'rsvp_max_res_bw': '0 kbits/sec',
'rsvp_unres_prio_7': '0 kbits/sec',
},
},
},
},
}
golden_output = {'execute.return_value': '''
RP/0/RSP0/CPU0:bl1-tatooine#show isis private all
Tue Oct 8 17:36:24.107 EDT
+++++++++++++++++++++++ IS-IS TEST Global Private Data ++++++++++++++++++++++++
ISIS TEST private data:
cfg_refcount : 57
isis_is_level : level-2-only
ignore_cksum_errs : TRUE
cfg_log_drops : FALSE
nsf_cfg_purgetime : 90
nsf2_t1_delay : 1
nsf2_t1_max_num_exp : 10
nsf_cfg_interval : 300
Address Family Table
IPv4
ref_count : 1
IPv6
ref_count : 0
Link Topology Table
Standard (IPv4 Unicast)
ref_count : 1
index : 0
is_running : TRUE
list_linkage.next : 0x0
list_linkage.previous : 0x44b3f24
Topology Table
IPv4 Unicast
Configuration:
check_adjacencies : (not set)
attached_bit : (not set)
max_paths : (not set)
is_mcast_intact_set : FALSE
mcast_intact : FALSE
is_igp_intact_set : FALSE
igp_intact : FALSE
is_first_hop_source_set : FALSE
first_hop_source : FALSE
ref_count : 23
index : 0
ltopo_index : 0
list_linkage.next : 0x0
list_linkage.previous : 0x44b51fc
Area Configuration Table
Cross Levels
is_lsp_gen_interval_set : TRUE
lsp_gen_interval.initial_wait_msecs : 20
lsp_gen_interval.secondary_wait_msecs : 100
lsp_gen_interval.maximum_wait_msecs : 5000
is_lsp_arrivaltime_parameter_set : FALSE
lsp_arrivaltime_parameter.backoff_cfg.initial_wait_msecs: 50
lsp_arrivaltime_parameter.backoff_cfg.secondary_wait_msecs: 200
lsp_arrivaltime_parameter.backoff_cfg.maximum_wait_msecs: 5000
lsp_arrivaltime_parameter.max_count : 0
lsp_arrivaltime_parameter.max_window_size_msec: 120001
is_lsp_checksum_interval_set : FALSE
lsp_checksum_interval_secs : 0
is_lsp_refresh_interval_set : TRUE
lsp_refresh_interval_secs : 35000
is_lsp_lifetime_set : TRUE
lsp_lifetime_secs : 65535
is_lsp_mtu_set : FALSE
lsp_mtu : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0xdecafbad
auth_cfg_ctx.accept_password : 0xdecafbad
is_snp_authentication_options_set : FALSE
snp_authentication_options : 0
is_overload_set : FALSE
overload_mode : -1
overload_on_startup_secs : 0
per_topo[IPv4 Unicast] :
is_metric_style_set : TRUE
generate_metric_mask : 2
accept_metric_mask : 2
summary_table : 0x15431f50
metric : 100000
is_spf_interval_set : TRUE
spf_interval.initial_wait_msecs : 50
spf_interval.secondary_wait_msecs : 150
spf_interval.maximum_wait_msecs : 5000
spf_periodic_interval_secs : (not set)
ispf_state : (not set)
max_redist_prefixes : (not set)
[000] is_spf_prefix_priority_acl_names_set : FALSE
[000] spf_prefix_priority_acl_names : 0x0
[001] is_spf_prefix_priority_acl_names_set : TRUE
[001] spf_prefix_priority_acl_names : 0x154b92c4
[002] is_spf_prefix_priority_acl_names_set : TRUE
[002] spf_prefix_priority_acl_names : 0x155a0e3c
[003] is_spf_prefix_priority_acl_names_set : FALSE
[003] spf_prefix_priority_acl_names : 0x0
[000] is_spf_prefix_priority_tags_set : FALSE
[000] spf_prefix_priority_tags : 0
[001] is_spf_prefix_priority_tags_set : FALSE
[001] spf_prefix_priority_tags : 0
[002] is_spf_prefix_priority_tags_set : FALSE
[002] spf_prefix_priority_tags : 0
[003] is_spf_prefix_priority_tags_set : FALSE
[003] spf_prefix_priority_tags : 0
Level-1
is_lsp_gen_interval_set : FALSE
lsp_gen_interval.initial_wait_msecs : 50
lsp_gen_interval.secondary_wait_msecs : 200
lsp_gen_interval.maximum_wait_msecs : 5000
is_lsp_arrivaltime_parameter_set : FALSE
lsp_arrivaltime_parameter.backoff_cfg.initial_wait_msecs: 50
lsp_arrivaltime_parameter.backoff_cfg.secondary_wait_msecs: 200
lsp_arrivaltime_parameter.backoff_cfg.maximum_wait_msecs: 5000
lsp_arrivaltime_parameter.max_count : 0
lsp_arrivaltime_parameter.max_window_size_msec: 120001
is_lsp_checksum_interval_set : FALSE
lsp_checksum_interval_secs : 0
is_lsp_refresh_interval_set : FALSE
lsp_refresh_interval_secs : 0
is_lsp_lifetime_set : FALSE
lsp_lifetime_secs : 0
is_lsp_mtu_set : FALSE
lsp_mtu : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0xdecafbad
auth_cfg_ctx.accept_password : 0xdecafbad
is_snp_authentication_options_set : FALSE
snp_authentication_options : 0
is_overload_set : FALSE
overload_mode : 0
overload_on_startup_secs : 0
per_topo[IPv4 Unicast] :
is_metric_style_set : FALSE
generate_metric_mask : 0
accept_metric_mask : 0
summary_table : 0x15431fac
metric : (not set)
is_spf_interval_set : FALSE
spf_interval.initial_wait_msecs : 50
spf_interval.secondary_wait_msecs : 200
spf_interval.maximum_wait_msecs : 5000
spf_periodic_interval_secs : (not set)
ispf_state : (not set)
max_redist_prefixes : (not set)
[000] is_spf_prefix_priority_acl_names_set : FALSE
[000] spf_prefix_priority_acl_names : 0x0
[001] is_spf_prefix_priority_acl_names_set : FALSE
[001] spf_prefix_priority_acl_names : 0x0
[002] is_spf_prefix_priority_acl_names_set : FALSE
[002] spf_prefix_priority_acl_names : 0x0
[003] is_spf_prefix_priority_acl_names_set : FALSE
[003] spf_prefix_priority_acl_names : 0x0
[000] is_spf_prefix_priority_tags_set : FALSE
[000] spf_prefix_priority_tags : 0
[001] is_spf_prefix_priority_tags_set : FALSE
[001] spf_prefix_priority_tags : 0
[002] is_spf_prefix_priority_tags_set : FALSE
[002] spf_prefix_priority_tags : 0
[003] is_spf_prefix_priority_tags_set : FALSE
[003] spf_prefix_priority_tags : 0
Level-2
is_lsp_gen_interval_set : FALSE
lsp_gen_interval.initial_wait_msecs : 50
lsp_gen_interval.secondary_wait_msecs : 200
lsp_gen_interval.maximum_wait_msecs : 5000
is_lsp_arrivaltime_parameter_set : FALSE
lsp_arrivaltime_parameter.backoff_cfg.initial_wait_msecs: 50
lsp_arrivaltime_parameter.backoff_cfg.secondary_wait_msecs: 200
lsp_arrivaltime_parameter.backoff_cfg.maximum_wait_msecs: 5000
lsp_arrivaltime_parameter.max_count : 0
lsp_arrivaltime_parameter.max_window_size_msec: 120001
is_lsp_checksum_interval_set : FALSE
lsp_checksum_interval_secs : 0
is_lsp_refresh_interval_set : FALSE
lsp_refresh_interval_secs : 0
is_lsp_lifetime_set : FALSE
lsp_lifetime_secs : 0
is_lsp_mtu_set : FALSE
lsp_mtu : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0xdecafbad
auth_cfg_ctx.accept_password : 0xdecafbad
is_snp_authentication_options_set : FALSE
snp_authentication_options : 0
is_overload_set : FALSE
overload_mode : 0
overload_on_startup_secs : 0
per_topo[IPv4 Unicast] :
is_metric_style_set : FALSE
generate_metric_mask : 0
accept_metric_mask : 0
summary_table : 0x1539cef4
metric : (not set)
is_spf_interval_set : FALSE
spf_interval.initial_wait_msecs : 50
spf_interval.secondary_wait_msecs : 200
spf_interval.maximum_wait_msecs : 5000
spf_periodic_interval_secs : (not set)
ispf_state : (not set)
max_redist_prefixes : (not set)
[000] is_spf_prefix_priority_acl_names_set : FALSE
[000] spf_prefix_priority_acl_names : 0x0
[001] is_spf_prefix_priority_acl_names_set : FALSE
[001] spf_prefix_priority_acl_names : 0x0
[002] is_spf_prefix_priority_acl_names_set : FALSE
[002] spf_prefix_priority_acl_names : 0x0
[003] is_spf_prefix_priority_acl_names_set : FALSE
[003] spf_prefix_priority_acl_names : 0x0
[000] is_spf_prefix_priority_tags_set : FALSE
[000] spf_prefix_priority_tags : 0
[001] is_spf_prefix_priority_tags_set : FALSE
[001] spf_prefix_priority_tags : 0
[002] is_spf_prefix_priority_tags_set : FALSE
[002] spf_prefix_priority_tags : 0
[003] is_spf_prefix_priority_tags_set : FALSE
[003] spf_prefix_priority_tags : 0
Area Table
Level-2
index : 1
idb_list.sll_head : 0x151942e0
idb_list.sll_tail : 0x15193fd4
idb_list.sll_count : 8
idb_list.sll_maximum : 0
list_linkage.next : 0x0
list_linkage.previous : 0x44b2534
adj_db : 0x1540cee4
adj_log : 0x1539b844
uni_db_log : 0x15411024
upd_db.lock.rwlock.__active : 0
upd_db.lock.rwlock.__spare : 0x0
upd_db.lock.rwlock.__blockedwriters : 0
upd_db.lock.rwlock.__blockedreaders : 0
upd_db.lock.rwlock.__heavy : 0
upd_db.lock.rwlock.__lock.__count : -2147483648
upd_db.lock.rwlock.__lock.__owner : 0
upd_db.lock.rwlock.__owner : 4294967294
upd_db.lock.description : 0x15393cf0
upd_db.tree.root : 0x0
upd_db.tree.key_size : 8
upd_db.tree.size : 0
upd_db.tree.node_alloc_data : 0x15393cd0
upd_db.tree.node_alloc_fn : 0x42fd024
upd_db.tree.node_free_fn : 0x42fd08a
upd_db.tree.data_to_str_fn : 0x42fd094
upd_db.tree_node_chunks.name : 0x448764c
upd_db.tree_node_chunks.size : 28
upd_db.tree_node_chunks.flags : 1297
upd_db.tree_node_chunks.chunk : 0x1543146c
upd_db.tree_node_chunks.num_allocated_elements : 0
upd_db.area : 0x15393bfc
upd_db.log : 0x15432024
upd_db.name : L2 Update DB
nsf_ietf_csnp_rcvd : FALSE
overload_bit_on_startup_timer : 0x15017530
overload_bit_trigger_expired : TRUE
overload_bit_forced_reasons :
upd_periodic_timer : 0x150174d0
checksum_ptimer.tv_sec : 3657420
checksum_ptimer.tv_nsec : 458761224
dec_db.lock.rwlock.__active : 0
dec_db.lock.rwlock.__spare : 0x0
dec_db.lock.rwlock.__blockedwriters : 0
dec_db.lock.rwlock.__blockedreaders : 0
dec_db.lock.rwlock.__heavy : 0
dec_db.lock.rwlock.__lock.__count : -2147483648
dec_db.lock.rwlock.__lock.__owner : 0
dec_db.lock.rwlock.__owner : 4294967294
dec_db.lock.description : 0x153942b0
dec_db.tree.root : 0x1539f9d4
dec_db.tree.key_size : 8
dec_db.tree.size : 82
dec_db.tree.node_alloc_data : 0x15394290
dec_db.tree.node_alloc_fn : 0x42fd024
dec_db.tree.node_free_fn : 0x42fd08a
dec_db.tree.data_to_str_fn : 0x42fd094
dec_db.tree_node_chunks.name : 0x448764c
dec_db.tree_node_chunks.size : 28
dec_db.tree_node_chunks.flags : 1297
dec_db.tree_node_chunks.chunk : 0x1539f844
dec_db.tree_node_chunks.num_allocated_elements : 82
dec_db.area : 0x15393bfc
dec_db.log : 0x15453024
dec_db.name : L2 Decision DB
node_db.node_created_fn : 0x424fd84
node_db.node_destroyed_fn : 0x424ffa6
node_db.node_ltopo_created_fn : 0x42500b6
node_db.node_ltopo_destroyed_fn : 0x42503ba
node_db.node_topo_created_fn : 0x4250536
node_db.node_topo_destroyed_fn : 0x42506b4
node_db.callback_context : 0x15393bfc
node_db.root_element : 0x151fb9bc
node_db.num_nodes : 64
stats.ta_lsp_build : 850
stats.ta_lsp_refresh : 219
trap_stats.isisSysStatCorrLSPs : 0
trap_stats.isisSysStatAuthTypeFails : 0
trap_stats.isisSysStatAuthFails : 0
trap_stats.isisSysStatLSPDbaseOloads : 4
trap_stats.isisSysStatManAddrDropFromAreas : 0
trap_stats.isisSysStatAttmptToExMaxSeqNums : 0
trap_stats.isisSysStatSeqNumSkips : 1
trap_stats.isisSysStatOwnLSPPurges : 3
trap_stats.isisSysStatIDFieldLenMismatches : 0
trap_stats.isisSysStatLSPErrors : 0
per_ltopo[Standard (IPv4 Unicast)] :
area : 0x15393bfc
ltopo_index : Standard (IPv4 Unicast)
roca_event.mutex.mutex.__count : -2147483648
roca_event.mutex.mutex.__owner : 0
roca_event.mutex.description : 0x1500ee28
roca_event.timer.timer : 0x150179bc
roca_event.timer.num_execution_events : 1
roca_event.timer.is_pending : FALSE
roca_event.timer.is_executing : FALSE
roca_event.timer.postponed_schedule_time.tv_sec: 0
roca_event.timer.postponed_schedule_time.tv_nsec: 0
roca_event.timer.last_execution_time.tv_sec : 3657197
roca_event.timer.last_execution_time.tv_nsec : 824108467
roca_event.log : 0x15474024
roca_event.class : <error>
spf_periodic_timer : 0x1501798c
paths.classification : 0
paths.is_sorted : FALSE
paths.array : 0x1540d45c
paths.num_elements : 64
unreached.classification : 0
unreached.is_sorted : FALSE
unreached.array : 0x1540d4b4
unreached.num_elements : 0
firsthopchanged.classification : 0
firsthopchanged.is_sorted : TRUE
firsthopchanged.array : 0x1540d4e0
firsthopchanged.num_elements : 0
linkchanged.classification : 2
linkchanged.is_sorted : TRUE
linkchanged.array : 0x1540d66c
linkchanged.num_elements : 0
reachable_area_addresses : 0x1540d430
stats.num_spfs : 5004
stats.num_ispfs : 0
stats.num_nhcs : 10
stats.num_prcs : 1219
stats.num_periodic_spfs : 3876
per_topo[IPv4 Unicast] :
area : 0x15393bfc
topo_index : IPv4 Unicast
te.link_holddown_timer : 0x150181cc
te.purge_link_info_timer : 0x1501819c
te.log : 0x153a8d24
te.tunnel_table : 0x153ab844
te.info_from_te : 0x0
te.pce_info_from_te : 0x0
te.is_pce_ready : FALSE
overloaded_count : 0
overload_bit_trigger_running : FALSE
bgp_converged_notify_h : 0x0
added_first_hops : 0x0
deleted_first_hops : 0x0
postponed_added_first_hops : 0x0
postponed_deleted_first_hops : 0x0
prefixeschanged : 0x0
nodechanged : 0x0
prefix_priority_acl[ISIS_PREFIX_PRIORITY_CRITICAL]: 0x0
prefix_priority_acl[ISIS_PREFIX_PRIORITY_HIGH]: 0x15604868
prefix_priority_acl[ISIS_PREFIX_PRIORITY_MED] : 0x156047dc
prefix_priority_acl[ISIS_PREFIX_PRIORITY_LOW] : 0x0
num_redist_prefixes : 166
max_redist_prefixes_exceeded : FALSE
max_redist_prefixes_alarm_on : FALSE
has_prefix_policy_changed : FALSE
per_af[IPv4] :
router_id : 0x15192388
per_af[IPv6] :
router_id : 0x0
++++++++++++++++++++++ IS-IS TEST Interface Private Data ++++++++++++++++++++++
Interface TenGigE0/0/1/3
im_handle : 0x180
name : TenGigE0_0_1_3
ref_count : 2
index : 4
snmp_index : 21
chkpt.objid : 0x0
cfg.refcount : 7
cfg.is_p2p : TRUE
cfg.enabled_mode : Active
cfg.circuit_type : level-1-2
cfg.ipv4_bfd_enabled : TRUE
cfg.ipv6_bfd_enabled : FALSE
cfg.bfd_interval : 250
cfg.bfd_multiplier : 3
cfg.topos : IPv4 Unicast
per_topo[IPv4 Unicast] :
refcount : 2
cfg.cross_levels :
per_topo[IPv4 Unicast] :
metric : 10
weight : (not set)
ldp_sync_cfg : (not set)
admin_tag : (not set)
frr_type : (not set)
is_lkgp_set : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0x0
auth_cfg_ctx.accept_password : 0x0
hello_interval_msecs : (not set)
hello_multiplier : (not set)
csnp_interval_secs : (not set)
lsp_pacing_interval_msecs : (not set)
lsp_fast_flood_threshold : (not set)
lsp_rexmit_interval_secs : (not set)
min_lsp_rexmit_interval_msecs : (not set)
dr_priority : (not set)
is_hello_padding_set : FALSE
hello_padding : Never
cfg.per_level[Level-1] :
per_topo[IPv4 Unicast] :
metric : (not set)
weight : (not set)
ldp_sync_cfg : (not set)
admin_tag : (not set)
frr_type : (not set)
is_lkgp_set : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0x0
auth_cfg_ctx.accept_password : 0x0
hello_interval_msecs : (not set)
hello_multiplier : (not set)
csnp_interval_secs : (not set)
lsp_pacing_interval_msecs : (not set)
lsp_fast_flood_threshold : (not set)
lsp_rexmit_interval_secs : (not set)
min_lsp_rexmit_interval_msecs : (not set)
dr_priority : (not set)
is_hello_padding_set : FALSE
hello_padding : Never
cfg.per_level[Level-2] :
per_topo[IPv4 Unicast] :
metric : (not set)
weight : (not set)
ldp_sync_cfg : (not set)
admin_tag : (not set)
frr_type : (not set)
is_lkgp_set : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0x0
auth_cfg_ctx.accept_password : 0x0
hello_interval_msecs : (not set)
hello_multiplier : (not set)
csnp_interval_secs : (not set)
lsp_pacing_interval_msecs : (not set)
lsp_fast_flood_threshold : (not set)
lsp_rexmit_interval_secs : (not set)
min_lsp_rexmit_interval_msecs : (not set)
dr_priority : (not set)
is_hello_padding_set : FALSE
hello_padding : Never
topos_enabled_passive :
topos_enabled_active : IPv4 Unicast
per_area[Level-2] :
area_linkage : 0x15194244
idb : 0x151916d8
area : 0x15393bfc
adj_filter : 0x0
csnp_control.timer : 0x0
csnp_control.next_lsp_id : 0000.0000.0000.00-00
csnp_control.building_packets : FALSE
psnp_timer : 0x0
nsf_ietf.full_csnp_set_rcvd : FALSE
nsf_ietf.csnp_set_rcvd.list_head : 0x0
nsf_ietf.csnp_set_rcvd.list_size : 0
adj_up_count : 0
lan_adj_up_count : 0
adj_list : 0x0
per_ltopo[Standard (IPv4 Unicast)] :
num_requested_adjs : 0
num_adjs : 0
tmrs_active : FALSE
adj_filter_match_all : FALSE
lsp_count.in : 24185
lsp_count.out : 140529
csnp_count.in : 17
csnp_count.out : 17
psnp_count.in : 134275
psnp_count.out : 23143
lsp_flooding_dup_count : 3
lsp_drop_count : 0
media : 0x440cbe0
caps_id : 30
media_class : LAN
encaps_overhead : 3
media_specific.p2p.hello_timer : 0x156bace8
media_specific.p2p.last_hello.tv_sec : 0
media_specific.p2p.last_hello.tv_nsec : 0
media_specific.p2p.recent_hello_send_count : 0
media_specific.p2p.adj_state : 2
media_specific.p2p.do_ietf_3way : TRUE
media_specific.p2p.received_ietf_3way : FALSE
media_specific.p2p.neighbor_extended_circuit_number: 0
media_specific.p2p.neighbor_system_id : 0000.0000.0000
media_specific.p2p.mib_counters.isisCircuitType : 0
media_specific.p2p.mib_counters.isisCircAdjChanges: 29
media_specific.p2p.mib_counters.isisCircNumAdj : 0
media_specific.p2p.mib_counters.isisCircInitFails : 0
media_specific.p2p.mib_counters.isisCircRejAdjs : 0
media_specific.p2p.mib_counters.isisCircIDFieldLenMismatches: 0
media_specific.p2p.mib_counters.isisCircMaxAreaAddrMismatches: 0
media_specific.p2p.mib_counters.isisCircAuthTypeFails: 0
media_specific.p2p.mib_counters.isisCircAuthFails : 0
media_specific.p2p.mib_counters.isisCircLANDesISChanges: 0
media_specific.p2p.mib_counters.isisCircIndex : 0
media_specific.p2p.init_csnp_wait.tv_sec : 0
media_specific.p2p.init_csnp_wait.tv_nsec : 0
media_specific.p2p.lsp_rexmit_queue.sll_head : 0x0
media_specific.p2p.lsp_rexmit_queue.sll_tail : 0x0
media_specific.p2p.lsp_rexmit_queue.sll_count : 0
media_specific.p2p.lsp_rexmit_queue.sll_maximum : 0
media_specific.p2p.lsp_rexmit_timer : 0x157111ac
media_specific.p2p.nsf_ietf
t1_timer : 0x156bacb8
num_t1_expiries : 0
first_t1_expiry_seen : FALSE
rr_sent : FALSE
ra_rcvd : FALSE
all_ra_seen : FALSE
ra_required_nbr_count : 0
RA-expected neighbor list:
media_specific.p2p.stats.iih_count.in : 160726
media_specific.p2p.stats.iih_count.out : 160689
media_specific.p2p.stats.iih_nomem : 0
media_specific.p2p.stats.lsp_retransmits : 72
media_specific.p2p.p2p_over_lan
mcast_state.is_mcast_group_member : TRUE
mcast_state.mcast_join_reason : 2
snpa_info.im_attr_macaddr_notify_handle : 0x1514d188
snpa_info.snpa : 00c1.64ff.4ef2
snpa_info.is_snpa_ok : TRUE
clns.im_node.exist_registered : TRUE
clns.im_node.node_exists : TRUE
clns.im_node.state_registered : TRUE
clns.im_node.node_up : FALSE
clns.mtu : 9199
per_af[IPv4]
im_node.exist_registered : TRUE
im_node.node_exists : TRUE
im_node.state_registered : TRUE
im_node.node_up : FALSE
local_address : 0.0.0.0
is_nexthop_addr_registered : TRUE
is_global_prefix_registered : FALSE
is_running_passive : FALSE
ltopos_ready_active :
nsf_waiting_for_running : FALSE
nsf_ietf_waiting_for_sent_rr : FALSE
is_media_ready : TRUE
im_base_caps_exist_registered : TRUE
tmrs_active : FALSE
lsp_pacing_timer : 0x0
lsp_sent_last_id : 0000.0000.0000.00-00
lsp_sent_last_area : 1
lsp_send_b2b_limit : 10
lsp_send_b2b_limit_window_end.tv_sec : 1407814
lsp_send_b2b_limit_window_end.tv_nsec : 256518783
mesh_group : 0x0
lsp_send_requested : FALSE
lsp_send_in_progress : FALSE
mpls_ldp_sync.im_attr_ldp_sync_info_notify_handle : 0
mpls_ldp_sync.ldp_sync_info : FALSE
mpls_ldp_sync.is_ldp_sync_info_ok : 0
mpls_ldpv6_sync.im_attr_ldp_sync_info_notify_handle: 0x0
mpls_ldpv6_sync.ldp_sync_info : FALSE
mpls_ldpv6_sync.is_ldp_sync_info_ok : 0
stats.ish_recv_count : 0
stats.esh_recv_count : 0
stats.unk_recv_count : 0
pri_label_stack_limit : 1
bkp_label_stack_limit : 3
srte_label_stack_limit : 10
srat_label_stack_limit : 10
bandwidth : 10000000
is_pme_delay_loss_set : FALSE
pme_avg_delay : (not set)
pme_min_delay : (not set)
pme_max_delay : (not set)
pme_delay_var : (not set)
pme_loss : (not set)
pme_total_bw : (not set)
pme_rsvp_te_bw : (not set)
rsvp_max_res_bw : 0 kbits/sec
rsvp_unres_prio_7 : 0 kbits/sec
Interface Loopback0
im_handle : 0x8000160
name : Loopback0
ref_count : 3
index : 0
snmp_index : 46
chkpt.objid : 0x0
cfg.refcount : 4
cfg.is_p2p : FALSE
cfg.enabled_mode : Passive
cfg.circuit_type : level-1-2
cfg.ipv4_bfd_enabled : FALSE
cfg.ipv6_bfd_enabled : FALSE
cfg.bfd_interval : 150
cfg.bfd_multiplier : 3
cfg.topos : IPv4 Unicast
per_topo[IPv4 Unicast] :
refcount : 2
cfg.cross_levels :
per_topo[IPv4 Unicast] :
metric : (not set)
weight : (not set)
ldp_sync_cfg : (not set)
admin_tag : (not set)
frr_type : (not set)
is_lkgp_set : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0x0
auth_cfg_ctx.accept_password : 0x0
hello_interval_msecs : (not set)
hello_multiplier : (not set)
csnp_interval_secs : (not set)
lsp_pacing_interval_msecs : (not set)
lsp_fast_flood_threshold : (not set)
lsp_rexmit_interval_secs : (not set)
min_lsp_rexmit_interval_msecs : (not set)
dr_priority : (not set)
is_hello_padding_set : FALSE
hello_padding : Never
cfg.per_level[Level-1] :
per_topo[IPv4 Unicast] :
metric : (not set)
weight : (not set)
ldp_sync_cfg : (not set)
admin_tag : (not set)
frr_type : (not set)
is_lkgp_set : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0x0
auth_cfg_ctx.accept_password : 0x0
hello_interval_msecs : (not set)
hello_multiplier : (not set)
csnp_interval_secs : (not set)
lsp_pacing_interval_msecs : (not set)
lsp_fast_flood_threshold : (not set)
lsp_rexmit_interval_secs : (not set)
min_lsp_rexmit_interval_msecs : (not set)
dr_priority : (not set)
is_hello_padding_set : FALSE
hello_padding : Never
cfg.per_level[Level-2] :
per_topo[IPv4 Unicast] :
metric : (not set)
weight : (not set)
ldp_sync_cfg : (not set)
admin_tag : (not set)
frr_type : (not set)
is_lkgp_set : 0
is_auth_cfg_ctx_set : FALSE
auth_cfg_ctx.alg : None
auth_cfg_ctx.failure_mode : Drop
auth_cfg_ctx.password : 0x0
auth_cfg_ctx.accept_password : 0x0
hello_interval_msecs : (not set)
hello_multiplier : (not set)
csnp_interval_secs : (not set)
lsp_pacing_interval_msecs : (not set)
lsp_fast_flood_threshold : (not set)
lsp_rexmit_interval_secs : (not set)
min_lsp_rexmit_interval_msecs : (not set)
dr_priority : (not set)
is_hello_padding_set : FALSE
hello_padding : Never
topos_enabled_passive : IPv4 Unicast
topos_enabled_active :
media : 0x440cc90
clns.im_node.exist_registered : FALSE
clns.im_node.node_exists : FALSE
clns.im_node.state_registered : FALSE
clns.im_node.node_up : FALSE
clns.mtu : 0
per_af[IPv4]
im_node.exist_registered : TRUE
im_node.node_exists : TRUE
im_node.state_registered : TRUE
im_node.node_up : TRUE
local_address : 0.0.0.0
is_nexthop_addr_registered : FALSE
is_global_prefix_registered : TRUE
is_running_passive : TRUE
ltopos_ready_active :
nsf_waiting_for_running : FALSE
nsf_ietf_waiting_for_sent_rr : FALSE
is_media_ready : FALSE
im_base_caps_exist_registered : TRUE
tmrs_active : FALSE
lsp_pacing_timer : 0x0
lsp_sent_last_id : 0000.0000.0000.00-00
lsp_sent_last_area : 0
lsp_send_b2b_limit : 0
lsp_send_b2b_limit_window_end.tv_sec : 0
lsp_send_b2b_limit_window_end.tv_nsec : 0
mesh_group : 0x0
lsp_send_requested : FALSE
lsp_send_in_progress : FALSE
mpls_ldp_sync.im_attr_ldp_sync_info_notify_handle : 0
mpls_ldp_sync.ldp_sync_info : FALSE
mpls_ldp_sync.is_ldp_sync_info_ok : 0
mpls_ldpv6_sync.im_attr_ldp_sync_info_notify_handle: 0x0
mpls_ldpv6_sync.ldp_sync_info : FALSE
mpls_ldpv6_sync.is_ldp_sync_info_ok : 0
stats.ish_recv_count : 0
stats.esh_recv_count : 0
stats.unk_recv_count : 0
pri_label_stack_limit : (not set)
bkp_label_stack_limit : (not set)
srte_label_stack_limit : (not set)
srat_label_stack_limit : (not set)
bandwidth : (not set)
is_pme_delay_loss_set : FALSE
pme_avg_delay : (not set)
pme_min_delay : (not set)
pme_max_delay : (not set)
pme_delay_var : (not set)
pme_loss : (not set)
pme_total_bw : (not set)
pme_rsvp_te_bw : (not set)
rsvp_max_res_bw : 0 kbits/sec
rsvp_unres_prio_7 : 0 kbits/sec
'''}
def test_empty_output(self):
    """Parsing empty device output must raise SchemaEmptyParserError."""
    mocked_device = Mock(**self.empty_output)
    parser = ShowIsisPrivateAll(device=mocked_device)
    with self.assertRaises(SchemaEmptyParserError):
        parser.parse()
def test_golden_output(self):
    """Golden device output parses into the expected dictionary."""
    mocked_device = Mock(**self.golden_output)
    parser = ShowIsisPrivateAll(device=mocked_device)
    self.assertEqual(parser.parse(), self.golden_parsed_output)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 48.727179 | 148 | 0.315827 | 26,267 | 377,928 | 4.328168 | 0.047322 | 0.004838 | 0.014355 | 0.008022 | 0.881694 | 0.843554 | 0.79617 | 0.765666 | 0.732927 | 0.70244 | 0 | 0.104178 | 0.601668 | 377,928 | 7,755 | 149 | 48.733462 | 0.651019 | 0.004083 | 0 | 0.634641 | 0 | 0.003628 | 0.466512 | 0.05856 | 0 | 0 | 0.008464 | 0 | 0.005912 | 1 | 0.005912 | false | 0.00954 | 0.000537 | 0 | 0.020022 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c0454f30f2be303e632cd76ec728d961b3838b49 | 191 | py | Python | cvtk/io/json.py | flystarhe/cvtk | bcfea4c5b13269bdd63899020b9c70f2eb1f5b07 | [
"MIT"
] | 1 | 2021-06-29T07:12:45.000Z | 2021-06-29T07:12:45.000Z | cvtk/io/json.py | flystarhe/cvtk | bcfea4c5b13269bdd63899020b9c70f2eb1f5b07 | [
"MIT"
] | null | null | null | cvtk/io/json.py | flystarhe/cvtk | bcfea4c5b13269bdd63899020b9c70f2eb1f5b07 | [
"MIT"
] | null | null | null | try:
import simplejson as json
except ImportError:
import json
def json_dumps(obj, **kwargs):
    """Serialize ``obj`` to a JSON-formatted string.

    Extra keyword arguments (e.g. ``indent``, ``sort_keys``,
    ``separators``) are forwarded unchanged to ``json.dumps``.
    """
    return json.dumps(obj, **kwargs)
def json_loads(s, **kwargs):
    """Deserialize a JSON document ``s`` into a Python object.

    Extra keyword arguments are forwarded unchanged to ``json.loads``.
    """
    return json.loads(s, **kwargs)
| 14.692308 | 32 | 0.649215 | 29 | 191 | 4.206897 | 0.482759 | 0.114754 | 0.196721 | 0.229508 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.21466 | 191 | 12 | 33 | 15.916667 | 0.813333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.375 | 0.25 | 0.875 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
22249e04398195395e7bacf2cb5cf22730383518 | 6,594 | py | Python | src/model/qa_mode/model/bert_multi_hid_model.py | AlfredWGA/House-Property-QA | 47fed327b1714e417e56a88e67af7486e8118b2c | [
"MIT"
] | null | null | null | src/model/qa_mode/model/bert_multi_hid_model.py | AlfredWGA/House-Property-QA | 47fed327b1714e417e56a88e67af7486e8118b2c | [
"MIT"
] | null | null | null | src/model/qa_mode/model/bert_multi_hid_model.py | AlfredWGA/House-Property-QA | 47fed327b1714e417e56a88e67af7486e8118b2c | [
"MIT"
] | null | null | null | # coding=utf-8
import torch.nn as nn
from torch.nn import MSELoss, CrossEntropyLoss, BCELoss
from transformers import BertModel, BertConfig
import torch.nn.functional as F
import torch
import os
class BertMultiHidModel(nn.Module):
    """BERT classifier that fuses the last two encoder hidden layers.

    Each of the last two hidden layers is summarized as the concatenation
    [max-pool, mean-pool, last-token, first-token] (shape [b, 4*hidden]).
    The two summaries are blended with attention weights derived from
    their similarity to the pooled [CLS] output, then classified.

    Improvements over the original: removed the commented-out,
    third-layer experimental code paths; added documentation. Behavior
    of the live code path is unchanged.
    """

    def __init__(self, bert_model_dir, args):
        super(BertMultiHidModel, self).__init__()
        self.bert_model_dir = bert_model_dir
        self.config = BertConfig.from_pretrained(bert_model_dir)
        # All per-layer hidden states are needed to pool the last two layers.
        self.config.output_hidden_states = True
        self.config.output_attentions = False
        self.hidden = self.config.hidden_size
        self.bert = BertModel.from_pretrained(bert_model_dir, config=self.config)
        self.att_w = nn.Linear(4 * self.hidden, self.hidden)
        self.dropout = nn.Dropout(args.dropout)
        self.fc1 = nn.Linear(4 * self.hidden, args.class_num)
        self.tanh = nn.Tanh()
        self.softmax = nn.Softmax(dim=-1)
        # NOTE: attribute name kept as-is (misspelling of "sigmoid") so
        # existing checkpoints / external references keep working.
        self.sigmod = nn.Sigmoid()
        self.class_num = args.class_num

    def forward(self, batch_data):
        """Return ``(logits, sigmoid(logits))`` for one batch.

        ``batch_data`` is a 7-tuple (tokens, segments, attention_mask,
        _, _, _, labels); labels are unpacked but unused here — the
        caller computes the loss via get_loss_function().
        """
        tokens_tensor, segments_tensors, att_mask, _, _, _, labels = batch_data
        outputs = self.bert(tokens_tensor, attention_mask=att_mask,
                            token_type_ids=segments_tensors)
        hiddens = outputs[2]  # tuple of per-layer [b, seq, hid]
        cls = outputs[1]      # pooled [CLS] output, [b, hid]

        last_first_hidden = hiddens[-1]
        last_second_hidden = hiddens[-2]

        # Pool each layer: max, mean, last token, first token -> [b, 4*hid]
        q1, _ = torch.max(last_first_hidden, dim=1)
        a1 = torch.mean(last_first_hidden, dim=1)
        t1 = last_first_hidden[:, -1]
        e1 = last_first_hidden[:, 0]
        q2, _ = torch.max(last_second_hidden, dim=1)
        a2 = torch.mean(last_second_hidden, dim=1)
        t2 = last_second_hidden[:, -1]
        e2 = last_second_hidden[:, 0]

        class_encode1 = self.dropout(torch.cat([q1, a1, t1, e1], dim=-1))
        class_encode2 = self.dropout(torch.cat([q2, a2, t2, e2], dim=-1))

        # Score each layer summary against the pooled [CLS] vector.
        att1 = self.tanh(self.att_w(class_encode1))          # [b, hid]
        att2 = self.tanh(self.att_w(class_encode2))          # [b, hid]
        a1 = (cls * att1).sum(-1, keepdim=True)              # [b, 1]
        a2 = (cls * att2).sum(-1, keepdim=True)              # [b, 1]

        alpha = self.softmax(torch.cat([a1, a2], dim=-1)).unsqueeze(-1)  # [b, 2, 1]
        class_encode = torch.stack([class_encode1, class_encode2], dim=1)  # [b, 2, 4*hid]
        class_encode = (class_encode * alpha).sum(1)         # [b, 4*hid]
        class_encode = self.dropout(class_encode)

        logit = self.fc1(class_encode)
        pre = self.sigmod(logit)
        return (logit, pre)

    def get_loss_function(self):
        """MSELoss for single-output (regression), else CrossEntropyLoss."""
        if self.class_num == 1:
            return MSELoss()
        return CrossEntropyLoss()
class BertMultiHidModel2(nn.Module):
    """BERT classifier fusing the last two encoder hidden layers.

    Each of the last two layers is pooled into the concatenation
    [max, mean, last-token, first-token] (4*hidden); the two pooled
    vectors are blended with attention weights computed against the
    pooled [CLS] vector, then passed to a linear classifier.
    """

    def __init__(self, bert_model_dir, args):
        super(BertMultiHidModel2, self).__init__()
        self.bert_model_dir = bert_model_dir
        self.config = BertConfig.from_pretrained(bert_model_dir)
        # Every layer's output is required so we can pool the last two.
        self.config.output_hidden_states = True
        self.config.output_attentions = False
        self.hidden = self.config.hidden_size
        self.bert = BertModel.from_pretrained(bert_model_dir, config=self.config)
        self.att_w = nn.Linear(4 * self.hidden, self.hidden)
        self.dropout = nn.Dropout(args.dropout)
        self.fc1 = nn.Linear(4 * self.hidden, args.class_num)
        self.tanh = nn.Tanh()
        self.softmax = nn.Softmax(dim=-1)
        self.sigmod = nn.Sigmoid()
        self.class_num = args.class_num

    def _pool(self, layer):
        """Summarize one layer as [max, mean, last token, first token]."""
        pooled_max, _ = torch.max(layer, dim=1)
        pooled_mean = torch.mean(layer, dim=1)
        last_tok = layer[:, -1]
        first_tok = layer[:, 0]
        return torch.cat([pooled_max, pooled_mean, last_tok, first_tok], dim=-1)

    def forward(self, batch_data):
        """Return ``(logits, sigmoid(logits))`` for one batch.

        ``batch_data`` is (tokens, segments, attention_mask, _, _, _,
        labels); labels are ignored here — the caller computes the loss.
        """
        tokens, segments, mask, _, _, _, _labels = batch_data
        bert_out = self.bert(tokens, attention_mask=mask,
                             token_type_ids=segments)
        all_layers = bert_out[2]   # per-layer [b, seq, hid]
        pooled_cls = bert_out[1]   # [b, hid]

        enc_top = self.dropout(self._pool(all_layers[-1]))    # [b, 4*hid]
        enc_below = self.dropout(self._pool(all_layers[-2]))  # [b, 4*hid]

        # Score each layer encoding against the pooled [CLS] vector.
        score_top = (pooled_cls * self.tanh(self.att_w(enc_top))).sum(-1, keepdim=True)
        score_below = (pooled_cls * self.tanh(self.att_w(enc_below))).sum(-1, keepdim=True)
        weights = self.softmax(torch.cat([score_top, score_below], dim=-1))
        weights = weights.unsqueeze(-1)                        # [b, 2, 1]

        stacked = torch.stack([enc_top, enc_below], dim=1)     # [b, 2, 4*hid]
        fused = self.dropout((stacked * weights).sum(1))       # [b, 4*hid]

        logit = self.fc1(fused)
        return (logit, self.sigmod(logit))

    def get_loss_function(self):
        """MSELoss when class_num == 1 (regression), else CrossEntropyLoss."""
        return MSELoss() if self.class_num == 1 else CrossEntropyLoss()
| 35.451613 | 100 | 0.599484 | 924 | 6,594 | 4.069264 | 0.124459 | 0.017553 | 0.026596 | 0.021277 | 0.86516 | 0.863564 | 0.853191 | 0.831915 | 0.831915 | 0.831915 | 0 | 0.03965 | 0.25417 | 6,594 | 185 | 101 | 35.643243 | 0.724888 | 0.182894 | 0 | 0.910714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053571 | false | 0 | 0.053571 | 0 | 0.178571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3f0318a5bfe3011d557db02e59950437ec031dac | 37,136 | py | Python | test/functional/s3api/test_object.py | naototty/swift | af373a9cbca5f084f1d0f215b82a579f76da5089 | [
"Apache-2.0"
] | null | null | null | test/functional/s3api/test_object.py | naototty/swift | af373a9cbca5f084f1d0f215b82a579f76da5089 | [
"Apache-2.0"
] | null | null | null | test/functional/s3api/test_object.py | naototty/swift | af373a9cbca5f084f1d0f215b82a579f76da5089 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
import os
import boto
# For an issue with venv and distutils, disable pylint message here
# pylint: disable-msg=E0611,F0401
from distutils.version import StrictVersion
import email.parser
from email.utils import formatdate, parsedate
from time import mktime
from hashlib import md5
import six
from six.moves.urllib.parse import quote
import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code, calculate_md5
DAY = 86400.0  # seconds in one day: 60 * 60 * 24 (sec)
def setUpModule():
    """Initialize the functional-test package before any test runs."""
    tf.setup_package()
def tearDownModule():
    """Tear down the functional-test package after all tests finish."""
    tf.teardown_package()
class TestS3ApiObject(S3ApiBase):
def setUp(self):
    """Create the bucket used by every test in this class."""
    super(TestS3ApiObject, self).setUp()
    self.bucket = 'bucket'
    self.conn.make_request('PUT', self.bucket)
def _assertObjectEtag(self, bucket, obj, etag):
    """HEAD the object and check the common response headers carry *etag*."""
    resp_status, resp_headers, _ = self.conn.make_request('HEAD', bucket, obj)
    self.assertEqual(200, resp_status)  # sanity
    self.assertCommonResponseHeaders(resp_headers, etag)
def test_object(self):
    """Exercise the full object lifecycle: PUT, copy, GET, HEAD, DELETE.

    The steps run in order against the bucket created in setUp(); each
    step depends on state left by the previous one.
    """
    obj = 'object name with %-sign'
    content = b'abc123'
    etag = md5(content).hexdigest()

    # PUT Object
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, body=content)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self.assertTrue('content-length' in headers)  # sanity
    self.assertEqual(headers['content-length'], '0')
    self._assertObjectEtag(self.bucket, obj, etag)

    # PUT Object Copy
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_obj'
    self.conn.make_request('PUT', dst_bucket)
    headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj,
                               headers=headers)
    self.assertEqual(status, 200)

    # PUT Object Copy with URL-encoded Source
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_obj'
    self.conn.make_request('PUT', dst_bucket)
    headers = {'x-amz-copy-source': quote('/%s/%s' % (self.bucket, obj))}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj,
                               headers=headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self.assertEqual(headers['content-length'], str(len(body)))

    # The copy response body is a CopyObjectResult XML document.
    elem = fromstring(body, 'CopyObjectResult')
    self.assertTrue(elem.find('LastModified').text is not None)
    last_modified_xml = elem.find('LastModified').text
    self.assertTrue(elem.find('ETag').text is not None)
    self.assertEqual(etag, elem.find('ETag').text.strip('"'))
    self._assertObjectEtag(dst_bucket, dst_obj, etag)

    # Check timestamp on Copy:
    status, headers, body = \
        self.conn.make_request('GET', dst_bucket)
    self.assertEqual(status, 200)
    elem = fromstring(body, 'ListBucketResult')
    # FIXME: COPY result drops milli/microseconds but GET doesn't
    self.assertEqual(
        elem.find('Contents').find("LastModified").text.rsplit('.', 1)[0],
        last_modified_xml.rsplit('.', 1)[0])

    # GET Object
    status, headers, body = \
        self.conn.make_request('GET', self.bucket, obj)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers, etag)
    self.assertTrue(headers['last-modified'] is not None)
    self.assertTrue(headers['content-type'] is not None)
    self.assertEqual(headers['content-length'], str(len(content)))

    # HEAD Object
    status, headers, body = \
        self.conn.make_request('HEAD', self.bucket, obj)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers, etag)
    self.assertTrue(headers['last-modified'] is not None)
    self.assertTrue('content-type' in headers)
    self.assertEqual(headers['content-length'], str(len(content)))

    # DELETE Object
    status, headers, body = \
        self.conn.make_request('DELETE', self.bucket, obj)
    self.assertEqual(status, 204)
    self.assertCommonResponseHeaders(headers)

    # DELETE Non-Existent Object
    status, headers, body = \
        self.conn.make_request('DELETE', self.bucket, 'does-not-exist')
    self.assertEqual(status, 204)
    self.assertCommonResponseHeaders(headers)
def test_put_object_error(self):
    """PUT with a bad signature or a missing bucket returns an S3 error."""
    bad_auth = Connection(aws_secret_key='invalid')
    _, resp_headers, resp_body = \
        bad_auth.make_request('PUT', self.bucket, 'object')
    self.assertEqual('SignatureDoesNotMatch', get_error_code(resp_body))
    self.assertEqual('application/xml', resp_headers['content-type'])

    _, resp_headers, resp_body = \
        self.conn.make_request('PUT', 'bucket2', 'object')
    self.assertEqual('NoSuchBucket', get_error_code(resp_body))
    self.assertEqual('application/xml', resp_headers['content-type'])
def test_put_object_copy_error(self):
    """Error cases for PUT-copy: bad auth, missing source key/bucket,
    missing destination bucket."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)
    dst_bucket = 'dst-bucket'
    self.conn.make_request('PUT', dst_bucket)
    dst_obj = 'dst_object'

    # Bad signature on the copying connection.
    headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
    auth_error_conn = Connection(aws_secret_key='invalid')
    status, headers, body = \
        auth_error_conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
    self.assertEqual(headers['content-type'], 'application/xml')

    # /src/nothing -> /dst/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, 'nothing')}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(get_error_code(body), 'NoSuchKey')
    self.assertEqual(headers['content-type'], 'application/xml')

    # /nothing/src -> /dst/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % ('nothing', obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    # TODO: source bucket is not check.
    # self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # /src/src -> /nothing/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', 'nothing', dst_obj, headers)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
    self.assertEqual(headers['content-type'], 'application/xml')
def test_get_object_error(self):
    """GET with bad auth, missing key, or missing bucket yields S3 errors."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    bad_auth = Connection(aws_secret_key='invalid')
    _, resp_headers, resp_body = \
        bad_auth.make_request('GET', self.bucket, obj)
    self.assertEqual('SignatureDoesNotMatch', get_error_code(resp_body))
    self.assertEqual('application/xml', resp_headers['content-type'])

    _, resp_headers, resp_body = \
        self.conn.make_request('GET', self.bucket, 'invalid')
    self.assertEqual('NoSuchKey', get_error_code(resp_body))
    self.assertEqual('application/xml', resp_headers['content-type'])

    _, resp_headers, resp_body = self.conn.make_request('GET', 'invalid', obj)
    self.assertEqual('NoSuchBucket', get_error_code(resp_body))
    self.assertEqual('application/xml', resp_headers['content-type'])
def test_head_object_error(self):
    """HEAD failures return the right status and an empty body."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    bad_auth = Connection(aws_secret_key='invalid')
    resp_status, resp_headers, resp_body = \
        bad_auth.make_request('HEAD', self.bucket, obj)
    self.assertEqual(403, resp_status)
    self.assertEqual(b'', resp_body)  # sanity: HEAD carries no body
    self.assertEqual('application/xml', resp_headers['content-type'])

    resp_status, resp_headers, resp_body = \
        self.conn.make_request('HEAD', self.bucket, 'invalid')
    self.assertEqual(404, resp_status)
    self.assertEqual(b'', resp_body)  # sanity
    self.assertEqual('application/xml', resp_headers['content-type'])

    resp_status, resp_headers, resp_body = \
        self.conn.make_request('HEAD', 'invalid', obj)
    self.assertEqual(404, resp_status)
    self.assertEqual(b'', resp_body)  # sanity
    self.assertEqual('application/xml', resp_headers['content-type'])
def test_delete_object_error(self):
    """DELETE with bad auth or a missing bucket returns an S3 error."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    bad_auth = Connection(aws_secret_key='invalid')
    _, resp_headers, resp_body = \
        bad_auth.make_request('DELETE', self.bucket, obj)
    self.assertEqual('SignatureDoesNotMatch', get_error_code(resp_body))
    self.assertEqual('application/xml', resp_headers['content-type'])

    _, resp_headers, resp_body = \
        self.conn.make_request('DELETE', 'invalid', obj)
    self.assertEqual('NoSuchBucket', get_error_code(resp_body))
    self.assertEqual('application/xml', resp_headers['content-type'])
def test_put_object_content_encoding(self):
    """Content-Encoding supplied on PUT is echoed back on HEAD."""
    obj = 'object'
    empty_etag = md5().hexdigest()
    put_headers = {'Content-Encoding': 'gzip'}
    resp_status, _, _ = \
        self.conn.make_request('PUT', self.bucket, obj, put_headers)
    self.assertEqual(200, resp_status)

    _, resp_headers, _ = \
        self.conn.make_request('HEAD', self.bucket, obj)
    self.assertIn('content-encoding', resp_headers)  # sanity
    self.assertEqual('gzip', resp_headers['content-encoding'])
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, empty_etag)
def test_put_object_content_md5(self):
    """PUT succeeds when the Content-MD5 header matches the body."""
    obj = 'object'
    payload = b'abcdefghij'
    expected_etag = md5(payload).hexdigest()
    put_headers = {'Content-MD5': calculate_md5(payload)}
    resp_status, resp_headers, _ = self.conn.make_request(
        'PUT', self.bucket, obj, put_headers, payload)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_put_object_content_type(self):
    """Content-Type given on PUT is returned on HEAD."""
    obj = 'object'
    payload = b'abcdefghij'
    expected_etag = md5(payload).hexdigest()
    resp_status, _, _ = self.conn.make_request(
        'PUT', self.bucket, obj, {'Content-Type': 'text/plain'}, payload)
    self.assertEqual(200, resp_status)

    _, resp_headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
    self.assertEqual('text/plain', resp_headers['content-type'])
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_put_object_conditional_requests(self):
    """Conditional headers on PUT are unsupported and must return 501,
    without creating the object."""
    obj = 'object'
    content = b'abcdefghij'

    headers = {'If-None-Match': '*'}
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, headers, content)
    self.assertEqual(status, 501)

    headers = {'If-Match': '*'}
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, headers, content)
    self.assertEqual(status, 501)

    headers = {'If-Modified-Since': 'Sat, 27 Jun 2015 00:00:00 GMT'}
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, headers, content)
    self.assertEqual(status, 501)

    headers = {'If-Unmodified-Since': 'Sat, 27 Jun 2015 00:00:00 GMT'}
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, headers, content)
    self.assertEqual(status, 501)

    # None of the above should actually have created an object
    status, headers, body = \
        self.conn.make_request('HEAD', self.bucket, obj, {}, '')
    self.assertEqual(status, 404)
def test_put_object_expect(self):
    """PUT with 'Expect: 100-continue' completes and stores the object."""
    obj = 'object'
    payload = b'abcdefghij'
    expected_etag = md5(payload).hexdigest()
    expect_headers = {'Expect': '100-continue'}
    resp_status, resp_headers, _ = self.conn.make_request(
        'PUT', self.bucket, obj, expect_headers, payload)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def _test_put_object_headers(self, req_headers, expected_headers=None):
    """PUT an object with *req_headers*; verify HEAD echoes *expected_headers*.

    When *expected_headers* is omitted, the request headers themselves
    are expected back unchanged.
    """
    expected = req_headers if expected_headers is None else expected_headers
    obj = 'object'
    payload = b'abcdefghij'
    expected_etag = md5(payload).hexdigest()
    resp_status, _, _ = self.conn.make_request(
        'PUT', self.bucket, obj, req_headers, payload)
    self.assertEqual(200, resp_status)

    _, resp_headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
    for name, expected_value in expected.items():
        self.assertIn(name.lower(), resp_headers)
        self.assertEqual(expected_value, resp_headers[name.lower()])
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_put_object_metadata(self):
    """x-amz-meta-* headers are stored and returned unchanged."""
    self._test_put_object_headers({
        'X-Amz-Meta-Bar': 'foo',
        'X-Amz-Meta-Bar2': 'foo2'})
def test_put_object_weird_metadata(self):
    """Punctuation in metadata header names: only a subset round-trips.

    The expected headers are a strict subset of the requested ones —
    presumably the remainder are filtered along the request path
    (NOTE(review): mechanism not visible here; confirm in middleware).
    """
    req_headers = dict(
        ('x-amz-meta-' + c, c)
        for c in '!"#$%&\'()*+-./<=>?@[\\]^`{|}~')
    exp_headers = dict(
        ('x-amz-meta-' + c, c)
        for c in '!#$%&\'(*+-.^`|~')
    self._test_put_object_headers(req_headers, exp_headers)
def test_put_object_underscore_in_metadata(self):
    """Metadata names differing only by '-' vs '_' stay distinct."""
    # Break this out separately for ease of testing pre-0.19.0 eventlet
    self._test_put_object_headers({
        'X-Amz-Meta-Foo-Bar': 'baz',
        'X-Amz-Meta-Foo_Bar': 'also baz'})
def test_put_object_content_headers(self):
    """Content-* headers set on PUT are stored and echoed back."""
    self._test_put_object_headers({
        'Content-Type': 'foo/bar',
        'Content-Encoding': 'baz',
        'Content-Disposition': 'attachment',
        'Content-Language': 'en'})
def test_put_object_cache_control(self):
    """Cache-Control (including extension tokens) round-trips unchanged."""
    self._test_put_object_headers({
        'Cache-Control': 'private, some-extension'})
def test_put_object_expires(self):
    """The Expires header round-trips verbatim."""
    self._test_put_object_headers({
        # We don't validate that the Expires header is a valid date
        'Expires': 'a valid HTTP-date timestamp'})
def test_put_object_robots_tag(self):
    """A non-standard X-Robots-Tag header round-trips unchanged."""
    self._test_put_object_headers({
        'X-Robots-Tag': 'googlebot: noarchive'})
def test_put_object_storage_class(self):
    """PUT with 'X-Amz-Storage-Class: STANDARD' is accepted."""
    obj = 'object'
    payload = b'abcdefghij'
    expected_etag = md5(payload).hexdigest()
    sc_headers = {'X-Amz-Storage-Class': 'STANDARD'}
    resp_status, resp_headers, _ = self.conn.make_request(
        'PUT', self.bucket, obj, sc_headers, payload)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_put_object_copy_source_params(self):
    """Only ``versionId`` is accepted as a query param on X-Amz-Copy-Source."""
    obj = 'object'
    src_headers = {'X-Amz-Meta-Test': 'src'}
    src_body = b'some content'
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_object'
    self.conn.make_request('PUT', self.bucket, obj, src_headers, src_body)
    self.conn.make_request('PUT', dst_bucket)

    # An unknown query parameter on the copy source is rejected.
    headers = {'X-Amz-Copy-Source': '/%s/%s?nonsense' % (
        self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 400)
    self.assertEqual(get_error_code(body), 'InvalidArgument')

    # versionId combined with an unknown parameter is still rejected.
    headers = {'X-Amz-Copy-Source': '/%s/%s?versionId=null&nonsense' % (
        self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 400)
    self.assertEqual(get_error_code(body), 'InvalidArgument')

    # versionId=null alone is accepted and the copy succeeds.
    headers = {'X-Amz-Copy-Source': '/%s/%s?versionId=null' % (
        self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)

    # The copied object carries the source's body and metadata.
    status, headers, body = \
        self.conn.make_request('GET', dst_bucket, dst_obj)
    self.assertEqual(status, 200)
    self.assertEqual(headers['x-amz-meta-test'], 'src')
    self.assertEqual(body, src_body)
def test_put_object_copy_source(self):
    """Server-side copy across buckets, within a bucket, and onto itself."""
    obj = 'object'
    content = b'abcdefghij'
    etag = md5(content).hexdigest()
    self.conn.make_request('PUT', self.bucket, obj, body=content)

    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_object'
    self.conn.make_request('PUT', dst_bucket)

    # /src/src -> /dst/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(dst_bucket, dst_obj, etag)

    # /src/src -> /src/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, dst_obj, headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, dst_obj, etag)

    # /src/src -> /src/src
    # need changes to copy itself (e.g. metadata)
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
               'X-Amz-Meta-Foo': 'bar',
               'X-Amz-Metadata-Directive': 'REPLACE'}
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, headers)
    self.assertEqual(status, 200)
    self._assertObjectEtag(self.bucket, obj, etag)
    self.assertCommonResponseHeaders(headers)
def test_put_object_copy_metadata_directive(self):
    """X-Amz-Metadata-Directive controls whether metadata is copied
    from the source or replaced by the copy request's headers."""
    obj = 'object'
    src_headers = {'X-Amz-Meta-Test': 'src'}
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_object'
    self.conn.make_request('PUT', self.bucket, obj, headers=src_headers)
    self.conn.make_request('PUT', dst_bucket)

    # REPLACE: metadata from the copy request wins.
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
               'X-Amz-Metadata-Directive': 'REPLACE',
               'X-Amz-Meta-Test': 'dst'}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    status, headers, body = \
        self.conn.make_request('HEAD', dst_bucket, dst_obj)
    self.assertEqual(headers['x-amz-meta-test'], 'dst')

    # COPY: the source's metadata wins; request metadata is ignored.
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
               'X-Amz-Metadata-Directive': 'COPY',
               'X-Amz-Meta-Test': 'dst'}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    status, headers, body = \
        self.conn.make_request('HEAD', dst_bucket, dst_obj)
    self.assertEqual(headers['x-amz-meta-test'], 'src')

    # REPLACE drops source metadata that is not re-supplied.
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
               'X-Amz-Meta-Test2': 'dst',
               'X-Amz-Metadata-Directive': 'REPLACE'}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    status, headers, body = \
        self.conn.make_request('HEAD', dst_bucket, dst_obj)
    self.assertNotIn('x-amz-meta-test', headers)
    self.assertEqual(headers['x-amz-meta-test2'], 'dst')

    # An unknown directive value is rejected.
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
               'X-Amz-Metadata-Directive': 'BAD'}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(status, 400)
def test_put_object_copy_source_if_modified_since(self):
    """Copy succeeds when the source was modified after the given date."""
    obj = 'object'
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_object'
    empty_etag = md5().hexdigest()
    self.conn.make_request('PUT', self.bucket, obj)
    self.conn.make_request('PUT', dst_bucket)

    _, head_headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
    # One day before the actual last-modified time, so the condition holds.
    threshold = mktime(parsedate(head_headers['last-modified'])) - DAY
    copy_headers = {
        'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
        'X-Amz-Copy-Source-If-Modified-Since': formatdate(threshold),
    }
    resp_status, resp_headers, _ = self.conn.make_request(
        'PUT', dst_bucket, dst_obj, headers=copy_headers)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, empty_etag)
def test_put_object_copy_source_if_unmodified_since(self):
    """Copy succeeds when the source is older than the given date."""
    obj = 'object'
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_object'
    empty_etag = md5().hexdigest()
    self.conn.make_request('PUT', self.bucket, obj)
    self.conn.make_request('PUT', dst_bucket)

    _, head_headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
    # One day after the actual last-modified time, so the condition holds.
    threshold = mktime(parsedate(head_headers['last-modified'])) + DAY
    copy_headers = {
        'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
        'X-Amz-Copy-Source-If-Unmodified-Since': formatdate(threshold),
    }
    resp_status, resp_headers, _ = self.conn.make_request(
        'PUT', dst_bucket, dst_obj, headers=copy_headers)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, empty_etag)
def test_put_object_copy_source_if_match(self):
    """Copy succeeds when X-Amz-Copy-Source-If-Match equals the source etag.

    The source object is empty, so its etag is the md5 of the empty
    string. (Removed a stray HEAD request whose response tuple was
    immediately overwritten and never used.)
    """
    obj = 'object'
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_object'
    etag = md5().hexdigest()
    self.conn.make_request('PUT', self.bucket, obj)
    self.conn.make_request('PUT', dst_bucket)

    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
               'X-Amz-Copy-Source-If-Match': etag}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, etag)
def test_put_object_copy_source_if_none_match(self):
    """Copy succeeds when If-None-Match does not match the source etag."""
    obj = 'object'
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_object'
    empty_etag = md5().hexdigest()
    self.conn.make_request('PUT', self.bucket, obj)
    self.conn.make_request('PUT', dst_bucket)

    copy_headers = {
        'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
        'X-Amz-Copy-Source-If-None-Match': 'none-match',
    }
    resp_status, resp_headers, _ = self.conn.make_request(
        'PUT', dst_bucket, dst_obj, headers=copy_headers)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self._assertObjectEtag(self.bucket, obj, empty_etag)
def test_get_object_response_content_type(self):
    """response-content-type query overrides the returned Content-Type."""
    obj = 'obj'
    self.conn.make_request('PUT', self.bucket, obj)

    override = 'response-content-type=text/plain'
    resp_status, resp_headers, _ = self.conn.make_request(
        'GET', self.bucket, obj, query=override)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self.assertEqual('text/plain', resp_headers['content-type'])
def test_get_object_response_content_language(self):
    """response-content-language query overrides Content-Language."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    override = 'response-content-language=en'
    resp_status, resp_headers, _ = self.conn.make_request(
        'GET', self.bucket, obj, query=override)
    self.assertEqual(200, resp_status)
    self.assertCommonResponseHeaders(resp_headers)
    self.assertEqual('en', resp_headers['content-language'])
def test_get_object_response_cache_control(self):
    """response-cache-control query overrides the Cache-Control header."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    query = 'response-cache-control=private'
    status, headers, body = \
        self.conn.make_request('GET', self.bucket, obj, query=query)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self.assertEqual(headers['cache-control'], 'private')
def test_get_object_response_content_disposition(self):
    """response-content-disposition query overrides Content-Disposition."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    query = 'response-content-disposition=inline'
    status, headers, body = \
        self.conn.make_request('GET', self.bucket, obj, query=query)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self.assertEqual(headers['content-disposition'], 'inline')
def test_get_object_response_content_encoding(self):
    """response-content-encoding query overrides Content-Encoding."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    query = 'response-content-encoding=gzip'
    status, headers, body = \
        self.conn.make_request('GET', self.bucket, obj, query=query)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self.assertEqual(headers['content-encoding'], 'gzip')
def test_get_object_range(self):
    """Range GETs return 206 with the requested bytes; multi-range GETs
    return a multipart/byteranges body with one MIME part per range."""
    key = 'object'
    obj_data = b'abcdefghij'
    self.conn.make_request(
        'PUT', self.bucket, key,
        headers={'x-amz-meta-test': 'swift'}, body=obj_data)

    # Single-range requests: (Range header value, expected payload).
    for range_spec, expected in (('bytes=1-5', b'bcdef'),
                                 ('bytes=5-', b'fghij'),
                                 ('bytes=-5', b'fghij')):
        status, headers, body = self.conn.make_request(
            'GET', self.bucket, key, headers={'Range': range_spec})
        self.assertEqual(status, 206)
        self.assertCommonResponseHeaders(headers)
        self.assertIn('content-length', headers)
        self.assertEqual(headers['content-length'], '5')
        self.assertIn('x-amz-meta-test', headers)
        self.assertEqual('swift', headers['x-amz-meta-test'])
        self.assertEqual(body, expected)

    # Multi-range request: the response body is a MIME multipart document.
    ranges = ['1-2', '4-5']
    status, headers, body = self.conn.make_request(
        'GET', self.bucket, key,
        headers={'Range': 'bytes=%s' % ','.join(ranges)})
    self.assertEqual(status, 206)
    self.assertCommonResponseHeaders(headers)
    self.assertIn('content-length', headers)
    self.assertIn('content-type', headers)  # sanity
    content_type, boundary = headers['content-type'].split(';')
    self.assertEqual('multipart/byteranges', content_type)
    self.assertTrue(boundary.startswith('boundary='))  # sanity
    boundary_str = boundary[len('boundary='):]
    # TODO: Using swift.common.utils.multipart_byteranges_to_document_iters
    # could be easy enough.
    if six.PY2:
        parser = email.parser.FeedParser()
    else:
        parser = email.parser.BytesFeedParser()
    # Prepend a Content-Type header so the stdlib parser knows the boundary.
    parser.feed(
        b"Content-Type: multipart/byterange; boundary=%s\r\n\r\n" %
        boundary_str.encode('ascii'))
    parser.feed(body)
    message = parser.close()
    self.assertTrue(message.is_multipart())  # sanity check
    mime_parts = message.get_payload()
    self.assertEqual(len(mime_parts), len(ranges))  # sanity

    for part, range_value in zip(mime_parts, ranges):
        self.assertTrue(part)  # sanity: the section exists
        start, end = [int(v) for v in range_value.split('-')]
        self.assertEqual(
            'application/octet-stream', part.get_content_type())
        self.assertEqual(
            'bytes %s/%s' % (range_value, len(obj_data)),
            part.get('Content-Range'))
        # The payload must be exactly the requested (inclusive) byte span.
        self.assertEqual(
            obj_data[start:end + 1],
            part.get_payload(decode=True).strip())
def test_get_object_if_modified_since(self):
    """GET with If-Modified-Since in the past must return the object."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    _, headers, _ = self.conn.make_request('HEAD', self.bucket, key)
    threshold = mktime(parsedate(headers['last-modified'])) - DAY
    status, headers, body = self.conn.make_request(
        'GET', self.bucket, key,
        headers={'If-Modified-Since': formatdate(threshold)})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
def test_get_object_if_unmodified_since(self):
    """GET with If-Unmodified-Since in the future must return the object."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    _, headers, _ = self.conn.make_request('HEAD', self.bucket, key)
    threshold = mktime(parsedate(headers['last-modified'])) + DAY
    status, headers, body = self.conn.make_request(
        'GET', self.bucket, key,
        headers={'If-Unmodified-Since': formatdate(threshold)})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
def test_get_object_if_match(self):
    """GET with a matching If-Match etag must return 200."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, key)
    status, headers, body = self.conn.make_request(
        'GET', self.bucket, key, headers={'If-Match': headers['etag']})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
def test_get_object_if_none_match(self):
    """GET with a non-matching If-None-Match etag must return the object."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    status, headers, body = self.conn.make_request(
        'GET', self.bucket, key, headers={'If-None-Match': 'none-match'})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
def test_head_object_range(self):
    """HEAD with a Range header must report the length of the range."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key, body=b'abcdefghij')
    # All three forms select a 5-byte span of the 10-byte object.
    for range_spec in ('bytes=1-5', 'bytes=5-', 'bytes=-5'):
        status, headers, body = self.conn.make_request(
            'HEAD', self.bucket, key, headers={'Range': range_spec})
        self.assertEqual('5', headers['content-length'])
        self.assertCommonResponseHeaders(headers)
def test_head_object_if_modified_since(self):
    """HEAD with If-Modified-Since in the past must return 200."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    _, headers, _ = self.conn.make_request('HEAD', self.bucket, key)
    threshold = mktime(parsedate(headers['last-modified'])) - DAY
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, key,
        headers={'If-Modified-Since': formatdate(threshold)})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
def test_head_object_if_unmodified_since(self):
    """HEAD with If-Unmodified-Since in the future must return 200."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    _, headers, _ = self.conn.make_request('HEAD', self.bucket, key)
    threshold = mktime(parsedate(headers['last-modified'])) + DAY
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, key,
        headers={'If-Unmodified-Since': formatdate(threshold)})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
def test_head_object_if_match(self):
    """HEAD with a matching If-Match etag must return 200."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, key)
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, key, headers={'If-Match': headers['etag']})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
def test_head_object_if_none_match(self):
    """HEAD with a non-matching If-None-Match etag must return 200."""
    key = 'object'
    self.conn.make_request('PUT', self.bucket, key)
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, key, headers={'If-None-Match': 'none-match'})
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
class TestS3ApiObjectSigV4(TestS3ApiObject):
    """Re-run the object tests with AWS Signature Version 4 signing."""

    # boto < 3.0 signs some of these requests incorrectly, so the affected
    # tests are skipped on old boto versions (decorator bound once here).
    _boto2_signing_skip = unittest2.skipIf(
        StrictVersion(boto.__version__) < StrictVersion('3.0'),
        'This stuff got the signing issue of boto<=2.x')

    @classmethod
    def setUpClass(cls):
        os.environ['S3_USE_SIGV4'] = "True"

    @classmethod
    def tearDownClass(cls):
        del os.environ['S3_USE_SIGV4']

    def setUp(self):
        super(TestS3ApiObjectSigV4, self).setUp()

    @_boto2_signing_skip
    def test_put_object_metadata(self):
        super(TestS3ApiObjectSigV4, self).test_put_object_metadata()

    @_boto2_signing_skip
    def test_put_object_copy_source_if_modified_since(self):
        super(TestS3ApiObjectSigV4,
              self).test_put_object_copy_source_if_modified_since()

    @_boto2_signing_skip
    def test_put_object_copy_source_if_unmodified_since(self):
        super(TestS3ApiObjectSigV4,
              self).test_put_object_copy_source_if_unmodified_since()

    @_boto2_signing_skip
    def test_put_object_copy_source_if_match(self):
        super(TestS3ApiObjectSigV4,
              self).test_put_object_copy_source_if_match()

    @_boto2_signing_skip
    def test_put_object_copy_source_if_none_match(self):
        super(TestS3ApiObjectSigV4,
              self).test_put_object_copy_source_if_none_match()
# Allow running this functional-test module directly.
if __name__ == '__main__':
    unittest2.main()
| 40.898678 | 79 | 0.625592 | 4,292 | 37,136 | 5.257689 | 0.084576 | 0.042896 | 0.080431 | 0.097669 | 0.812062 | 0.795046 | 0.772357 | 0.745103 | 0.72915 | 0.717008 | 0 | 0.011533 | 0.24351 | 37,136 | 907 | 80 | 40.943771 | 0.791727 | 0.042008 | 0 | 0.701803 | 0 | 0 | 0.131577 | 0.015849 | 0 | 0 | 0 | 0.001103 | 0.277393 | 1 | 0.074896 | false | 0 | 0.020804 | 0 | 0.098474 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3f07d501d4fcf5004ee6c3852e7f78704b0131d1 | 145 | py | Python | torch_ac_pnn/__init__.py | winnieyangwannan/winnie-pnn | 65f78a3f102679471546c898761c28d6ca522dfd | [
"MIT"
] | null | null | null | torch_ac_pnn/__init__.py | winnieyangwannan/winnie-pnn | 65f78a3f102679471546c898761c28d6ca522dfd | [
"MIT"
] | null | null | null | torch_ac_pnn/__init__.py | winnieyangwannan/winnie-pnn | 65f78a3f102679471546c898761c28d6ca522dfd | [
"MIT"
] | null | null | null | from torch_ac_pnn.algos import A2CAlgo, PPOAlgo
from torch_ac_pnn.model import ACModel, RecurrentACModel
from torch_ac_pnn.utils import DictList
| 36.25 | 56 | 0.868966 | 23 | 145 | 5.217391 | 0.565217 | 0.225 | 0.275 | 0.35 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007634 | 0.096552 | 145 | 3 | 57 | 48.333333 | 0.908397 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
3f588e59099c4e306788ab7408ba3147eef7f226 | 89,365 | py | Python | mailchimp_marketing/api/automations_api.py | Dehorser/mailchimp-marketing-python | ef0c2402b885f1da4e927c25f82313987f12d655 | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing/api/automations_api.py | Dehorser/mailchimp-marketing-python | ef0c2402b885f1da4e927c25f82313987f12d655 | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing/api/automations_api.py | Dehorser/mailchimp-marketing-python | ef0c2402b885f1da4e927c25f82313987f12d655 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Mailchimp Marketing API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 3.0.29
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from mailchimp_marketing.api_client import ApiClient
class AutomationsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client):
    """Keep a reference to the ApiClient used for all HTTP calls."""
    self.api_client = api_client
def archive(self, workflow_id, **kwargs):  # noqa: E501
    """Archive automation  # noqa: E501

    Permanently end an automation while keeping its report data; an
    archived workflow can be replicated but not restarted.

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.archive_with_http_info(workflow_id, **kwargs)  # noqa: E501
    (data) = self.archive_with_http_info(workflow_id, **kwargs)  # noqa: E501
    return data
def archive_with_http_info(self, workflow_id, **kwargs): # noqa: E501
"""Archive automation # noqa: E501
Archiving will permanently end your automation and keep the report data. You’ll be able to replicate your archived automation, but you can’t restart it. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.archive_with_http_info(workflow_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: The unique id for the Automation workflow. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workflow_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method archive" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workflow_id' is set
if ('workflow_id' not in params or
params['workflow_id'] is None):
raise ValueError("Missing the required parameter `workflow_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_id' in params:
path_params['workflow_id'] = params['workflow_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/automations/{workflow_id}/actions/archive', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_workflow_email(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """Delete workflow email  # noqa: E501

    Remove a single Automation workflow email. Emails in certain workflow
    types, including Abandoned Cart (abandonedCart) and Product Retargeting
    (abandonedBrowse) workflows, cannot be deleted.

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: None
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_workflow_email_with_http_info(workflow_id, workflow_email_id, **kwargs)  # noqa: E501
    (data) = self.delete_workflow_email_with_http_info(workflow_id, workflow_email_id, **kwargs)  # noqa: E501
    return data
def delete_workflow_email_with_http_info(self, workflow_id, workflow_email_id, **kwargs): # noqa: E501
"""Delete workflow email # noqa: E501
Removes an individual Automation workflow email. Emails from certain workflow types, including the Abandoned Cart Email (abandonedCart) and Product Retargeting Email (abandonedBrowse) Workflows, cannot be deleted. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_workflow_email_with_http_info(workflow_id, workflow_email_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: The unique id for the Automation workflow. (required)
:param str workflow_email_id: The unique id for the Automation workflow email. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workflow_id', 'workflow_email_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_workflow_email" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workflow_id' is set
if ('workflow_id' not in params or
params['workflow_id'] is None):
raise ValueError("Missing the required parameter `workflow_id` when calling ``") # noqa: E501
# verify the required parameter 'workflow_email_id' is set
if ('workflow_email_id' not in params or
params['workflow_email_id'] is None):
raise ValueError("Missing the required parameter `workflow_email_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_id' in params:
path_params['workflow_id'] = params['workflow_id'] # noqa: E501
if 'workflow_email_id' in params:
path_params['workflow_email_id'] = params['workflow_email_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/automations/{workflow_id}/emails/{workflow_email_id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list(self, **kwargs):  # noqa: E501
    """List automations  # noqa: E501

    Get a summary of an account's Automations.

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param int count: Number of records to return (default 10, maximum 1000).
    :param int offset: Number of records to skip (default 0); slow for large collections.
    :param list[str] fields: Comma-separated list of fields to return (dot notation for sub-objects).
    :param list[str] exclude_fields: Comma-separated list of fields to exclude (dot notation for sub-objects).
    :param datetime before_create_time: Only automations created before this ISO 8601 time (e.g. 2015-10-21T15:41:36+00:00).
    :param datetime since_create_time: Only automations created after this ISO 8601 time.
    :param datetime before_start_time: Only automations started before this ISO 8601 time.
    :param datetime since_start_time: Only automations started after this ISO 8601 time.
    :param str status: Restrict the results to automations with the specified status.
    :return: InlineResponse2002
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.list_with_http_info(**kwargs)  # noqa: E501
    (data) = self.list_with_http_info(**kwargs)  # noqa: E501
    return data
def list_with_http_info(self, **kwargs): # noqa: E501
"""List automations # noqa: E501
Get a summary of an account's Automations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int count: The number of records to return. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **10**. [Maximum value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **1000**
:param int offset: The number of records from a collection to skip. Iterating over large collections with this parameter can be slow. [Default value](/developer/guides/get-started-with-mailchimp-api-3/#Parameters) is **0**.
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:param datetime before_create_time: Restrict the response to automations created before this time. Uses the ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param datetime since_create_time: Restrict the response to automations created after this time. Uses the ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param datetime before_start_time: Restrict the response to automations started before this time. Uses the ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param datetime since_start_time: Restrict the response to automations started after this time. Uses the ISO 8601 time format: 2015-10-21T15:41:36+00:00.
:param str status: Restrict the results to automations with the specified status.
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['count', 'offset', 'fields', 'exclude_fields', 'before_create_time', 'since_create_time', 'before_start_time', 'since_start_time', 'status'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list" % key
)
params[key] = val
del params['kwargs']
if 'count' in params and params['count'] > 1000: # noqa: E501
raise ValueError("Invalid value for parameter `count` when calling ``, must be a value less than or equal to `1000`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'count' in params:
query_params.append(('count', params['count'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
if 'before_create_time' in params:
query_params.append(('before_create_time', params['before_create_time'])) # noqa: E501
if 'since_create_time' in params:
query_params.append(('since_create_time', params['since_create_time'])) # noqa: E501
if 'before_start_time' in params:
query_params.append(('before_start_time', params['before_start_time'])) # noqa: E501
if 'since_start_time' in params:
query_params.append(('since_start_time', params['since_start_time'])) # noqa: E501
if 'status' in params:
query_params.append(('status', params['status'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/automations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get(self, workflow_id, **kwargs):  # noqa: E501
    """Get automation info  # noqa: E501

    Get a summary of an individual Automation workflow's settings and
    content. The `trigger_settings` object returns information for the
    first email in the workflow.

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param list[str] fields: Comma-separated list of fields to return (dot notation for sub-objects).
    :param list[str] exclude_fields: Comma-separated list of fields to exclude (dot notation for sub-objects).
    :return: AutomationWorkflow
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.get_with_http_info(workflow_id, **kwargs)  # noqa: E501
    (data) = self.get_with_http_info(workflow_id, **kwargs)  # noqa: E501
    return data
def get_with_http_info(self, workflow_id, **kwargs): # noqa: E501
"""Get automation info # noqa: E501
Get a summary of an individual Automation workflow's settings and content. The `trigger_settings` object returns information for the first email in the workflow. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_with_http_info(workflow_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: The unique id for the Automation workflow. (required)
:param list[str] fields: A comma-separated list of fields to return. Reference parameters of sub-objects with dot notation.
:param list[str] exclude_fields: A comma-separated list of fields to exclude. Reference parameters of sub-objects with dot notation.
:return: AutomationWorkflow
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workflow_id', 'fields', 'exclude_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workflow_id' is set
if ('workflow_id' not in params or
params['workflow_id'] is None):
raise ValueError("Missing the required parameter `workflow_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_id' in params:
path_params['workflow_id'] = params['workflow_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
collection_formats['fields'] = 'csv' # noqa: E501
if 'exclude_fields' in params:
query_params.append(('exclude_fields', params['exclude_fields'])) # noqa: E501
collection_formats['exclude_fields'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/automations/{workflow_id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AutomationWorkflow', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_all_workflow_emails(self, workflow_id, **kwargs):  # noqa: E501
    """List automated emails  # noqa: E501

    Get a summary of the emails in an Automation workflow.

    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response.

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: AutomationEmails
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.list_all_workflow_emails_with_http_info(workflow_id, **kwargs)  # noqa: E501
    (data) = self.list_all_workflow_emails_with_http_info(workflow_id, **kwargs)  # noqa: E501
    return data
def list_all_workflow_emails_with_http_info(self, workflow_id, **kwargs): # noqa: E501
"""List automated emails # noqa: E501
Get a summary of the emails in an Automation workflow. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_all_workflow_emails_with_http_info(workflow_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str workflow_id: The unique id for the Automation workflow. (required)
:return: AutomationEmails
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workflow_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_all_workflow_emails" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workflow_id' is set
if ('workflow_id' not in params or
params['workflow_id'] is None):
raise ValueError("Missing the required parameter `workflow_id` when calling ``") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_id' in params:
path_params['workflow_id'] = params['workflow_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/problem+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/automations/{workflow_id}/emails', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AutomationEmails', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_workflow_email(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """Get workflow email info  # noqa: E501

    Get information about an individual Automation workflow email.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_workflow_email(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: AutomationWorkflowEmail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request only the deserialized body; callers who need the full
    # (data, status, headers) tuple use the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # Synchronous calls get the data directly; async_req=True makes the
    # helper return the request thread instead -- either way the helper's
    # result is handed straight back.
    return self.get_workflow_email_with_http_info(
        workflow_id, workflow_email_id, **kwargs)  # noqa: E501
def get_workflow_email_with_http_info(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """Get workflow email info  # noqa: E501

    Get information about an individual Automation workflow email.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_workflow_email_with_http_info(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: AutomationWorkflowEmail
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['workflow_id', 'workflow_email_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workflow_email" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `get_workflow_email`")  # noqa: E501
    # verify the required parameter 'workflow_email_id' is set
    if ('workflow_email_id' not in params or
            params['workflow_email_id'] is None):
        raise ValueError("Missing the required parameter `workflow_email_id` when calling `get_workflow_email`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the {placeholders} of the path template.
    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'workflow_email_id' in params:
        path_params['workflow_email_id'] = params['workflow_email_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/emails/{workflow_email_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AutomationWorkflowEmail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_workflow_email_subscriber_queue(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """List automated email subscribers  # noqa: E501

    Get information about an Automation email queue.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_workflow_email_subscriber_queue(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: InlineResponse2003
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted here; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.get_workflow_email_subscriber_queue_with_http_info(
        workflow_id, workflow_email_id, **kwargs)  # noqa: E501
def get_workflow_email_subscriber_queue_with_http_info(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """List automated email subscribers  # noqa: E501

    Get information about an Automation email queue.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_workflow_email_subscriber_queue_with_http_info(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: InlineResponse2003
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['workflow_id', 'workflow_email_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workflow_email_subscriber_queue" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `get_workflow_email_subscriber_queue`")  # noqa: E501
    # verify the required parameter 'workflow_email_id' is set
    if ('workflow_email_id' not in params or
            params['workflow_email_id'] is None):
        raise ValueError("Missing the required parameter `workflow_email_id` when calling `get_workflow_email_subscriber_queue`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the {placeholders} of the path template.
    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'workflow_email_id' in params:
        path_params['workflow_email_id'] = params['workflow_email_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/emails/{workflow_email_id}/queue', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse2003',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_workflow_email_subscriber(self, workflow_id, workflow_email_id, subscriber_hash, **kwargs):  # noqa: E501
    """Get automated email subscriber  # noqa: E501

    Get information about a specific subscriber in an Automation email queue.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_workflow_email_subscriber(workflow_id, workflow_email_id, subscriber_hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
    :return: SubscriberInAutomationQueue2
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.get_workflow_email_subscriber_with_http_info(
        workflow_id, workflow_email_id, subscriber_hash, **kwargs)  # noqa: E501
def get_workflow_email_subscriber_with_http_info(self, workflow_id, workflow_email_id, subscriber_hash, **kwargs):  # noqa: E501
    """Get automated email subscriber  # noqa: E501

    Get information about a specific subscriber in an Automation email queue.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_workflow_email_subscriber_with_http_info(workflow_id, workflow_email_id, subscriber_hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
    :return: SubscriberInAutomationQueue2
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['workflow_id', 'workflow_email_id', 'subscriber_hash']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workflow_email_subscriber" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `get_workflow_email_subscriber`")  # noqa: E501
    # verify the required parameter 'workflow_email_id' is set
    if ('workflow_email_id' not in params or
            params['workflow_email_id'] is None):
        raise ValueError("Missing the required parameter `workflow_email_id` when calling `get_workflow_email_subscriber`")  # noqa: E501
    # verify the required parameter 'subscriber_hash' is set
    if ('subscriber_hash' not in params or
            params['subscriber_hash'] is None):
        raise ValueError("Missing the required parameter `subscriber_hash` when calling `get_workflow_email_subscriber`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the {placeholders} of the path template.
    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'workflow_email_id' in params:
        path_params['workflow_email_id'] = params['workflow_email_id']  # noqa: E501
    if 'subscriber_hash' in params:
        path_params['subscriber_hash'] = params['subscriber_hash']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/emails/{workflow_email_id}/queue/{subscriber_hash}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SubscriberInAutomationQueue2',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_workflow_email_subscribers_removed(self, workflow_id, **kwargs):  # noqa: E501
    """List subscribers removed from workflow  # noqa: E501

    Get information about subscribers who were removed from an Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_workflow_email_subscribers_removed(workflow_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: RemovedSubscribers
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.list_workflow_email_subscribers_removed_with_http_info(
        workflow_id, **kwargs)  # noqa: E501
def list_workflow_email_subscribers_removed_with_http_info(self, workflow_id, **kwargs):  # noqa: E501
    """List subscribers removed from workflow  # noqa: E501

    Get information about subscribers who were removed from an Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_workflow_email_subscribers_removed_with_http_info(workflow_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: RemovedSubscribers
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['workflow_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_workflow_email_subscribers_removed" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `list_workflow_email_subscribers_removed`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the {placeholders} of the path template.
    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/removed-subscribers', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='RemovedSubscribers',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_removed_workflow_email_subscriber(self, workflow_id, subscriber_hash, **kwargs):  # noqa: E501
    """Get subscriber removed from workflow  # noqa: E501

    Get information about a specific subscriber who was removed from an Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_removed_workflow_email_subscriber(workflow_id, subscriber_hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
    :return: SubscriberRemovedFromAutomationWorkflow
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.get_removed_workflow_email_subscriber_with_http_info(
        workflow_id, subscriber_hash, **kwargs)  # noqa: E501
def get_removed_workflow_email_subscriber_with_http_info(self, workflow_id, subscriber_hash, **kwargs):  # noqa: E501
    """Get subscriber removed from workflow  # noqa: E501

    Get information about a specific subscriber who was removed from an Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_removed_workflow_email_subscriber_with_http_info(workflow_id, subscriber_hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str subscriber_hash: The MD5 hash of the lowercase version of the list member's email address. (required)
    :return: SubscriberRemovedFromAutomationWorkflow
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['workflow_id', 'subscriber_hash']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_removed_workflow_email_subscriber" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `get_removed_workflow_email_subscriber`")  # noqa: E501
    # verify the required parameter 'subscriber_hash' is set
    if ('subscriber_hash' not in params or
            params['subscriber_hash'] is None):
        raise ValueError("Missing the required parameter `subscriber_hash` when calling `get_removed_workflow_email_subscriber`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the {placeholders} of the path template.
    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'subscriber_hash' in params:
        path_params['subscriber_hash'] = params['subscriber_hash']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/removed-subscribers/{subscriber_hash}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SubscriberRemovedFromAutomationWorkflow',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_workflow_email(self, workflow_id, workflow_email_id, body, **kwargs):  # noqa: E501
    """Update workflow email  # noqa: E501

    Update settings for a Automation workflow email  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_workflow_email(workflow_id, workflow_email_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :param UpdateInformationAboutASpecificWorkflowEmail body: (required)
    :return: AutomationWorkflowEmail
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.update_workflow_email_with_http_info(
        workflow_id, workflow_email_id, body, **kwargs)  # noqa: E501
def update_workflow_email_with_http_info(self, workflow_id, workflow_email_id, body, **kwargs):  # noqa: E501
    """Update workflow email  # noqa: E501

    Update settings for a Automation workflow email  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_workflow_email_with_http_info(workflow_id, workflow_email_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :param UpdateInformationAboutASpecificWorkflowEmail body: (required)
    :return: AutomationWorkflowEmail
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['workflow_id', 'workflow_email_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_workflow_email" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `update_workflow_email`")  # noqa: E501
    # verify the required parameter 'workflow_email_id' is set
    if ('workflow_email_id' not in params or
            params['workflow_email_id'] is None):
        raise ValueError("Missing the required parameter `workflow_email_id` when calling `update_workflow_email`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_workflow_email`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the {placeholders} of the path template.
    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'workflow_email_id' in params:
        path_params['workflow_email_id'] = params['workflow_email_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/emails/{workflow_email_id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AutomationWorkflowEmail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create(self, body, **kwargs):  # noqa: E501
    """Add automation  # noqa: E501

    Create a new Automation in your Mailchimp account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AutomationWorkflow1 body: (required)
    :return: AutomationWorkflow
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.create_with_http_info(body, **kwargs)  # noqa: E501
def create_with_http_info(self, body, **kwargs):  # noqa: E501
    """Add automation  # noqa: E501

    Create a new Automation in your Mailchimp account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param AutomationWorkflow1 body: (required)
    :return: AutomationWorkflow
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AutomationWorkflow',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def pause_all_emails(self, workflow_id, **kwargs):  # noqa: E501
    """Pause automation emails  # noqa: E501

    Pause all emails in a specific Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.pause_all_emails(workflow_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.pause_all_emails_with_http_info(workflow_id, **kwargs)  # noqa: E501
def pause_all_emails_with_http_info(self, workflow_id, **kwargs):  # noqa: E501
    """Pause automation emails  # noqa: E501

    Pause all emails in a specific Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.pause_all_emails_with_http_info(workflow_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """

    all_params = ['workflow_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject keyword arguments that are not part of this endpoint's contract.
    # dict.items() is equivalent to six.iteritems() here on both Py2 and Py3.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method pause_all_emails" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `pause_all_emails`")  # noqa: E501

    collection_formats = {}

    # Values substituted into the {placeholders} of the path template.
    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/actions/pause-all-emails', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def start_all_emails(self, workflow_id, **kwargs):  # noqa: E501
    """Start automation emails  # noqa: E501

    Start all emails in an Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.start_all_emails(workflow_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the deserialized payload is wanted; the raw HTTP tuple is
    # available through the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    # The helper returns either the data (sync) or the request thread
    # (async_req=True); pass its result through unchanged.
    return self.start_all_emails_with_http_info(workflow_id, **kwargs)  # noqa: E501
def start_all_emails_with_http_info(self, workflow_id, **kwargs):  # noqa: E501
    """Start automation emails  # noqa: E501

    Start all emails in an Automation workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.start_all_emails_with_http_info(workflow_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if `workflow_id` is missing or None.
    """
    all_params = ['workflow_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early.  (dict.items() replaces the
    # redundant six.iteritems — identical behaviour on Python 3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method start_all_emails" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'workflow_id' is set
    # (fixed: the generated message had an empty method name between backticks)
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `start_all_emails`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/actions/start-all-emails', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def pause_workflow_email(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """Pause automated email  # noqa: E501

    Pause an automated email.  Synchronous by default; pass
    ``async_req=True`` to receive a request thread whose ``get()`` yields
    the result instead.

    >>> thread = api.pause_workflow_email(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns either the data (sync)
    # or the request thread (async_req=True), so forward unconditionally.
    return self.pause_workflow_email_with_http_info(workflow_id, workflow_email_id, **kwargs)  # noqa: E501
def pause_workflow_email_with_http_info(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """Pause automated email  # noqa: E501

    Pause an automated email.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.pause_workflow_email_with_http_info(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    all_params = ['workflow_id', 'workflow_email_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early.  (dict.items() replaces the
    # redundant six.iteritems — identical behaviour on Python 3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method pause_workflow_email" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameters are set
    # (fixed: the generated messages had an empty method name between backticks)
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `pause_workflow_email`")  # noqa: E501
    if ('workflow_email_id' not in params or
            params['workflow_email_id'] is None):
        raise ValueError("Missing the required parameter `workflow_email_id` when calling `pause_workflow_email`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'workflow_email_id' in params:
        path_params['workflow_email_id'] = params['workflow_email_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/emails/{workflow_email_id}/actions/pause', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def start_workflow_email(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """Start automated email  # noqa: E501

    Start an automated email.  Synchronous by default; pass
    ``async_req=True`` to receive a request thread whose ``get()`` yields
    the result instead.

    >>> thread = api.start_workflow_email(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns either the data (sync)
    # or the request thread (async_req=True), so forward unconditionally.
    return self.start_workflow_email_with_http_info(workflow_id, workflow_email_id, **kwargs)  # noqa: E501
def start_workflow_email_with_http_info(self, workflow_id, workflow_email_id, **kwargs):  # noqa: E501
    """Start automated email  # noqa: E501

    Start an automated email.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.start_workflow_email_with_http_info(workflow_id, workflow_email_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    all_params = ['workflow_id', 'workflow_email_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early.  (dict.items() replaces the
    # redundant six.iteritems — identical behaviour on Python 3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method start_workflow_email" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameters are set
    # (fixed: the generated messages had an empty method name between backticks)
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `start_workflow_email`")  # noqa: E501
    if ('workflow_email_id' not in params or
            params['workflow_email_id'] is None):
        raise ValueError("Missing the required parameter `workflow_email_id` when calling `start_workflow_email`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'workflow_email_id' in params:
        path_params['workflow_email_id'] = params['workflow_email_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/emails/{workflow_email_id}/actions/start', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_workflow_email_subscriber(self, workflow_id, workflow_email_id, body, **kwargs):  # noqa: E501
    """Add subscriber to workflow email  # noqa: E501

    Manually add a subscriber to a workflow, bypassing the default trigger
    settings. You can also use this endpoint to trigger a series of
    automated emails in an API 3.0 workflow type.  Synchronous by default;
    pass ``async_req=True`` to receive a request thread whose ``get()``
    yields the result instead.

    >>> thread = api.add_workflow_email_subscriber(workflow_id, workflow_email_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :param SubscriberInAutomationQueue1 body: (required)
    :return: SubscriberInAutomationQueue2
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns either the data (sync)
    # or the request thread (async_req=True), so forward unconditionally.
    return self.add_workflow_email_subscriber_with_http_info(workflow_id, workflow_email_id, body, **kwargs)  # noqa: E501
def add_workflow_email_subscriber_with_http_info(self, workflow_id, workflow_email_id, body, **kwargs):  # noqa: E501
    """Add subscriber to workflow email  # noqa: E501

    Manually add a subscriber to a workflow, bypassing the default trigger settings. You can also use this endpoint to trigger a series of automated emails in an API 3.0 workflow type.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_workflow_email_subscriber_with_http_info(workflow_id, workflow_email_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param str workflow_email_id: The unique id for the Automation workflow email. (required)
    :param SubscriberInAutomationQueue1 body: (required)
    :return: SubscriberInAutomationQueue2
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    all_params = ['workflow_id', 'workflow_email_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early.  (dict.items() replaces the
    # redundant six.iteritems — identical behaviour on Python 3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_workflow_email_subscriber" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameters are set
    # (fixed: the generated messages had an empty method name between backticks)
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `add_workflow_email_subscriber`")  # noqa: E501
    if ('workflow_email_id' not in params or
            params['workflow_email_id'] is None):
        raise ValueError("Missing the required parameter `workflow_email_id` when calling `add_workflow_email_subscriber`")  # noqa: E501
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `add_workflow_email_subscriber`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501
    if 'workflow_email_id' in params:
        path_params['workflow_email_id'] = params['workflow_email_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/emails/{workflow_email_id}/queue', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SubscriberInAutomationQueue2',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_workflow_email_subscriber(self, workflow_id, body, **kwargs):  # noqa: E501
    """Remove subscriber from workflow  # noqa: E501

    Remove a subscriber from a specific Automation workflow. You can remove
    a subscriber at any point in an Automation workflow, regardless of how
    many emails they've been sent from that workflow. Once they're removed,
    they can never be added back to the same workflow.  Synchronous by
    default; pass ``async_req=True`` to receive a request thread whose
    ``get()`` yields the result instead.

    >>> thread = api.remove_workflow_email_subscriber(workflow_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param SubscriberInAutomationQueue3 body: (required)
    :return: SubscriberRemovedFromAutomationWorkflow
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants just the payload.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns either the data (sync)
    # or the request thread (async_req=True), so forward unconditionally.
    return self.remove_workflow_email_subscriber_with_http_info(workflow_id, body, **kwargs)  # noqa: E501
def remove_workflow_email_subscriber_with_http_info(self, workflow_id, body, **kwargs):  # noqa: E501
    """Remove subscriber from workflow  # noqa: E501

    Remove a subscriber from a specific Automation workflow. You can remove a subscriber at any point in an Automation workflow, regardless of how many emails they've been sent from that workflow. Once they're removed, they can never be added back to the same workflow.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_workflow_email_subscriber_with_http_info(workflow_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str workflow_id: The unique id for the Automation workflow. (required)
    :param SubscriberInAutomationQueue3 body: (required)
    :return: SubscriberRemovedFromAutomationWorkflow
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unsupported keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or None.
    """
    all_params = ['workflow_id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject typos / unsupported options early.  (dict.items() replaces the
    # redundant six.iteritems — identical behaviour on Python 3.)
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_workflow_email_subscriber" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameters are set
    # (fixed: the generated messages had an empty method name between backticks)
    if ('workflow_id' not in params or
            params['workflow_id'] is None):
        raise ValueError("Missing the required parameter `workflow_id` when calling `remove_workflow_email_subscriber`")  # noqa: E501
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `remove_workflow_email_subscriber`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'workflow_id' in params:
        path_params['workflow_id'] = params['workflow_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/problem+json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['basicAuth']  # noqa: E501

    return self.api_client.call_api(
        '/automations/{workflow_id}/removed-subscribers', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SubscriberRemovedFromAutomationWorkflow',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 45.571137 | 287 | 0.636849 | 10,536 | 89,365 | 5.15784 | 0.028948 | 0.047991 | 0.037539 | 0.023849 | 0.979777 | 0.97536 | 0.973502 | 0.966583 | 0.962461 | 0.958909 | 0 | 0.019106 | 0.276081 | 89,365 | 1,960 | 288 | 45.594388 | 0.820908 | 0.368489 | 0 | 0.850984 | 1 | 0.000937 | 0.222877 | 0.055719 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034677 | false | 0 | 0.003749 | 0 | 0.089972 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3f6fd220f7cfc448f39b98abc99f0f58e9c813b5 | 79 | py | Python | app/views/__init__.py | hustcc/TODO | 406577a0075053be03e710a402aaac0f299e55c4 | [
"MIT"
] | 7 | 2016-09-24T03:12:24.000Z | 2020-07-23T08:18:45.000Z | app/views/__init__.py | hustcc/TODO | 406577a0075053be03e710a402aaac0f299e55c4 | [
"MIT"
] | 1 | 2017-06-30T13:22:08.000Z | 2017-06-30T13:32:38.000Z | app/views/__init__.py | hustcc/TODO | 406577a0075053be03e710a402aaac0f299e55c4 | [
"MIT"
] | 4 | 2017-02-06T01:23:40.000Z | 2019-11-19T13:31:42.000Z | # -*- coding: utf-8 -*-
from app.views import index
from app.views import api
| 15.8 | 27 | 0.683544 | 13 | 79 | 4.153846 | 0.692308 | 0.259259 | 0.444444 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015385 | 0.177215 | 79 | 4 | 28 | 19.75 | 0.815385 | 0.265823 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
58d35bab90628acdad60aead89c473faadcb3b72 | 15,125 | py | Python | aifeynman/S_separability.py | neighthan/AI-Feynman | de3be5250c759877175ea1725dea64000794bd3a | [
"MIT"
] | 1 | 2020-12-17T08:10:38.000Z | 2020-12-17T08:10:38.000Z | aifeynman/S_separability.py | neighthan/AI-Feynman | de3be5250c759877175ea1725dea64000794bd3a | [
"MIT"
] | null | null | null | aifeynman/S_separability.py | neighthan/AI-Feynman | de3be5250c759877175ea1725dea64000794bd3a | [
"MIT"
] | null | null | null | import os
from itertools import combinations
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
# Module-level flag: True when torch can see a CUDA device.  The functions
# below consult it to decide whether tensors and models are moved to the GPU.
is_cuda = torch.cuda.is_available()
class SimpleNet(nn.Module):
    """Fully connected regression network: ni inputs -> 1 scalar output.

    Five linear layers (ni-128-128-64-64-1) with tanh activations between
    them; the final layer is linear so the output range is unbounded.
    """

    def __init__(self, ni):
        super().__init__()
        self.linear1 = nn.Linear(ni, 128)
        self.linear2 = nn.Linear(128, 128)
        self.linear3 = nn.Linear(128, 64)
        self.linear4 = nn.Linear(64, 64)
        self.linear5 = nn.Linear(64, 1)

    def forward(self, x):
        # torch.tanh replaces the deprecated F.tanh (identical function,
        # but F.tanh emits a deprecation warning on modern torch).
        x = torch.tanh(self.linear1(x))
        x = torch.tanh(self.linear2(x))
        x = torch.tanh(self.linear3(x))
        x = torch.tanh(self.linear4(x))
        x = self.linear5(x)
        return x
def rmse_loss(pred, targ):
    """Relative RMSE: root-mean-square error of *pred* vs *targ*,
    normalised by the root-mean-square magnitude of *targ*."""
    target_rms = torch.sqrt((targ ** 2).sum() / len(targ))
    rmse = torch.sqrt(F.mse_loss(pred, targ))
    return rmse / target_rms
def check_separability_plus(pathdir, filename):
    """Test whether the learned function is additively separable.

    Loads the tabulated data ``pathdir/filename`` (columns = variables, last
    column = target) and the pre-trained SimpleNet from
    results/NN_trained_models/models/<filename>.h5, then for every split of
    the variable indices checks how well
    ``f(x) ~ f(x with group A pinned to medians) + f(x with group B pinned)``
    holds (minus the all-median baseline).

    :return: (min_error, best_i, best_j, best_mu, best_sigma) for the best
             split, or a 5-tuple of -1 on any failure (missing file/model,
             single variable, ...).
    """
    try:
        pathdir_weights = "results/NN_trained_models/models/"

        # load the data
        n_variables = np.loadtxt(pathdir + filename, dtype="str").shape[1] - 1
        variables = np.loadtxt(pathdir + filename, usecols=(0,))

        if n_variables == 1:
            # Nothing to separate.  Return the same arity as the success and
            # except paths (the original returned a stray 3-tuple here).
            print(filename, "just one variable for ADD")
            return (-1, -1, -1, -1, -1)

        for j in range(1, n_variables):
            v = np.loadtxt(pathdir + filename, usecols=(j,))
            variables = np.column_stack((variables, v))

        f_dependent = np.loadtxt(pathdir + filename, usecols=(n_variables,))
        f_dependent = np.reshape(f_dependent, (len(f_dependent), 1))

        factors = torch.from_numpy(variables)
        if is_cuda:
            factors = factors.cuda()
        factors = factors.float()

        product = torch.from_numpy(f_dependent)
        if is_cuda:
            product = product.cuda()
        product = product.float()

        # load the trained model and put it in evaluation mode
        if is_cuda:
            model = SimpleNet(n_variables).cuda()
        else:
            model = SimpleNet(n_variables)
        model.load_state_dict(torch.load(pathdir_weights + filename + ".h5"))
        model.eval()

        with torch.no_grad():
            # Baseline input: every variable pinned to its median value.
            fact_vary = factors.clone()
            for k in range(len(factors[0])):
                fact_vary[:, k] = torch.full(
                    (len(factors),), torch.median(factors[:, k])
                )

            # Loop through all non-trivial index subsets and keep the split
            # with the smallest median separability error.
            var_indices_list = np.arange(0, n_variables, 1)
            min_error = 1000
            best_i = []
            best_j = []
            best_mu = 0
            best_sigma = 0
            for i in range(1, n_variables):
                c = combinations(var_indices_list, i)
                for j in c:
                    fact_vary_one = factors.clone()
                    fact_vary_rest = factors.clone()
                    rest_indx = list(filter(lambda x: x not in j, var_indices_list))
                    for t1 in rest_indx:
                        fact_vary_one[:, t1] = torch.full(
                            (len(factors),), torch.median(factors[:, t1])
                        )
                    for t2 in j:
                        fact_vary_rest[:, t2] = torch.full(
                            (len(factors),), torch.median(factors[:, t2])
                        )
                    # check if the equation is separable:
                    # f(one) + f(rest) - f(baseline) should reproduce f(x)
                    sm = model(fact_vary_one) + model(fact_vary_rest)
                    list_errs = 2 * abs(product - sm + model(fact_vary))
                    error = torch.median(list_errs)
                    mu = torch.mean(torch.log2(1 + list_errs * 2 ** 30))
                    sigma = torch.std(torch.log2(1 + list_errs * 2 ** 30))
                    if error < min_error:
                        min_error = error
                        best_i = j
                        best_j = rest_indx
                        best_mu = mu
                        best_sigma = sigma

            return min_error, best_i, best_j, best_mu, best_sigma

    except Exception as e:
        # Best-effort API: any failure is reported as "not separable".
        print(e)
        return (-1, -1, -1, -1, -1)
def do_separability_plus(pathdir, filename, list_i, list_j):
    """Split an additively separable dataset into two sub-problems.

    For the variable groups ``list_i`` / ``list_j`` (as found by
    check_separability_plus), evaluates the trained network with the
    complementary group pinned to its medians and writes two new data files
    ``<filename>-add_a`` and ``<filename>-add_b`` under
    results/separable_add/, whose targets sum (approximately) to the
    original target.

    :return: (dir_a, file_a, dir_b, file_b) on success, or (-1, -1) on any
             failure (missing data file, missing trained model, single
             variable, ...).
    """
    try:
        pathdir_weights = "results/NN_trained_models/models/"

        # load the data
        n_variables = np.loadtxt(pathdir + filename, dtype="str").shape[1] - 1
        variables = np.loadtxt(pathdir + filename, usecols=(0,))

        if n_variables == 1:
            # Nothing to separate; use the same failure arity as the
            # except-path below (the original returned a stray 3-tuple here).
            print(filename, "just one variable for ADD")
            return (-1, -1)

        for j in range(1, n_variables):
            v = np.loadtxt(pathdir + filename, usecols=(j,))
            variables = np.column_stack((variables, v))

        f_dependent = np.loadtxt(pathdir + filename, usecols=(n_variables,))
        f_dependent = np.reshape(f_dependent, (len(f_dependent), 1))

        factors = torch.from_numpy(variables)
        if is_cuda:
            factors = factors.cuda()
        factors = factors.float()

        product = torch.from_numpy(f_dependent)
        if is_cuda:
            product = product.cuda()
        product = product.float()

        # load the trained model and put it in evaluation mode
        if is_cuda:
            model = SimpleNet(n_variables).cuda()
        else:
            model = SimpleNet(n_variables)
        model.load_state_dict(torch.load(pathdir_weights + filename + ".h5"))
        model.eval()

        # Baseline input: every variable pinned to its median value.
        fact_vary = factors.clone()
        for k in range(len(factors[0])):
            fact_vary[:, k] = torch.full((len(factors),), torch.median(factors[:, k]))

        # Pin the complementary variable group in each copy.
        fact_vary_one = factors.clone()
        fact_vary_rest = factors.clone()
        for t1 in list_j:
            fact_vary_one[:, t1] = torch.full(
                (len(factors),), torch.median(factors[:, t1])
            )
        for t2 in list_i:
            fact_vary_rest[:, t2] = torch.full(
                (len(factors),), torch.median(factors[:, t2])
            )

        with torch.no_grad():
            str1 = filename + "-add_a"
            str2 = filename + "-add_b"
            # first half: variables of group i with the network's prediction
            data_sep_1 = variables
            data_sep_1 = np.delete(data_sep_1, list_j, axis=1)
            data_sep_1 = np.column_stack((data_sep_1, model(fact_vary_one).cpu()))
            # second half: subtract the all-median baseline so that the two
            # halves add up to the original target
            data_sep_2 = variables
            data_sep_2 = np.delete(data_sep_2, list_i, axis=1)
            data_sep_2 = np.column_stack(
                (data_sep_2, model(fact_vary_rest).cpu() - model(fact_vary).cpu())
            )

            # exist_ok replaces the original bare try/except around os.mkdir
            # (and also creates the "results" parent if necessary).
            os.makedirs("results/separable_add/", exist_ok=True)
            np.savetxt("results/separable_add/" + str1, data_sep_1)
            np.savetxt("results/separable_add/" + str2, data_sep_2)

        # the two new files and the directory they were written to
        return ("results/separable_add/", str1, "results/separable_add/", str2)

    except Exception as e:
        # Best-effort API: any failure is reported as (-1, -1).
        print(e)
        return (-1, -1)
def check_separability_multiply(pathdir, filename):
    """Test whether the learned function is multiplicatively separable.

    Like check_separability_plus, but tests
    ``f(x) ~ f(group A pinned) * f(group B pinned) / f(all pinned)``.
    Only samples whose |target| is within a factor 5 of the maximum are
    used, which keeps the division by the baseline well conditioned.

    :return: (min_error, best_i, best_j, best_mu, best_sigma) for the best
             split, or a 5-tuple of -1 on any failure (missing file/model,
             single variable, ...).
    """
    try:
        pathdir_weights = "results/NN_trained_models/models/"

        # load the data
        n_variables = np.loadtxt(pathdir + filename, dtype="str").shape[1] - 1
        variables = np.loadtxt(pathdir + filename, usecols=(0,))

        if n_variables == 1:
            # Nothing to separate.  Return the same arity as the success and
            # except paths (the original returned a stray 3-tuple here, and
            # its message said "ADD" — copy-paste from the plus variant).
            print(filename, "just one variable for MULT")
            return (-1, -1, -1, -1, -1)

        for j in range(1, n_variables):
            v = np.loadtxt(pathdir + filename, usecols=(j,))
            variables = np.column_stack((variables, v))

        f_dependent = np.loadtxt(pathdir + filename, usecols=(n_variables,))

        # Pick only data which is close enough to the maximum value
        # (5 times less or higher)
        max_output = np.max(abs(f_dependent))
        use_idx = np.where(abs(f_dependent) >= max_output / 5)
        f_dependent = f_dependent[use_idx]
        f_dependent = np.reshape(f_dependent, (len(f_dependent), 1))
        variables = variables[use_idx]

        factors = torch.from_numpy(variables)
        if is_cuda:
            factors = factors.cuda()
        factors = factors.float()

        product = torch.from_numpy(f_dependent)
        if is_cuda:
            product = product.cuda()
        product = product.float()

        # load the trained model and put it in evaluation mode
        if is_cuda:
            model = SimpleNet(n_variables).cuda()
        else:
            model = SimpleNet(n_variables)
        model.load_state_dict(torch.load(pathdir_weights + filename + ".h5"))
        model.eval()

        with torch.no_grad():
            # Baseline input: every variable pinned to its median value.
            fact_vary = factors.clone()
            for k in range(len(factors[0])):
                fact_vary[:, k] = torch.full(
                    (len(factors),), torch.median(factors[:, k])
                )

            # Loop through all non-trivial index subsets and keep the split
            # with the smallest median separability error.
            var_indices_list = np.arange(0, n_variables, 1)
            min_error = 1000
            best_i = []
            best_j = []
            best_mu = 0
            best_sigma = 0
            for i in range(1, n_variables):
                c = combinations(var_indices_list, i)
                for j in c:
                    fact_vary_one = factors.clone()
                    fact_vary_rest = factors.clone()
                    rest_indx = list(filter(lambda x: x not in j, var_indices_list))
                    for t1 in rest_indx:
                        fact_vary_one[:, t1] = torch.full(
                            (len(factors),), torch.median(factors[:, t1])
                        )
                    for t2 in j:
                        fact_vary_rest[:, t2] = torch.full(
                            (len(factors),), torch.median(factors[:, t2])
                        )
                    # check if the equation is separable:
                    # f(one) * f(rest) / f(baseline) should reproduce f(x)
                    pd = model(fact_vary_one) * model(fact_vary_rest)
                    list_errs = 2 * abs(product - pd / model(fact_vary))
                    error = torch.median(list_errs)
                    mu = torch.mean(torch.log2(1 + list_errs * 2 ** 30))
                    sigma = torch.std(torch.log2(1 + list_errs * 2 ** 30))
                    if error < min_error:
                        min_error = error
                        best_i = j
                        best_j = rest_indx
                        best_mu = mu
                        best_sigma = sigma

            return min_error, best_i, best_j, best_mu, best_sigma

    except Exception as e:
        # Best-effort API: any failure is reported as "not separable".
        print(e)
        return (-1, -1, -1, -1, -1)
def do_separability_multiply(pathdir, filename, list_i, list_j):
    """Split a multiplicatively separable dataset into two sub-problems.

    For the variable groups ``list_i`` / ``list_j`` (as found by
    check_separability_multiply), evaluates the trained network with the
    complementary group pinned to its medians and writes two new data files
    ``<filename>-mult_a`` and ``<filename>-mult_b`` under
    results/separable_mult/, whose targets multiply (approximately) to the
    original target.

    :return: (dir_a, file_a, dir_b, file_b) on success, or (-1, -1) on any
             failure (missing data file, missing trained model, single
             variable, ...).
    """
    try:
        pathdir_weights = "results/NN_trained_models/models/"

        # load the data
        n_variables = np.loadtxt(pathdir + filename, dtype="str").shape[1] - 1
        variables = np.loadtxt(pathdir + filename, usecols=(0,))

        if n_variables == 1:
            # Nothing to separate; use the same failure arity as the
            # except-path below (the original returned a stray 3-tuple here,
            # and its message said "ADD" — copy-paste from the plus variant).
            print(filename, "just one variable for MULT")
            return (-1, -1)

        for j in range(1, n_variables):
            v = np.loadtxt(pathdir + filename, usecols=(j,))
            variables = np.column_stack((variables, v))

        f_dependent = np.loadtxt(pathdir + filename, usecols=(n_variables,))
        f_dependent = np.reshape(f_dependent, (len(f_dependent), 1))

        factors = torch.from_numpy(variables)
        if is_cuda:
            factors = factors.cuda()
        factors = factors.float()

        product = torch.from_numpy(f_dependent)
        if is_cuda:
            product = product.cuda()
        product = product.float()

        # load the trained model and put it in evaluation mode
        if is_cuda:
            model = SimpleNet(n_variables).cuda()
        else:
            model = SimpleNet(n_variables)
        model.load_state_dict(torch.load(pathdir_weights + filename + ".h5"))
        model.eval()

        # Baseline input: every variable pinned to its median value.
        fact_vary = factors.clone()
        for k in range(len(factors[0])):
            fact_vary[:, k] = torch.full((len(factors),), torch.median(factors[:, k]))

        # Pin the complementary variable group in each copy.
        fact_vary_one = factors.clone()
        fact_vary_rest = factors.clone()
        for t1 in list_j:
            fact_vary_one[:, t1] = torch.full(
                (len(factors),), torch.median(factors[:, t1])
            )
        for t2 in list_i:
            fact_vary_rest[:, t2] = torch.full(
                (len(factors),), torch.median(factors[:, t2])
            )

        with torch.no_grad():
            str1 = filename + "-mult_a"
            str2 = filename + "-mult_b"
            # first half: variables of group i with the network's prediction
            data_sep_1 = variables
            data_sep_1 = np.delete(data_sep_1, list_j, axis=1)
            data_sep_1 = np.column_stack((data_sep_1, model(fact_vary_one).cpu()))
            # second half: divide by the all-median baseline so that the two
            # halves multiply to the original target
            data_sep_2 = variables
            data_sep_2 = np.delete(data_sep_2, list_i, axis=1)
            data_sep_2 = np.column_stack(
                (data_sep_2, model(fact_vary_rest).cpu() / model(fact_vary).cpu())
            )

            # exist_ok replaces the original bare try/except around os.mkdir
            # (and also creates the "results" parent if necessary).
            os.makedirs("results/separable_mult/", exist_ok=True)
            np.savetxt("results/separable_mult/" + str1, data_sep_1)
            np.savetxt("results/separable_mult/" + str2, data_sep_2)

        # the two new files and the directory they were written to
        return ("results/separable_mult/", str1, "results/separable_mult/", str2)

    except Exception as e:
        # Best-effort API: any failure is reported as (-1, -1).
        print(e)
        return (-1, -1)
| 37.531017 | 121 | 0.539174 | 1,834 | 15,125 | 4.263359 | 0.104144 | 0.039903 | 0.032741 | 0.049111 | 0.895383 | 0.887198 | 0.874792 | 0.874792 | 0.865328 | 0.865328 | 0 | 0.020831 | 0.355702 | 15,125 | 402 | 122 | 37.624378 | 0.781529 | 0.10162 | 0 | 0.797468 | 0 | 0 | 0.037398 | 0.026333 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022152 | false | 0.006329 | 0.018987 | 0 | 0.088608 | 0.025316 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4536d8fb057a7a2bd58815215d6484efa4fb8fab | 180 | py | Python | sila_cetoni/valves/sila/valve_service/generated/valvepositioncontroller/valvepositioncontroller_types.py | CETONI-Software/sila_cetoni_valves | 2c9dd72c88344471180a4adfce51e4e802994713 | [
"BSD-3-Clause"
] | 1 | 2022-03-08T13:39:46.000Z | 2022-03-08T13:39:46.000Z | sila_cetoni/valves/sila/valve_service/generated/valvepositioncontroller/valvepositioncontroller_types.py | CETONI-Software/sila_cetoni_valves | 2c9dd72c88344471180a4adfce51e4e802994713 | [
"BSD-3-Clause"
] | null | null | null | sila_cetoni/valves/sila/valve_service/generated/valvepositioncontroller/valvepositioncontroller_types.py | CETONI-Software/sila_cetoni_valves | 2c9dd72c88344471180a4adfce51e4e802994713 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import annotations
from typing import NamedTuple
class SwitchToPosition_Responses(NamedTuple):
    """Empty response container for the SwitchToPosition SiLA command (no return fields)."""
class TogglePosition_Responses(NamedTuple):
    """Empty response container for the TogglePosition SiLA command (no return fields)."""
| 12.857143 | 45 | 0.805556 | 18 | 180 | 7.722222 | 0.611111 | 0.273381 | 0.330935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.161111 | 180 | 13 | 46 | 13.846154 | 0.92053 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
454088e33b20e39e9489d5dded1894e9752b3d3d | 107,790 | py | Python | test/test_manning_et_al_2019/test_initial_play_with_stochastic_system.py | kursawe/hesdynamics | e7dd743ba6fcf36bd31937ec4c2c96bd890cc606 | [
"BSD-3-Clause"
] | null | null | null | test/test_manning_et_al_2019/test_initial_play_with_stochastic_system.py | kursawe/hesdynamics | e7dd743ba6fcf36bd31937ec4c2c96bd890cc606 | [
"BSD-3-Clause"
] | null | null | null | test/test_manning_et_al_2019/test_initial_play_with_stochastic_system.py | kursawe/hesdynamics | e7dd743ba6fcf36bd31937ec4c2c96bd890cc606 | [
"BSD-3-Clause"
] | null | null | null | import unittest
import os
os.environ["OMP_NUM_THREADS"] = "1"
import os.path
import sys
import matplotlib as mpl
mpl.rcParams['mathtext.default'] = 'regular'
import matplotlib.pyplot as plt
font = {'size' : 10}
plt.rc('font', **font)
import numpy as np
# make sure we find the right python module
sys.path.append(os.path.join(os.path.dirname(__file__),'..','..','src'))
import hes5
class TestSimpleHes5Model(unittest.TestCase):
    def xest_generate_single_oscillatory_trajectory(self):
        """Run the deterministic delay model for 720 minutes and plot the result.

        Saves mRNA and scaled protein (x0.03) traces to
        output/oscillating_trajectory.pdf.  Prefixed 'xest_' instead of
        'test_' so unittest skips it by default.
        """
        # First: run the model for 720 minutes
        my_trajectory = hes5.generate_deterministic_trajectory( duration = 720,
                                                                repression_threshold = 100,
                                                                mRNA_degradation_rate = 0.03,
                                                                protein_degradation_rate = 0.03,
                                                                transcription_delay = 18.5,
                                                                initial_mRNA = 3,
                                                                initial_protein = 100 )
        # Second, plot the model
        figuresize = (4,2.75)  # NOTE(review): unused -- plt.figure() is called without figsize
        my_figure = plt.figure()
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,1], label = 'mRNA', color = 'black')
        # protein is rescaled by its degradation rate so both traces share an axis
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,2]*0.03, label = 'Hes protein', color = 'black', ls = '--')
        plt.xlabel('Time')
        plt.ylabel('Scaled expression')
        plt.legend()
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','oscillating_trajectory.pdf'))
def xest_generate_hes5_predicted_trajectory(self):
#First: run the model for 100 minutes
my_trajectory = hes5.generate_deterministic_trajectory( duration = 720,
repression_threshold = 23000,
mRNA_degradation_rate = np.log(2)/30,
protein_degradation_rate = np.log(2)/90,
translation_rate = 230,
transcription_delay = 29,
initial_mRNA = 3,
initial_protein = 23000,
for_negative_times = 'initial' )
number_of_data_points = len(my_trajectory[:,2])
fourier_transform = np.fft.fft(my_trajectory[:,2])/number_of_data_points
interval_length = 720.0
fourier_frequencies = np.arange(0,number_of_data_points/(2.0*interval_length), 1/(interval_length) )
print(fourier_transform.shape)
#Second, plot the model
figuresize = (4,4.5)
my_figure = plt.figure()
my_figure.add_subplot(211)
plt.plot(my_trajectory[:,0],
my_trajectory[:,1], label = 'mRNA', color = 'black')
plt.plot(my_trajectory[:,0],
my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--')
plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
verticalalignment='bottom', horizontalalignment='right',
transform=plt.gca().transAxes)
plt.xlabel('Time')
plt.ylabel('Copy number')
plt.legend()
my_figure.add_subplot(212)
plt.plot(fourier_frequencies,
np.abs(fourier_transform[:(number_of_data_points/2)]), color = 'black')
plt.xlim(0,0.02)
plt.xlabel('Frequency')
plt.ylabel('Occurence')
my_figure.savefig(os.path.join(os.path.dirname(__file__),
'output','hes5_trajectory.pdf'))
def xest_that_I_apply_DFT_correctly(self):
interval_length = 100
x_values = np.linspace(1,interval_length,1000)
function_values = 3*np.sin(2*np.pi*0.5*x_values) + 2*np.sin(2*np.pi*0.2*x_values) + 10.0
number_of_data_points = len(x_values)
fourier_transform = np.fft.fft(function_values)/number_of_data_points
fourier_frequencies = np.arange(0,number_of_data_points/(2.0*interval_length), 1.0/(interval_length) )
my_figure = plt.figure()
my_figure.add_subplot(211)
plt.plot(x_values,
function_values, label = r'$3sin(2\pi 0.5x) + 2sin(2\pi 0.2x)$', color = 'black')
plt.xlabel('x')
plt.ylabel('f(x)')
plt.legend()
my_figure.add_subplot(212)
plt.plot(fourier_frequencies,
np.abs(fourier_transform[:(number_of_data_points/2)]), color = 'black')
plt.xlim(0,1)
plt.xlabel('Frequency')
plt.ylabel('Occurence')
my_figure.savefig(os.path.join(os.path.dirname(__file__),
'output','fourier_test.pdf'))
    def xest_generate_non_dimensionalised_trajectory(self):
        """Run the deterministic model in non-dimensionalised units and plot it.

        Time is rescaled by the 29-minute transcription delay and copy numbers
        by the corresponding powers of 29; output goes to
        output/hes5_rescaled_trajectory.pdf.
        """
        # run the rescaled model
        # my_trajectory = hes5.generate_deterministic_trajectory( duration = 720/29.0,
        my_trajectory = hes5.generate_deterministic_trajectory( duration = 60,
                                                                repression_threshold = 100.0/np.power(29.0,2),
                                                                mRNA_degradation_rate = np.log(2)/30*29.0,
                                                                protein_degradation_rate = np.log(2)/90*29.0,
                                                                transcription_delay = 29.0/29.0,
                                                                initial_mRNA = 3.0/(29),
                                                                initial_protein = 100.0/np.power(29.0,2) )
        # plot the model
        figuresize = (4,2.75)  # NOTE(review): unused -- plt.figure() is called without figsize
        my_figure = plt.figure()
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,1], label = 'mRNA', color = 'black')
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--')
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.legend()
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','hes5_rescaled_trajectory.pdf'))
    def xest_extract_period_from_signal(self):
        """Check hes5.measure_period_and_amplitude_of_signal on two known signals.

        Case 1: a decaying oscillation with period 0.5 -- measured period must
        be ~0.5.  Case 2: a flat signal -- measured period must be 0.
        Diagnostic plots are saved to output/extract_frequency.pdf.
        """
        # decaying but obvious oscillation
        interval_length = 10
        x_values = np.linspace(1,interval_length,1000)
        period = 0.5
        function_values = 10 + 3*np.sin(2*np.pi/period*x_values)*np.exp(-0.5*x_values)
        this_period, this_relative_amplitude, this_relative_amplitude_variance = hes5.measure_period_and_amplitude_of_signal(x_values, function_values)
        figuresize = (4,4.5)
        my_figure = plt.figure(figsize = figuresize)
        my_figure.add_subplot(211)
        plt.plot(x_values,
                 function_values, label = r'$3sin(2\pi x/0.5)exp(-0.5x)$', color = 'black')
        plt.xlabel('x')
        plt.ylabel('f(x)')
        # annotate the panel with the measured quantities for visual inspection
        plt.title('Measured period: ' + str(this_period) + '\n' +
                  'Relative amplitude: ' + str(this_relative_amplitude) + '\n' +
                  'Relative amplitude variation: ' + str(this_relative_amplitude_variance) + '\n')
        # plt.gca().text(label_x, label_y, 'A', transform=plt.gca().transAxes)
        self.assertAlmostEqual(0.5, this_period, places=2)
        plt.legend()

        # flat signal: no oscillation, so the measured period should be zero
        function_values = np.ones_like(function_values)*10
        this_period, this_relative_amplitude, this_relative_amplitude_variance = hes5.measure_period_and_amplitude_of_signal(x_values, function_values)
        my_figure.add_subplot(212)
        plt.plot(x_values,
                 function_values, label = r'10', color = 'black')
        plt.xlabel('x')
        plt.ylabel('f(x)')
        plt.title('Measured period: ' + str(this_period) + '\n' +
                  'Relative amplitude: ' + str(this_relative_amplitude) + '\n' +
                  'Relative amplitude variation: ' + str(this_relative_amplitude_variance) + '\n')
        # plt.gca().text(label_x, label_y, 'A', transform=plt.gca().transAxes)
        self.assertAlmostEqual(0.0, this_period)
        plt.legend()
        plt.tight_layout()
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','extract_frequency.pdf'))
    def xest_stochastic_trajectory(self):
        """Run one stochastic trajectory of the delay model and plot it.

        Same parameters as the deterministic oscillatory case; output goes to
        output/stochastic_trajectory.pdf.
        """
        my_trajectory = hes5.generate_stochastic_trajectory( duration = 720,
                                                             repression_threshold = 100,
                                                             mRNA_degradation_rate = 0.03,
                                                             protein_degradation_rate = 0.03,
                                                             transcription_delay = 18.5,
                                                             initial_mRNA = 3,
                                                             initial_protein = 100 )
        # plot the model
        figuresize = (4,2.75)  # NOTE(review): unused -- plt.figure() is called without figsize
        my_figure = plt.figure()
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,1], label = 'mRNA', color = 'black')
        # protein rescaled by its degradation rate so both traces share an axis
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,2]*0.03, label = 'Hes protein (scaled)', color = 'black', ls = '--')
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.legend()
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','stochastic_trajectory.pdf'))
def xest_validate_stochastic_implementation(self):
mRNA_trajectories, protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 10,
duration = 720,
repression_threshold = 100000,
mRNA_degradation_rate = 0.03,
protein_degradation_rate = 0.03,
transcription_delay = 18.5,
basal_transcription_rate = 1000.0,
translation_rate = 1.0,
initial_mRNA = 3000,
initial_protein = 100000 )
mean_protein_trajectory = np.mean(protein_trajectories[:,1:], axis = 1)
protein_deviation = np.std(mRNA_trajectories[:,1:])
mean_mRNA_trajectory = np.mean(mRNA_trajectories[:,1:], axis = 1)
mRNA_deviation = np.std(mRNA_trajectories[:,1:])
deterministic_trajectory = hes5.generate_deterministic_trajectory( duration = 720,
repression_threshold = 100000,
mRNA_degradation_rate = 0.03,
protein_degradation_rate = 0.03,
transcription_delay = 18.5,
basal_transcription_rate = 1000.0,
translation_rate = 1.0,
initial_mRNA = 3000,
initial_protein = 100000,
for_negative_times = 'no_negative' )
figuresize = (4,2.75)
my_figure = plt.figure()
# want to plot: protein and mRNA for stochastic and deterministic system,
# example stochastic system
plt.plot( mRNA_trajectories[:,0],
mRNA_trajectories[:,1]/1000., label = 'mRNA example', color = 'black' )
plt.plot( protein_trajectories[:,0],
protein_trajectories[:,1]/10000., label = 'Protein example', color = 'black', ls = '--' )
plt.plot( mRNA_trajectories[:,0],
mean_mRNA_trajectory/1000., label = 'Mean mRNA', color = 'blue' )
plt.plot( protein_trajectories[:,0],
mean_protein_trajectory/10000., label = 'Mean protein', color = 'blue', ls = '--' )
plt.plot( deterministic_trajectory[:,0],
deterministic_trajectory[:,1]/1000., label = 'Deterministic mRNA', color = 'green' )
plt.plot( deterministic_trajectory[:,0],
deterministic_trajectory[:,2]/10000., label = 'Deterministic Protein', color = 'green', ls = '--' )
plt.xlabel('Time')
plt.ylabel('Scaled expression')
plt.legend()
my_figure.savefig(os.path.join(os.path.dirname(__file__),
'output','stochastic_model_validation.pdf'))
def xest_stochastic_hes_trajectory(self):
#First: run the model for 100 minutes
my_trajectory = hes5.generate_stochastic_trajectory( duration = 720,
repression_threshold = 23000,
mRNA_degradation_rate = np.log(2)/30,
protein_degradation_rate = np.log(2)/90,
translation_rate = 250,
transcription_delay = 29,
initial_mRNA = 3,
initial_protein = 23000)
figuresize = (4,2.5)
my_figure = plt.figure()
plt.plot(my_trajectory[:,0],
my_trajectory[:,1]*10000, label = 'mRNA*1000', color = 'black')
plt.plot(my_trajectory[:,0],
my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--')
plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
verticalalignment='bottom', horizontalalignment='right',
transform=plt.gca().transAxes)
plt.xlabel('Time')
plt.ylabel('Copy number')
plt.legend()
my_figure.savefig(os.path.join(os.path.dirname(__file__),
'output','hes5_stochastic_trajectory.pdf'))
    def xest_stochastic_hes_trajectory_different_transcription(self):
        """Stochastic Hes5 trajectory with higher transcription / lower translation.

        Uses basal_transcription_rate = 9 and translation_rate = 26 so that more
        mRNA carries the same protein output; saved to
        output/hes5_stochastic_trajectory_more_rna.pdf.
        """
        my_trajectory = hes5.generate_stochastic_trajectory( duration = 1500,
                                                             repression_threshold = 23000,
                                                             mRNA_degradation_rate = np.log(2)/30,
                                                             protein_degradation_rate = np.log(2)/90,
                                                             translation_rate = 26,
                                                             basal_transcription_rate = 9,
                                                             transcription_delay = 29,
                                                             initial_mRNA = 3,
                                                             initial_protein = 23000)
        figuresize = (4,2.5)  # NOTE(review): unused -- plt.figure() is called without figsize
        my_figure = plt.figure()
        # scale mRNA by 1000 to be visible next to the protein trace
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,1]*1000, label = 'mRNA*1000', color = 'black')
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--')
        plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
                 verticalalignment='bottom', horizontalalignment='right',
                 transform=plt.gca().transAxes)
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.legend()
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','hes5_stochastic_trajectory_more_rna.pdf'))
    def xest_equlibrate_stochastic_trajectory(self):
        """Stochastic Hes5 trajectory with a 1000-minute equilibration phase.

        Seeds the RNG for reproducibility (also used for profiling runs) and
        saves the plot to output/hes5_stochastic_trajectory_equilibrated.pdf.
        NOTE(review): 'equlibrate' in the method name is a typo for 'equilibrate'.
        """
        # fixed seed: used for profiling, keeps runs comparable
        np.random.seed(0)
        my_trajectory = hes5.generate_stochastic_trajectory( duration = 1500,
                                                             repression_threshold = 31400,
                                                             mRNA_degradation_rate = np.log(2)/30,
                                                             protein_degradation_rate = np.log(2)/90,
                                                             translation_rate = 29,
                                                             basal_transcription_rate = 11,
                                                             transcription_delay = 29,
                                                             initial_mRNA = 3,
                                                             initial_protein = 31400,
                                                             equilibration_time = 1000)
        figuresize = (4,2.5)  # NOTE(review): unused -- plt.figure() is called without figsize
        my_figure = plt.figure()
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,1]*1000, label = 'mRNA*1000', color = 'black')
        plt.plot(my_trajectory[:,0],
                 my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--')
        plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
                 verticalalignment='bottom', horizontalalignment='right',
                 transform=plt.gca().transAxes)
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.legend()
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','hes5_stochastic_trajectory_equilibrated.pdf'))
def xest_multiple_equlibrated_trajectories(self):
mRNA_trajectories, protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 100,
duration = 1500,
repression_threshold = 31400,
mRNA_degradation_rate = np.log(2)/30,
protein_degradation_rate = np.log(2)/90,
translation_rate = 29,
basal_transcription_rate = 11,
transcription_delay = 29,
initial_mRNA = 3,
initial_protein = 31400,
equilibration_time = 1000)
np.save(os.path.join(os.path.dirname(__file__),
'output','protein_traces.npy'), protein_trajectories)
np.save(os.path.join(os.path.dirname(__file__),
'output','rna_traces.npy'), mRNA_trajectories)
mean_protein_trajectory = np.mean(protein_trajectories[:,1:], axis = 1)
protein_deviation = np.std(mRNA_trajectories[:,1:])
mean_mRNA_trajectory = np.mean(mRNA_trajectories[:,1:], axis = 1)
mRNA_deviation = np.std(mRNA_trajectories[:,1:])
figuresize = (4,2.75)
my_figure = plt.figure()
# want to plot: protein and mRNA for stochastic and deterministic system,
# example stochastic system
plt.plot( mRNA_trajectories[:,0],
mRNA_trajectories[:,1]*1000., label = 'mRNA example', color = 'black' )
plt.plot( protein_trajectories[:,0],
protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--' )
plt.plot( mRNA_trajectories[:,0],
mean_mRNA_trajectory*1000, label = 'Mean mRNA*1000', color = 'blue' )
plt.plot( protein_trajectories[:,0],
mean_protein_trajectory, label = 'Mean protein*1000', color = 'blue', ls = '--' )
plt.ylabel('Copy number')
plt.legend()
my_figure.savefig(os.path.join(os.path.dirname(__file__),
'output','average_hes5_behaviour.pdf'))
def xest_average_trajectories_in_oscillating_regime(self):
mRNA_trajectories, protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 100,
duration = 1500,
repression_threshold = 100,
mRNA_degradation_rate = 0.03,
protein_degradation_rate = 0.03,
transcription_delay = 18.5,
initial_mRNA = 3,
initial_protein = 100,
equilibration_time = 1000 )
mean_protein_trajectory = np.mean(protein_trajectories[:,1:], axis = 1)
protein_deviation = np.std(mRNA_trajectories[:,1:])
mean_mRNA_trajectory = np.mean(mRNA_trajectories[:,1:], axis = 1)
mRNA_deviation = np.std(mRNA_trajectories[:,1:])
figuresize = (4,2.75)
my_figure = plt.figure()
# want to plot: protein and mRNA for stochastic and deterministic system,
# example stochastic system
plt.plot( mRNA_trajectories[:,0],
mRNA_trajectories[:,1]*10., label = 'mRNA example', color = 'black' )
plt.plot( protein_trajectories[:,0],
protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--' )
plt.plot( mRNA_trajectories[:,0],
mean_mRNA_trajectory*10, label = 'Mean mRNA*10', color = 'blue' )
plt.plot( protein_trajectories[:,0],
mean_protein_trajectory, label = 'Mean protein*10', color = 'blue', ls = '--' )
plt.ylabel('Copy number')
plt.legend()
my_figure.savefig(os.path.join(os.path.dirname(__file__),
'output','average_oscillating_behaviour.pdf'))
def xest_power_spectra_of_mean_behaviours(self):
## oscillating power spectrum?
mRNA_trajectories, protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 100,
duration = 1500,
repression_threshold = 100,
mRNA_degradation_rate = 0.03,
protein_degradation_rate = 0.03,
transcription_delay = 18.5,
initial_mRNA = 3,
initial_protein = 100,
equilibration_time = 1000 )
mean_protein_trajectory = np.mean(protein_trajectories[:,1:], axis = 1)
protein_deviation = np.std(mRNA_trajectories[:,1:])
mean_mRNA_trajectory = np.mean(mRNA_trajectories[:,1:], axis = 1)
mRNA_deviation = np.std(mRNA_trajectories[:,1:])
number_of_data_points = len(mean_mRNA_trajectory)
interval_length = protein_trajectories[-1,0]
oscillating_fourier_transform = np.fft.fft(mean_protein_trajectory)/number_of_data_points
oscillating_fourier_frequencies = np.arange( 0,number_of_data_points/(2*interval_length),
1.0/(interval_length) )
oscillating_power_spectrum = np.power(np.abs(oscillating_fourier_transform),2)[1:]
## Calculate coherence:
max_index = np.argmax(oscillating_power_spectrum)
coherence_boundary_left = int(np.round(max_index - max_index*0.1))
coherence_boundary_right = int(np.round(max_index + max_index*0.1))
coherence_area = np.trapz(oscillating_power_spectrum[coherence_boundary_left:(coherence_boundary_right+1)])
full_area = np.trapz(oscillating_power_spectrum)
oscillating_coherence = coherence_area/full_area
import pdb; pdb.set_trace()
my_figure = plt.figure()
figuresize = (4,3.5)
my_figure.add_subplot(211)
plt.plot(oscillating_fourier_frequencies[1:],
np.power(np.abs(oscillating_fourier_transform[1:(number_of_data_points/2)]), 2), color = 'black')
plt.xlim(0,0.01)
# plt.ylim(0,100)
plt.xlabel('Frequency')
plt.ylabel('Occurence')
# import pdb; pdb.set_trace()
plt.text(0.95, 0.4, 'Coherence: ' + str(oscillating_coherence),
verticalalignment='bottom', horizontalalignment='right',
transform=plt.gca().transAxes)
my_figure.add_subplot(212)
mRNA_trajectories = np.load( os.path.join(os.path.dirname(__file__),
'output','rna_traces.npy') )
protein_trajectories = np.load( os.path.join(os.path.dirname(__file__),
'output','protein_traces.npy') )
mean_protein_trajectory = np.mean(protein_trajectories[:,1:], axis = 1)
protein_deviation = np.std(mRNA_trajectories[:,1:])
mean_mRNA_trajectory = np.mean(mRNA_trajectories[:,1:], axis = 1)
mRNA_deviation = np.std(mRNA_trajectories[:,1:])
#
number_of_data_points = len(mean_mRNA_trajectory)
interval_length = protein_trajectories[-1,0]
hes5_fourier_transform = np.fft.fft(mean_protein_trajectory)/number_of_data_points
hes5_fourier_frequencies = np.arange(0,number_of_data_points/(2*interval_length),
1.0/(interval_length) )
hes5_power_spectrum = np.power(np.abs(hes5_fourier_transform),2)[1:]
## Calculate coherence:
max_index = np.argmax(hes5_power_spectrum)
coherence_boundary_left = int(np.round(max_index - max_index*0.1))
coherence_boundary_right = int(np.round(max_index + max_index*0.1))
coherence_area = np.trapz(hes5_power_spectrum[coherence_boundary_left:(coherence_boundary_right+1)])
full_area = np.trapz(hes5_power_spectrum)
hes5_coherence = coherence_area/full_area
plt.plot(hes5_fourier_frequencies[1:],
np.power(np.abs(hes5_fourier_transform[1:(number_of_data_points/2)]), 2), color = 'black')
# plt.xlim(0,1)
plt.xlim(0,0.01)
plt.text(0.95, 0.4, 'Coherence: ' + str(hes5_coherence),
verticalalignment='bottom', horizontalalignment='right',
transform=plt.gca().transAxes)
plt.xlabel('Frequency')
plt.ylabel('Occurence')
my_figure.savefig(os.path.join(os.path.dirname(__file__),
'output','fourier_spectra.pdf'))
    def xest_calculate_power_spectrum_of_specific_trace(self):
        """Check hes5.calculate_power_spectrum_of_trajectory on a known signal.

        Uses the same two-frequency test signal as xest_that_I_apply_DFT_correctly;
        the resulting spectrum should peak at 0.5 and 0.2.  Output goes to
        output/power_spectrum_test.pdf.
        """
        interval_length = 100
        x_values = np.linspace(1,interval_length,1000)
        function_values = 3*np.sin(2*np.pi*0.5*x_values) + 2*np.sin(2*np.pi*0.2*x_values) + 10.0
        number_of_data_points = len(x_values)
        # trajectory format expected by hes5: column 0 time, column 1 signal
        trajectory = np.vstack((x_values, function_values)).transpose()
        # fourier_transform = np.fft.fft(function_values)/number_of_data_points
        # fourier_frequencies = np.arange(0,number_of_data_points/(2.0*interval_length), 1.0/(interval_length) )
        power_spectrum,_,_ = hes5.calculate_power_spectrum_of_trajectory(trajectory)

        my_figure = plt.figure()
        my_figure.add_subplot(211)
        plt.plot(x_values,
                 function_values, label = r'$3sin(2\pi 0.5x) + 2sin(2\pi 0.2x)$', color = 'black')
        plt.xlabel('x')
        plt.ylabel('f(x)')
        plt.legend()
        my_figure.add_subplot(212)
        plt.plot(power_spectrum[:,0],
                 power_spectrum[:,1], color = 'black')
        plt.xlim(0,1)
        plt.xlabel('Frequency')
        plt.ylabel('Occurence')
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','power_spectrum_test.pdf'))
    def xest_linear_noise_approximation(self):
        """Compare Gillespie, Langevin, and LNA power spectra at one parameter point.

        Simulates the Hes1-like system at a controllable system size with both
        the exact stochastic (Gillespie-type) and Langevin schemes, then overlays
        their ensemble power spectra with the theoretical (linear noise
        approximation) spectrum.  Saved to output/LNA_test.pdf.
        """
        ##
        # Hes1 samples
        ##
        number_of_traces = 100
        repetition_number = 10
        # system size scales copy numbers; larger values approach the LNA regime
        system_size = 500
        oscillating_mRNA_trajectories, oscillating_protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = number_of_traces,
                                                                                                               duration = 1500*repetition_number,
                                                                                                               repression_threshold = system_size*10,
                                                                                                               mRNA_degradation_rate = 0.03,
                                                                                                               basal_transcription_rate = 1.0*system_size,
                                                                                                               protein_degradation_rate = 0.03,
                                                                                                               transcription_delay = 18.5,
                                                                                                               initial_mRNA = 3,
                                                                                                               initial_protein = 10*system_size,
                                                                                                               equilibration_time = 3000,
                                                                                                               hill_coefficient = 4.1,
                                                                                                               synchronize = False )

        # Langevin ensemble is cheaper, so run 10x more traces
        oscillating_langevin_mRNA_trajectories, oscillating_langevin_protein_trajectories = hes5.generate_multiple_langevin_trajectories( number_of_trajectories = number_of_traces*10,
                                                                                                                                          duration = 1500*repetition_number,
                                                                                                                                          repression_threshold = system_size*10.0,
                                                                                                                                          mRNA_degradation_rate = 0.03,
                                                                                                                                          basal_transcription_rate = 1.0*system_size,
                                                                                                                                          protein_degradation_rate = 0.03,
                                                                                                                                          transcription_delay = 18.5,
                                                                                                                                          initial_mRNA = 3,
                                                                                                                                          initial_protein = 10*system_size,
                                                                                                                                          hill_coefficient = 4.1,
                                                                                                                                          equilibration_time = 3000)

        oscillating_power_spectrum, oscillating_coherence, oscillating_period = hes5.calculate_power_spectrum_of_trajectories(oscillating_protein_trajectories)
        oscillating_langevin_power_spectrum, oscillating_langevin_coherence, oscillating_langevin_period = hes5.calculate_power_spectrum_of_trajectories(oscillating_langevin_protein_trajectories)
        # analytic LNA spectrum at the same parameter point
        theoretical_power_spectrum = hes5.calculate_theoretical_power_spectrum_at_parameter_point(
                                                                           basal_transcription_rate = 1.0*system_size,
                                                                           translation_rate = 1.0,
                                                                           repression_threshold = 10.0*system_size,
                                                                           transcription_delay = 18.5,
                                                                           mRNA_degradation_rate = 0.03,
                                                                           hill_coefficient = 4.1,
                                                                           protein_degradation_rate = 0.03
                                                                           )
        figuresize = (6,2.5)
        my_figure = plt.figure(figsize = figuresize)
        # left panel: example time traces (black = Gillespie, green = Langevin)
        my_figure.add_subplot(121)
        plt.plot( oscillating_mRNA_trajectories[:,0],
                  oscillating_mRNA_trajectories[:,1]*10., label = 'mRNA example*10', color = 'black',
                  lw = 0.5 )
        plt.plot( oscillating_protein_trajectories[:,0],
                  oscillating_protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        plt.plot( oscillating_langevin_mRNA_trajectories[:,0],
                  oscillating_langevin_mRNA_trajectories[:,1]*10., label = 'mRNA example*10', color = 'green',
                  lw = 0.5 )
        plt.plot( oscillating_langevin_protein_trajectories[:,0],
                  oscillating_langevin_protein_trajectories[:,1], label = 'Protein example', color = 'green', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        plt.xlim(0,1500)
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.title('Galla (2009)')
        # plt.legend()
        # right panel: individual spectra (faint), ensemble means, and LNA (blue)
        my_figure.add_subplot(122)
        for trajectory in oscillating_protein_trajectories[:,1:].transpose():
            compound_trajectory = np.vstack((oscillating_protein_trajectories[:,0],trajectory)).transpose()
            this_power_spectrum,_,_ = hes5.calculate_power_spectrum_of_trajectory(compound_trajectory)
            plt.plot(this_power_spectrum[:,0],this_power_spectrum[:,1], color = 'black', alpha = 0.01)
        plt.plot(oscillating_power_spectrum[:,0],
                 oscillating_power_spectrum[:,1], color = 'black')
        plt.plot(oscillating_langevin_power_spectrum[:,0],
                 oscillating_langevin_power_spectrum[:,1], color = 'green')
        plt.plot(theoretical_power_spectrum[:,0],
                 theoretical_power_spectrum[:,1], color = 'blue')
        plt.xlim(0.005,0.01)
        # plt.ylim(0,100)
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        plt.xlabel('Frequency')
        plt.ylabel('Occurence')
        # import pdb; pdb.set_trace()
        plt.text(0.05, 0.95, 'Coherence:\n' + "{:.2f}".format(oscillating_coherence) +
                 '\nPeriod:\n' + "{:.2f}".format(oscillating_period) ,
                 verticalalignment='top', horizontalalignment='left',
                 transform=plt.gca().transAxes)
        plt.tight_layout()
        plt.savefig(os.path.join(os.path.dirname(__file__),
                                 'output','LNA_test.pdf'))
    def xest_mean_power_spectra(self):
        """Six-panel comparison of time traces and power spectra for three systems:
        the oscillating Monk (2003) regime, the Hes5 regime, and uncorrelated
        random walks as a null model.

        Each row shows example/mean traces (left) and individual, ensemble-mean,
        Langevin, and theoretical spectra (right), annotated with coherence and
        period.  Saved to output/mean_fourier_spectra_illustration.pdf.
        """
        ##
        # Hes1 samples
        ##
        number_of_traces = 100
        repetition_number = 10
        oscillating_mRNA_trajectories, oscillating_protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = number_of_traces,
                                                                                                               duration = 1500*repetition_number,
                                                                                                               repression_threshold = 100,
                                                                                                               mRNA_degradation_rate = 0.03,
                                                                                                               protein_degradation_rate = 0.03,
                                                                                                               transcription_delay = 18.5,
                                                                                                               initial_mRNA = 3,
                                                                                                               initial_protein = 100,
                                                                                                               equilibration_time = 1000,
                                                                                                               synchronize = False )
        oscillating_langevin_mRNA_trajectories, oscillating_langevin_protein_trajectories = hes5.generate_multiple_langevin_trajectories( number_of_trajectories = number_of_traces,
                                                                                                                                          duration = 1500*repetition_number,
                                                                                                                                          repression_threshold = 100,
                                                                                                                                          mRNA_degradation_rate = 0.03,
                                                                                                                                          protein_degradation_rate = 0.03,
                                                                                                                                          transcription_delay = 18.5,
                                                                                                                                          initial_mRNA = 3,
                                                                                                                                          initial_protein = 100,
                                                                                                                                          equilibration_time = 1000)
        oscillating_power_spectrum, oscillating_coherence, oscillating_period = hes5.calculate_power_spectrum_of_trajectories(oscillating_protein_trajectories)
        oscillating_langevin_power_spectrum, oscillating_langevin_coherence, oscillating_langevin_period = hes5.calculate_power_spectrum_of_trajectories(oscillating_langevin_protein_trajectories)
        theoretical_power_spectrum = hes5.calculate_theoretical_power_spectrum_at_parameter_point(
                                                                           basal_transcription_rate = 1.0,
                                                                           translation_rate = 1.0,
                                                                           repression_threshold = 100.0,
                                                                           transcription_delay = 18.5,
                                                                           mRNA_degradation_rate = 0.03,
                                                                           protein_degradation_rate = 0.03
                                                                           )
        mean_oscillating_protein_trajectory = np.mean(oscillating_protein_trajectories[:,1:], axis = 1)
        mean_oscillating_mRNA_trajectory = np.mean(oscillating_mRNA_trajectories[:,1:], axis = 1)
        mean_oscillating_langevin_protein_trajectory = np.mean(oscillating_langevin_protein_trajectories[:,1:], axis = 1)
        mean_oscillating_langevin_mRNA_trajectory = np.mean(oscillating_langevin_mRNA_trajectories[:,1:], axis = 1)
        figuresize = (6,6)
        my_figure = plt.figure(figsize = figuresize)
        # row 1, left: Monk (2003) time traces
        my_figure.add_subplot(321)
        plt.plot( oscillating_mRNA_trajectories[:,0],
                  oscillating_mRNA_trajectories[:,1]*10., label = 'mRNA example*10', color = 'black',
                  lw = 0.5 )
        plt.plot( oscillating_protein_trajectories[:,0],
                  oscillating_protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        plt.plot( oscillating_langevin_mRNA_trajectories[:,0],
                  oscillating_langevin_mRNA_trajectories[:,1]*10., label = 'mRNA example*10', color = 'green',
                  lw = 0.5 )
        plt.plot( oscillating_langevin_protein_trajectories[:,0],
                  oscillating_langevin_protein_trajectories[:,1], label = 'Protein example', color = 'green', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        plt.plot( oscillating_mRNA_trajectories[:,0],
                  mean_oscillating_mRNA_trajectory*10, label = 'Mean mRNA*10', color = 'blue',
                  lw = 0.5 )
        plt.plot( oscillating_protein_trajectories[:,0],
                  mean_oscillating_protein_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        plt.xlim(0,1500)
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.title('Monk (2003)')
        # plt.legend()
        # row 1, right: Monk (2003) spectra
        my_figure.add_subplot(322)
        for trajectory in oscillating_protein_trajectories[:,1:].transpose():
            compound_trajectory = np.vstack((oscillating_protein_trajectories[:,0],trajectory)).transpose()
            this_power_spectrum,_,_ = hes5.calculate_power_spectrum_of_trajectory(compound_trajectory)
            plt.plot(this_power_spectrum[:,0],this_power_spectrum[:,1], color = 'black', alpha = 0.01)
        plt.plot(oscillating_power_spectrum[:,0],
                 oscillating_power_spectrum[:,1], color = 'black')
        plt.plot(oscillating_langevin_power_spectrum[:,0],
                 oscillating_langevin_power_spectrum[:,1], color = 'green')
        plt.plot(theoretical_power_spectrum[:,0],
                 theoretical_power_spectrum[:,1], color = 'blue')
        plt.xlim(0,0.01)
        # plt.ylim(0,100)
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        plt.xlabel('Frequency')
        plt.ylabel('Occurence')
        # import pdb; pdb.set_trace()
        plt.text(0.05, 0.95, 'Coherence:\n' + "{:.2f}".format(oscillating_coherence) +
                 '\nPeriod:\n' + "{:.2f}".format(oscillating_period) ,
                 verticalalignment='top', horizontalalignment='left',
                 transform=plt.gca().transAxes)
        ##
        # Hes5 samples
        ##
        hes5_mRNA_trajectories, hes5_protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = number_of_traces,
                                                                                                 duration = 1500*repetition_number,
                                                                                                 repression_threshold = 31400,
                                                                                                 mRNA_degradation_rate = np.log(2)/30,
                                                                                                 protein_degradation_rate = np.log(2)/90,
                                                                                                 translation_rate = 29,
                                                                                                 basal_transcription_rate = 11,
                                                                                                 transcription_delay = 29,
                                                                                                 initial_mRNA = 3,
                                                                                                 initial_protein = 31400,
                                                                                                 equilibration_time = 1000,
                                                                                                 synchronize = False)
        hes5_langevin_mRNA_trajectories, hes5_langevin_protein_trajectories = hes5.generate_multiple_langevin_trajectories( number_of_trajectories = number_of_traces,
                                                                                                                            duration = 1500*repetition_number,
                                                                                                                            repression_threshold = 31400,
                                                                                                                            mRNA_degradation_rate = np.log(2)/30,
                                                                                                                            protein_degradation_rate = np.log(2)/90,
                                                                                                                            translation_rate = 29,
                                                                                                                            basal_transcription_rate = 11,
                                                                                                                            transcription_delay = 29,
                                                                                                                            initial_mRNA = 3,
                                                                                                                            initial_protein = 31400,
                                                                                                                            equilibration_time = 1000)
        hes5_power_spectrum, hes5_coherence, hes5_period = hes5.calculate_power_spectrum_of_trajectories(hes5_protein_trajectories)
        hes5_langevin_power_spectrum, hes5_langevin_coherence, hes5_langevin_period = hes5.calculate_power_spectrum_of_trajectories(hes5_langevin_protein_trajectories)
        theoretical_power_spectrum_hes5 = hes5.calculate_theoretical_power_spectrum_at_parameter_point(
                                                                           basal_transcription_rate = 11.0,
                                                                           translation_rate = 29.0,
                                                                           repression_threshold = 31400.0,
                                                                           transcription_delay = 29.0,
                                                                           mRNA_degradation_rate = np.log(2)/30.0,
                                                                           protein_degradation_rate = np.log(2)/90
                                                                           )
        mean_hes5_protein_trajectory = np.mean(hes5_protein_trajectories[:,1:], axis = 1)
        mean_hes5_rna_trajectory = np.mean(hes5_mRNA_trajectories[:,1:], axis = 1)
        # row 2, left: Hes5 time traces (handles kept for the shared figure legend)
        my_figure.add_subplot(323)
        mrna_example, = plt.plot( hes5_mRNA_trajectories[:,0],
                                  hes5_mRNA_trajectories[:,1]*1000., label = 'mRNA example*1000', color = 'black',
                                  lw = 0.5 )
        protein_example, = plt.plot( hes5_protein_trajectories[:,0],
                                     hes5_protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
                                     lw = 0.5, dashes = [1,1] )
        # NOTE(review): this plots the Monk-regime power spectrum into a
        # copy-number-vs-time axes -- looks like a copy-paste leftover; confirm
        plt.plot(theoretical_power_spectrum[:,0],
                 theoretical_power_spectrum[:,1], color = 'blue')
        plt.plot( hes5_langevin_mRNA_trajectories[:,0],
                  hes5_langevin_mRNA_trajectories[:,1]*1000., label = 'mRNA example*1000', color = 'green',
                  lw = 0.5 )
        plt.plot( hes5_langevin_protein_trajectories[:,0],
                  hes5_langevin_protein_trajectories[:,1], label = 'Protein example', color = 'green', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        mean_rna, = plt.plot( hes5_mRNA_trajectories[:,0],
                              mean_hes5_rna_trajectory*1000., label = 'Mean mRNA*10', color = 'blue',
                              lw = 0.5 )
        mean_protein, = plt.plot( hes5_protein_trajectories[:,0],
                                  mean_hes5_protein_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
                                  lw = 0.5, dashes = [1,1] )
        plt.xlim(0,1500)
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.title('Hes5')
        # plt.legend()
        # row 2, right: Hes5 spectra
        my_figure.add_subplot(324)
        for trajectory in hes5_protein_trajectories[:,1:].transpose():
            compound_trajectory = np.vstack((hes5_protein_trajectories[:,0],trajectory)).transpose()
            this_power_spectrum,_,_ = hes5.calculate_power_spectrum_of_trajectory(compound_trajectory)
            plt.plot(this_power_spectrum[:,0],this_power_spectrum[:,1], color = 'black', alpha = 0.01)
        plt.plot(hes5_power_spectrum[:,0],
                 hes5_power_spectrum[:,1], color = 'black')
        plt.plot(hes5_langevin_power_spectrum[:,0],
                 hes5_langevin_power_spectrum[:,1], color = 'green')
        plt.plot(theoretical_power_spectrum_hes5[:,0],
                 theoretical_power_spectrum_hes5[:,1], color = 'blue')
        plt.xlim(0,0.01)
        # plt.ylim(0,100)
        plt.xlabel('Frequency')
        plt.ylabel('Occurence')
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        # import pdb; pdb.set_trace()
        plt.text(0.95, 0.95, 'Coherence:\n' + "{:.2f}".format(hes5_coherence) +
                 '\nPeriod:\n' + "{:.2f}".format(hes5_period) ,
                 verticalalignment='top', horizontalalignment='right',
                 transform=plt.gca().transAxes)
        ##
        # Random samples
        ##
        # generate the random samples: each column is an independent random walk
        # (cumulative sum of unit-variance Gaussian steps), offset by 100
        random_trajectories = np.zeros((100*repetition_number,number_of_traces+1))
        times_of_trajectories = np.linspace(0,1500*repetition_number,100*repetition_number)
        random_trajectories[:,0] = times_of_trajectories
        # for trajectory_index in range(100):
        # for time_index in range(1,100):
        for trajectory_index in range(1,number_of_traces+1):
            for time_index in range(1,len(times_of_trajectories)):
                random_trajectories[time_index, trajectory_index] = random_trajectories[time_index-1, trajectory_index]\
                                                                    + np.random.randn()*1.0
        random_trajectories[:,1:] += 100
        # generate power spectrum, measure period etc
        random_power_spectrum, random_coherence, random_period = \
            hes5.calculate_power_spectrum_of_trajectories(random_trajectories)
        mean_random_trajectory = np.mean(random_trajectories[:,1:], axis = 1)
        # row 3, left: random-walk time traces
        my_figure.add_subplot(325)
        plt.plot( random_trajectories[:,0],
                  random_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        plt.plot( random_trajectories[:,0],
                  mean_random_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
                  lw = 0.5, dashes = [1,1] )
        plt.xlim(0,1500)
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        plt.xlabel('Time')
        plt.ylabel('Copy number')
        plt.title('Random traces')
        # plt.legend()
        # row 3, right: random-walk spectra
        my_figure.add_subplot(326)
        for trajectory in random_trajectories[:,1:].transpose():
            compound_trajectory = np.vstack((random_trajectories[:,0],trajectory)).transpose()
            this_power_spectrum,_,_ = hes5.calculate_power_spectrum_of_trajectory(compound_trajectory)
            plt.plot(this_power_spectrum[:,0],this_power_spectrum[:,1], color = 'black', alpha = 0.01)
        plt.plot(random_power_spectrum[:,0],
                 random_power_spectrum[:,1], color = 'black')
        plt.xlim(0,0.01)
        # plt.ylim(0,100)
        plt.xlabel('Frequency')
        plt.ylabel('Occurence')
        # import pdb; pdb.set_trace()
        plt.text(0.95, 0.95, 'Coherence: ' + "{:.2f}".format(random_coherence) +
                 '\nPeriod: ' + "{:.2f}".format(random_period) ,
                 verticalalignment='top', horizontalalignment='right',
                 transform=plt.gca().transAxes)
        plt.tight_layout()
        # shared legend uses the handles captured in subplot 323
        my_figure.legend((mrna_example, protein_example),
                         ('mRNA example (scaled)', 'Protein example'),
                         loc = 'upper right', ncol = 2 )
        plt.gca().locator_params(axis='x', tight = True, nbins=4)
        plt.subplots_adjust(top = 0.85, hspace = 0.7)
        my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                       'output','mean_fourier_spectra_illustration.pdf'))
def xest_power_spectra_of_mean_behaviours(self):
    """Plot example traces, ensemble means, and 'mean' power spectra for three cases.

    Builds a 3x2 figure comparing (row 1) an oscillating trajectory with
    Monk (2003) Hes1 parameters, (row 2) a trajectory with the standard
    Hes5 parameters, and (row 3) an ensemble of unbiased random-walk
    control traces.  The left column shows time courses (example traces in
    black, ensemble means in blue), the right column the corresponding
    power spectrum annotated with coherence and period.  The figure is
    written to output/fourier_spectra_illustration.pdf.

    Bug fixed here: the random-trace array was allocated with only two
    columns (np.zeros((100,2))) while the generating loop wrote to
    columns 1..100, raising an IndexError; the allocation now provides one
    time column plus 100 trajectory columns, matching the synchronised
    variant of this test.
    """
    ##
    # Hes1 samples
    ##
    repetition_number = 1
    oscillating_mRNA_trajectories, oscillating_protein_trajectories = hes5.generate_multiple_trajectories(
                                                      number_of_trajectories = 1,
                                                      duration = 1500*repetition_number,
                                                      repression_threshold = 100,
                                                      mRNA_degradation_rate = 0.03,
                                                      protein_degradation_rate = 0.03,
                                                      transcription_delay = 18.5,
                                                      initial_mRNA = 3,
                                                      initial_protein = 100,
                                                      equilibration_time = 1000,
                                                      synchronize = False )
    oscillating_power_spectrum, oscillating_coherence, oscillating_period = hes5.calculate_power_spectrum_of_trajectories(
                                                      oscillating_protein_trajectories,
                                                      method = 'mean')
    # ensemble means over the trajectory columns (column 0 holds time)
    mean_oscillating_protein_trajectory = np.mean(oscillating_protein_trajectories[:,1:], axis = 1)
    mean_oscillating_mRNA_trajectory = np.mean(oscillating_mRNA_trajectories[:,1:], axis = 1)
    figuresize = (6,6)
    my_figure = plt.figure(figsize = figuresize)
    my_figure.add_subplot(321)
    plt.plot( oscillating_mRNA_trajectories[:,0],
              oscillating_mRNA_trajectories[:,1]*10., label = 'mRNA example*10', color = 'black',
              lw = 0.5 )
    plt.plot( oscillating_protein_trajectories[:,0],
              oscillating_protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.plot( oscillating_mRNA_trajectories[:,0],
              mean_oscillating_mRNA_trajectory*10, label = 'Mean mRNA*10', color = 'blue',
              lw = 0.5 )
    plt.plot( oscillating_protein_trajectories[:,0],
              mean_oscillating_protein_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.title('Monk (2003)')
    my_figure.add_subplot(322)
    plt.plot(oscillating_power_spectrum[:,0],
             oscillating_power_spectrum[:,1], color = 'black')
    plt.xlim(0,0.01)
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Frequency')
    plt.ylabel('Occurence')
    plt.text(0.05, 0.95, 'Coherence:\n' + "{:.2f}".format(oscillating_coherence) +
             '\nPeriod:\n' + "{:.2f}".format(oscillating_period) ,
             verticalalignment='top', horizontalalignment='left',
             transform=plt.gca().transAxes)
    ##
    # Hes5 samples
    ##
    hes5_mRNA_trajectories, hes5_protein_trajectories = hes5.generate_multiple_trajectories(
                                                      number_of_trajectories = 1,
                                                      duration = 1500*repetition_number,
                                                      repression_threshold = 31400,
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = 29,
                                                      basal_transcription_rate = 11,
                                                      transcription_delay = 29,
                                                      initial_mRNA = 3,
                                                      initial_protein = 31400,
                                                      equilibration_time = 1000,
                                                      synchronize = False)
    hes5_power_spectrum, hes5_coherence, hes5_period = hes5.calculate_power_spectrum_of_trajectories(
                                                      hes5_protein_trajectories,
                                                      method = 'mean')
    mean_hes5_protein_trajectory = np.mean(hes5_protein_trajectories[:,1:], axis = 1)
    mean_hes5_rna_trajectory = np.mean(hes5_mRNA_trajectories[:,1:], axis = 1)
    my_figure.add_subplot(323)
    # keep the line handles so they can be reused for the figure legend below
    mrna_example, = plt.plot( hes5_mRNA_trajectories[:,0],
                              hes5_mRNA_trajectories[:,1]*1000., label = 'mRNA example*1000', color = 'black',
                              lw = 0.5 )
    protein_example, = plt.plot( hes5_protein_trajectories[:,0],
                                 hes5_protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
                                 lw = 0.5, dashes = [1,1] )
    mean_rna, = plt.plot( hes5_mRNA_trajectories[:,0],
                          mean_hes5_rna_trajectory*1000., label = 'Mean mRNA*10', color = 'blue',
                          lw = 0.5 )
    mean_protein, = plt.plot( hes5_protein_trajectories[:,0],
                              mean_hes5_protein_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
                              lw = 0.5, dashes = [1,1] )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.title('Hes5')
    my_figure.add_subplot(324)
    plt.plot(hes5_power_spectrum[:,0],
             hes5_power_spectrum[:,1], color = 'black')
    plt.xlim(0,0.01)
    plt.xlabel('Frequency')
    plt.ylabel('Occurence')
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.text(0.95, 0.95, 'Coherence:\n' + "{:.2f}".format(hes5_coherence) +
             '\nPeriod:\n' + "{:.2f}".format(hes5_period) ,
             verticalalignment='top', horizontalalignment='right',
             transform=plt.gca().transAxes)
    ##
    # Random samples
    ##
    # generate the random samples: 100 unbiased random walks (unit-variance
    # Gaussian steps) shifted to a positive copy number of ~100.
    # One time column plus 100 trajectory columns — this previously
    # allocated only two columns and crashed with an IndexError.
    number_of_time_points = 100*repetition_number
    random_trajectories = np.zeros((number_of_time_points, 101))
    times_of_trajectories = np.linspace(0, 1500*repetition_number, number_of_time_points)
    random_trajectories[:,0] = times_of_trajectories
    for trajectory_index in range(100):
        for time_index in range(1, number_of_time_points):
            random_trajectories[time_index, trajectory_index+1] = random_trajectories[time_index-1, trajectory_index+1]\
                                                                  + np.random.randn()*1.0
    random_trajectories[:,1:] += 100
    # generate power spectrum, measure period etc
    random_power_spectrum, random_coherence, random_period = \
        hes5.calculate_power_spectrum_of_trajectories(random_trajectories, method = 'mean')
    mean_random_trajectory = np.mean(random_trajectories[:,1:], axis = 1)
    my_figure.add_subplot(325)
    plt.plot( random_trajectories[:,0],
              random_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.plot( random_trajectories[:,0],
              mean_random_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.title('Random traces')
    my_figure.add_subplot(326)
    plt.plot(random_power_spectrum[:,0],
             random_power_spectrum[:,1], color = 'black')
    plt.xlim(0,0.01)
    plt.xlabel('Frequency')
    plt.ylabel('Occurence')
    plt.text(0.95, 0.95, 'Coherence: ' + "{:.2f}".format(random_coherence) +
             '\nPeriod: ' + "{:.2f}".format(random_period) ,
             verticalalignment='top', horizontalalignment='right',
             transform=plt.gca().transAxes)
    plt.tight_layout()
    my_figure.legend((mrna_example, protein_example, mean_rna, mean_protein),
                     ('mRNA example (scaled)', 'Protein example',
                      'mean mRNA (scaled)', 'Mean protein'),
                     loc = 'upper right', ncol = 2 )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.subplots_adjust(top = 0.85, hspace = 0.7)
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','mean_fourier_spectra_illustration.pdf'))
def xest_power_spectra_of_mean_behaviours_synchronised(self):
    """Plot traces, ensemble means and power spectra for synchronised ensembles.

    Same layout as xest_power_spectra_of_mean_behaviours, but with 100
    trajectories per case and without `synchronize = False` (so the
    trajectories presumably start in phase — confirm against
    hes5.generate_multiple_trajectories defaults).  A 3x2 figure compares
    Monk (2003) Hes1 parameters, Hes5 parameters, and random-walk control
    traces; it is saved to
    output/fourier_spectra_illustration_synchronised.pdf.
    """
    ##
    # Hes1 samples
    ##
    oscillating_mRNA_trajectories, oscillating_protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 100,
                                                      duration = 1500,
                                                      repression_threshold = 100,
                                                      mRNA_degradation_rate = 0.03,
                                                      protein_degradation_rate = 0.03,
                                                      transcription_delay = 18.5,
                                                      initial_mRNA = 3,
                                                      initial_protein = 100,
                                                      equilibration_time = 1000 )
    oscillating_power_spectrum, oscillating_coherence, oscillating_period = hes5.calculate_power_spectrum_of_trajectories(oscillating_protein_trajectories)
    # ensemble means over the trajectory columns (column 0 holds time)
    mean_oscillating_protein_trajectory = np.mean(oscillating_protein_trajectories[:,1:], axis = 1)
    mean_oscillating_mRNA_trajectory = np.mean(oscillating_mRNA_trajectories[:,1:], axis = 1)
    figuresize = (6,6)
    my_figure = plt.figure(figsize = figuresize)
    # row 1, left: Hes1 time courses (example traces black, means blue)
    my_figure.add_subplot(321)
    plt.plot( oscillating_mRNA_trajectories[:,0],
              oscillating_mRNA_trajectories[:,1]*10., label = 'mRNA example*10', color = 'black',
              lw = 0.5 )
    plt.plot( oscillating_protein_trajectories[:,0],
              oscillating_protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.plot( oscillating_mRNA_trajectories[:,0],
              mean_oscillating_mRNA_trajectory*10, label = 'Mean mRNA*10', color = 'blue',
              lw = 0.5 )
    plt.plot( oscillating_protein_trajectories[:,0],
              mean_oscillating_protein_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.title('Monk (2003)')
    # plt.legend()
    # row 1, right: Hes1 power spectrum with coherence/period annotation
    my_figure.add_subplot(322)
    plt.plot(oscillating_power_spectrum[:,0],
             oscillating_power_spectrum[:,1], color = 'black')
    plt.xlim(0,0.01)
    # plt.ylim(0,100)
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Frequency')
    plt.ylabel('Occurence')
    plt.text(0.05, 0.95, 'Coherence:\n' + "{:.2f}".format(oscillating_coherence) +
             '\nPeriod:\n' + "{:.2f}".format(oscillating_period) ,
             verticalalignment='top', horizontalalignment='left',
             transform=plt.gca().transAxes)
    ##
    # Hes5 samples
    ##
    hes5_mRNA_trajectories, hes5_protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 100,
                                                      duration = 1500,
                                                      repression_threshold = 31400,
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = 29,
                                                      basal_transcription_rate = 11,
                                                      transcription_delay = 29,
                                                      initial_mRNA = 3,
                                                      initial_protein = 31400,
                                                      equilibration_time = 1000)
    hes5_power_spectrum, hes5_coherence, hes5_period = hes5.calculate_power_spectrum_of_trajectories(hes5_protein_trajectories)
    mean_hes5_protein_trajectory = np.mean(hes5_protein_trajectories[:,1:], axis = 1)
    mean_hes5_rna_trajectory = np.mean(hes5_mRNA_trajectories[:,1:], axis = 1)
    # row 2, left: Hes5 time courses; line handles are kept for the
    # figure-level legend assembled at the end
    my_figure.add_subplot(323)
    mrna_example, = plt.plot( hes5_mRNA_trajectories[:,0],
                              hes5_mRNA_trajectories[:,1]*1000., label = 'mRNA example*1000', color = 'black',
                              lw = 0.5 )
    protein_example, = plt.plot( hes5_protein_trajectories[:,0],
                                 hes5_protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
                                 lw = 0.5, dashes = [1,1] )
    mean_rna, = plt.plot( hes5_mRNA_trajectories[:,0],
                          mean_hes5_rna_trajectory*1000., label = 'Mean mRNA*10', color = 'blue',
                          lw = 0.5 )
    mean_protein, = plt.plot( hes5_protein_trajectories[:,0],
                              mean_hes5_protein_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
                              lw = 0.5, dashes = [1,1] )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.title('Hes5')
    # plt.legend()
    # row 2, right: Hes5 power spectrum
    my_figure.add_subplot(324)
    plt.plot(hes5_power_spectrum[:,0],
             hes5_power_spectrum[:,1], color = 'black')
    plt.xlim(0,0.01)
    # plt.ylim(0,100)
    plt.xlabel('Frequency')
    plt.ylabel('Occurence')
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.text(0.95, 0.95, 'Coherence:\n' + "{:.2f}".format(hes5_coherence) +
             '\nPeriod:\n' + "{:.2f}".format(hes5_period) ,
             verticalalignment='top', horizontalalignment='right',
             transform=plt.gca().transAxes)
    ##
    # Random samples
    ##
    # generate the random samples: 100 unbiased Gaussian random walks,
    # shifted to a positive copy number of ~100; column 0 holds time,
    # columns 1..100 hold the walks
    random_trajectories = np.zeros((100,101))
    times_of_trajectories = np.linspace(0,1500,100)
    random_trajectories[:,0] = times_of_trajectories
    for trajectory_index in range(100):
        for time_index in range(1,100):
            random_trajectories[time_index, trajectory_index+1] = random_trajectories[time_index-1, trajectory_index+1]\
                                                                  + np.random.randn()*1.0
    random_trajectories[:,1:] += 100
    # generate power spectrum, measure period etc
    random_power_spectrum, random_coherence, random_period = \
        hes5.calculate_power_spectrum_of_trajectories(random_trajectories)
    mean_random_trajectory = np.mean(random_trajectories[:,1:], axis = 1)
    # row 3: random-walk control traces and their spectrum
    my_figure.add_subplot(325)
    plt.plot( random_trajectories[:,0],
              random_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.plot( random_trajectories[:,0],
              mean_random_trajectory, label = 'Mean protein', color = 'blue', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.title('Random traces')
    # plt.legend()
    my_figure.add_subplot(326)
    plt.plot(random_power_spectrum[:,0],
             random_power_spectrum[:,1], color = 'black')
    plt.xlim(0,0.01)
    # plt.ylim(0,100)
    plt.xlabel('Frequency')
    plt.ylabel('Occurence')
    plt.text(0.95, 0.95, 'Coherence: ' + "{:.2f}".format(random_coherence) +
             '\nPeriod: ' + "{:.2f}".format(random_period) ,
             verticalalignment='top', horizontalalignment='right',
             transform=plt.gca().transAxes)
    plt.tight_layout()
    # figure-level legend built from the Hes5 subplot's line handles
    my_figure.legend((mrna_example, protein_example, mean_rna, mean_protein),
                     ('mRNA example (scaled)', 'Protein example',
                      'mean mRNA (scaled)', 'Mean protein'),
                     loc = 'upper right', ncol = 2 )
    plt.gca().locator_params(axis='x', tight = True, nbins=4)
    plt.subplots_adjust(top = 0.85, hspace = 0.7)
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','fourier_spectra_illustration_synchronised.pdf'))
def xest_plot_100_hes_trajectories(self):
    """Plot 10 Langevin Hes5 trajectories at a sampled posterior point.

    Loads posterior samples and summary statistics from
    data/sampling_results_logarithmic, filters them to a plausible region,
    picks one accepted parameter vector, simulates 10 stochastic Langevin
    trajectories plus the deterministic solution at that point, and saves
    the overlay to output/100_trajectories.pdf.

    NOTE(review): a method with this exact name is defined again further
    down, which shadows this definition — only the later one is callable.
    Rename one of them if both are wanted.
    """
    saving_path = os.path.join(os.path.dirname(__file__), 'data','sampling_results_logarithmic')
    model_results = np.load(saving_path + '.npy' )
    prior_samples = np.load(saving_path + '_parameters.npy')
    # sns.set()
    # accept samples whose mean expression lies in [55000, 65000] and whose
    # relative standard deviation lies in (0.05, 0.15)
    accepted_indices = np.where(np.logical_and(model_results[:,0]>55000, #cell number
                                np.logical_and(model_results[:,0]<65000, #cell_number
                                np.logical_and(model_results[:,1]<0.15, #standard deviation
                                               model_results[:,1]>0.05))))
    my_posterior_samples = prior_samples[accepted_indices]
    accepted_model_results = model_results[accepted_indices]
    # further restrict to samples with column 3 > 0.8
    # (presumably a coherence-like statistic — confirm against the sampler)
    further_indices = np.where(accepted_model_results[:,3]>0.8)
    my_posterior_samples = my_posterior_samples[further_indices]
    # parameter vector layout assumed: [transcription, delay, threshold,
    # translation, hill coefficient] — TODO confirm against sampling code
    this_parameter = my_posterior_samples[0]
    hes5_mRNA_trajectories, hes5_protein_trajectories = hes5.generate_multiple_langevin_trajectories( number_of_trajectories = 10,
                                                      duration = 2000,
                                                      repression_threshold = this_parameter[2],
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = this_parameter[3],
                                                      basal_transcription_rate = this_parameter[0],
                                                      transcription_delay = this_parameter[1],
                                                      initial_mRNA = 3,
                                                      initial_protein = this_parameter[2],
                                                      hill_coefficient = this_parameter[4])
    #                                                   equilibration_time = 1000,
    #                                                   synchronize = True)
    #
    #     hes5_mRNA_trajectories, hes5_protein_trajectories = hes5.generate_multiple_trajectories(
    #                                                   number_of_trajectories = 10,
    #                                                   duration = 2000,
    #                                                   repression_threshold = 31400,
    #                                                   mRNA_degradation_rate = np.log(2)/30,
    #                                                   protein_degradation_rate = np.log(2)/90,
    #                                                   translation_rate = 29,
    #                                                   basal_transcription_rate = 11,
    #                                                   transcription_delay = 29,
    #                                                   initial_mRNA = 3,
    #                                                   initial_protein = 31400)
    #
    # deterministic reference solution at the same parameter point
    deterministic_trajectory = hes5.generate_deterministic_trajectory(duration = 2000,
                                                      repression_threshold = this_parameter[2],
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = this_parameter[3],
                                                      basal_transcription_rate = this_parameter[0],
                                                      transcription_delay = this_parameter[1],
                                                      initial_mRNA = 3,
                                                      initial_protein = this_parameter[2],
    #                                                   repression_threshold = 31400,
    #                                                   mRNA_degradation_rate = np.log(2)/30,
    #                                                   protein_degradation_rate = np.log(2)/90,
    #                                                   translation_rate = 29,
    #                                                   basal_transcription_rate = 11,
    #                                                   transcription_delay = 29,
    #                                                   initial_mRNA = 3,
    #                                                   initial_protein = 31400,
                                                      hill_coefficient = this_parameter[4],
                                                      for_negative_times = 'no_negative')
    # deterministic_trajectory = deterministic_trajectory[deterministic_trajectory[:,0]>1000]
    # deterministic_trajectory[:,0] -= 1000
    mean_hes5_protein_trajectory = np.mean(hes5_protein_trajectories[:,1:], axis = 1)
    mean_hes5_rna_trajectory = np.mean(hes5_mRNA_trajectories[:,1:], axis = 1)
    figuresize = (4,2.5)
    my_figure = plt.figure(figsize = figuresize)
    # overlay the 10 stochastic protein traces (faint black), scaled by 1e4
    for trajectory_index in range(1,11):
        # plt.plot( hes5_mRNA_trajectories[:,0],
        #          hes5_mRNA_trajectories[:,trajectory_index]*1000., color = 'black',
        #          lw = 0.5, alpha = 0.1 )
        plt.plot( hes5_protein_trajectories[:,0],
                  hes5_protein_trajectories[:,trajectory_index]/10000, color = 'black',
                  lw = 0.5, alpha = 0.2 )
    # plt.plot( hes5_mRNA_trajectories[:,0],
    #          mean_hes5_rna_trajectory*1000., label = 'mRNA*1000', color = 'blue',
    #          lw = 0.5 )
    plt.plot( deterministic_trajectory[:,0], deterministic_trajectory[:,2]/10000,
              lw = 0.5 )
    # plt.plot( hes5_protein_trajectories[:,0],
    #          mean_hes5_protein_trajectory, label = 'Protein', color = 'blue', ls = '--',
    #          lw = 0.5, dashes = [1,1] )
    plt.xlabel('Time [min]')
    plt.ylabel('Hes5 expression/1e4')
    # NOTE(review): none of the plotted lines carry a label, so this legend
    # call will be empty — confirm whether a label was intended
    plt.legend(bbox_to_anchor=(1.05, 1.1), loc = 'upper right')
    plt.tight_layout()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','100_trajectories.pdf'))
def xest_plot_100_hes_trajectories(self):
    """Plot 50 unsynchronised Hes5 trajectories with their ensemble means.

    Simulates 50 trajectories at the standard Hes5 parameter point, plots
    each mRNA (scaled by 1000) and protein trace faintly in black with the
    ensemble means in blue, and saves the figure to
    output/100_trajectories.pdf.

    NOTE(review): this redefines (and therefore shadows) the method of the
    same name earlier in the class, and reuses its output filename —
    rename one of the two if both plots are wanted.
    """
    hes5_mRNA_trajectories, hes5_protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 50,
                                                      duration = 1500,
                                                      repression_threshold = 31400,
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = 29,
                                                      basal_transcription_rate = 11,
                                                      transcription_delay = 29,
                                                      initial_mRNA = 3,
                                                      initial_protein = 31400,
                                                      equilibration_time = 1000,
                                                      synchronize = False)
    # ensemble means over the trajectory columns (column 0 holds time)
    mean_hes5_protein_trajectory = np.mean(hes5_protein_trajectories[:,1:], axis = 1)
    mean_hes5_rna_trajectory = np.mean(hes5_mRNA_trajectories[:,1:], axis = 1)
    figuresize = (4,2.5)
    my_figure = plt.figure(figsize = figuresize)
    # faint individual traces; columns 1..50 are the 50 trajectories
    for trajectory_index in range(1,51):
        plt.plot( hes5_mRNA_trajectories[:,0],
                  hes5_mRNA_trajectories[:,trajectory_index]*1000., color = 'black',
                  lw = 0.5, alpha = 0.1 )
        plt.plot( hes5_protein_trajectories[:,0],
                  hes5_protein_trajectories[:,trajectory_index], color = 'black', ls = '--',
                  lw = 0.5, dashes = [1,1], alpha = 0.1 )
    plt.plot( hes5_mRNA_trajectories[:,0],
              mean_hes5_rna_trajectory*1000., label = 'mRNA*1000', color = 'blue',
              lw = 0.5 )
    plt.plot( hes5_protein_trajectories[:,0],
              mean_hes5_protein_trajectory, label = 'Protein', color = 'blue', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.legend(bbox_to_anchor=(1.05, 1.1), loc = 'upper right')
    plt.tight_layout()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','100_trajectories.pdf'))
def xest_plot_100_sychronised_hes_trajectories(self):
    """Plot 100 synchronised Hes5 trajectories with their ensemble means.

    Same as the unsynchronised 100-trajectory plot but with
    `synchronize = True`; saved to output/100_sychronised_trajectories.pdf.
    (The 'sychronised' spelling in the method and file name is kept for
    backward compatibility.)
    """
    hes5_mRNA_trajectories, hes5_protein_trajectories = hes5.generate_multiple_trajectories( number_of_trajectories = 100,
                                                      duration = 1500,
                                                      repression_threshold = 31400,
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = 29,
                                                      basal_transcription_rate = 11,
                                                      transcription_delay = 29,
                                                      initial_mRNA = 3,
                                                      initial_protein = 31400,
                                                      equilibration_time = 1000,
                                                      synchronize = True)
    # ensemble means over the trajectory columns (column 0 holds time)
    mean_hes5_protein_trajectory = np.mean(hes5_protein_trajectories[:,1:], axis = 1)
    mean_hes5_rna_trajectory = np.mean(hes5_mRNA_trajectories[:,1:], axis = 1)
    figuresize = (4,2.5)
    my_figure = plt.figure(figsize = figuresize)
    # faint individual traces; columns 1..100 are the 100 trajectories
    for trajectory_index in range(1,101):
        plt.plot( hes5_mRNA_trajectories[:,0],
                  hes5_mRNA_trajectories[:,trajectory_index]*1000., color = 'black',
                  lw = 0.5, alpha = 0.1 )
        plt.plot( hes5_protein_trajectories[:,0],
                  hes5_protein_trajectories[:,trajectory_index], color = 'black', ls = '--',
                  lw = 0.5, dashes = [1,1], alpha = 0.1 )
    plt.plot( hes5_mRNA_trajectories[:,0],
              mean_hes5_rna_trajectory*1000., label = 'mRNA*1000', color = 'blue',
              lw = 0.5 )
    plt.plot( hes5_protein_trajectories[:,0],
              mean_hes5_protein_trajectory, label = 'Protein', color = 'blue', ls = '--',
              lw = 0.5, dashes = [1,1] )
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.legend(bbox_to_anchor=(1.05, 1.1), loc = 'upper right')
    plt.tight_layout()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','100_sychronised_trajectories.pdf'))
def xest_dependence_of_summary_stats_on_number_of_samples(self):
    """Study how the summary statistics converge with ensemble size.

    For 20 sample numbers (10 between 1 and 50, 10 between 51 and 200)
    simulate that many Hes5 trajectories, record mean, standard
    deviation, period and coherence, save the table to
    output/sample_number_results.npy, and plot each statistic against
    sample number in output/sample_number_dependance.pdf.
    """
    # plot mean, standard deviation, period and coherence in dependance of sample number
    number_of_sample_numbers = 10
    max_sample_number = 200
    # one row per sample number; columns: [n, mean, std, period, coherence]
    results = np.zeros((number_of_sample_numbers*2, 5))
    # denser coverage of small sample numbers, coarser up to the maximum
    sample_numbers = np.hstack((np.linspace(1,50,number_of_sample_numbers, dtype = 'int'),
                                np.linspace(51,max_sample_number,number_of_sample_numbers, dtype = 'int')))
    for results_index, sample_number in enumerate(sample_numbers):
        print('calculating trajectories with sample number ' + str(sample_number))
        these_mRNA_traces, these_protein_traces = hes5.generate_multiple_trajectories( number_of_trajectories = sample_number,
                                                      duration = 1500,
                                                      repression_threshold = 31400,
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = 29,
                                                      basal_transcription_rate = 11,
                                                      transcription_delay = 29,
                                                      initial_mRNA = 3,
                                                      initial_protein = 31400,
                                                      equilibration_time = 1000)
        _, this_coherence, this_period = hes5.calculate_power_spectrum_of_trajectories(these_protein_traces)
        # statistics pooled over all time points and trajectories
        this_mean = np.mean(these_protein_traces[:,1:])
        this_std = np.std(these_protein_traces[:,1:])
        results[results_index,0] = sample_number
        results[results_index,1] = this_mean
        results[results_index,2] = this_std
        results[results_index,3] = this_period
        results[results_index,4] = this_coherence
    np.save(os.path.join(os.path.dirname(__file__),
                         'output','sample_number_results.npy'), results)
    # results = np.load(os.path.join(os.path.dirname(__file__),
    #                                'output','sample_number_results.npy'))
    figuresize = (6,4.5)
    my_figure = plt.figure(figsize = figuresize)
    my_figure.add_subplot(221)
    plt.plot(results[:,0], results[:,1])
    plt.xlabel('Sample number')
    plt.ylabel('Mean expression')
    plt.ylim(0,70000)
    my_figure.add_subplot(222)
    plt.plot(results[:,0], results[:,2])
    plt.xlabel('Sample number')
    plt.ylabel('Expression variation')
    plt.ylim(0,10000)
    my_figure.add_subplot(223)
    plt.plot(results[:,0], results[:,3])
    plt.xlabel('Sample number')
    plt.ylabel('Period [min]')
    plt.ylim(0,350)
    my_figure.add_subplot(224)
    plt.plot(results[:,0], results[:,4])
    plt.xlabel('Sample number')
    plt.ylabel('Coherence')
    plt.ylim(0,0.5)
    plt.tight_layout()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','sample_number_dependance.pdf'))
def xest_plot_hill_function(self):
    """Visualise repressive Hill curves for several Hill coefficients.

    Left panel: 1/(1 + (p/p0)^n) for n = 2, 5, 6, 7 over p/p0 in [0, 3].
    Right panel: the curve x*(1 + x^5).  The figure is written to
    output/hill_function.pdf.
    """
    scaled_protein = np.linspace(0, 3, 100)
    figuresize = (6, 2.5)
    my_figure = plt.figure(figsize = figuresize)
    my_figure.add_subplot(121)
    # same curves and drawing order as before: exponents 2, 5, 6, 7
    for hill_exponent in (2, 5, 6, 7):
        hill_curve = 1.0/(1.0 + np.power(scaled_protein, hill_exponent))
        plt.plot(scaled_protein, hill_curve)
    plt.xlabel('p/p_0')
    plt.ylabel('Hillfunction')
    my_figure.add_subplot(122)
    root_curve = scaled_protein*(1 + np.power(scaled_protein, 5))
    plt.plot(scaled_protein, root_curve)
    plt.xlabel('p/p_0')
    plt.ylabel('Rootfunction')
    plt.tight_layout()
    plt.savefig(os.path.join(os.path.dirname(__file__),
                             'output','hill_function.pdf'))
def xest_calculate_mean_expression_at_parameter_point(self):
    """Check the deterministic steady state at the reference Hes5 parameter point.

    Calls hes5.calculate_steady_state_of_ode with the standard Hes5
    parameters and asserts that the predicted mean protein copy number
    lies in (58000, 63000) and the mean mRNA copy number in (0, 100).
    """
    this_mean_mRNA, this_mean_protein = hes5.calculate_steady_state_of_ode(
                                             repression_threshold = 31400,
                                             mRNA_degradation_rate = np.log(2)/30,
                                             protein_degradation_rate = np.log(2)/90,
                                             translation_rate = 29,
                                             basal_transcription_rate = 11,
                                             )
    # use the function form of print: identical output under Python 2 and
    # also valid under Python 3 (the old print statements were py2-only)
    print('expected protein number is ' + str(this_mean_protein))
    print('expected mRNA number is ' + str(this_mean_mRNA))
    self.assertGreater(this_mean_protein, 58000)
    self.assertLess(this_mean_protein, 63000)
    self.assertGreater(this_mean_mRNA, 0)
    self.assertLess(this_mean_mRNA, 100)
def xest_stochastic_hes_trajectory_example(self):
    """Plot a single Langevin trajectory at an alternative Hes5 parameter point.

    Simulates 2500 minutes with a lowered repression threshold (23000),
    checks the trajectory is non-negative, and plots mRNA and protein on a
    'copy number / 1e4' axis (mRNA/10 equals mRNA*1000/1e4, matching the
    'mRNA*1000' legend entry).  Saved to output/hes5_langevin_trajectory.pdf.
    """
    # same plot as before for different transcription ("more_mrna") - not yet
    # our preferred hes5 values
    my_trajectory = hes5.generate_langevin_trajectory( duration = 2500,
                                                       repression_threshold = 23000,
                                                       mRNA_degradation_rate = np.log(2)/30,
                                                       protein_degradation_rate = np.log(2)/90,
                                                       translation_rate = 26,
                                                       basal_transcription_rate = 9,
                                                       transcription_delay = 29,
                                                       initial_mRNA = 3,
                                                       initial_protein = 23000)
    # Langevin dynamics must not produce negative copy numbers
    self.assertGreaterEqual(np.min(my_trajectory),0.0)
    figuresize = (4,2.5)
    my_figure = plt.figure(figsize = figuresize)
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,1]/10, label = 'mRNA*1000', color = 'black')
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,2]/10000, label = 'Hes5', color = 'green', ls = '--',
             dashes = [1,1])
    # plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
    #          verticalalignment='bottom', horizontalalignment='right',
    #          transform=plt.gca().transAxes)
    plt.xlabel('Time [min]')
    plt.ylabel('Copy number/1e4')
    plt.legend()
    plt.tight_layout()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','hes5_langevin_trajectory.pdf'))
def xest_stochastic_hes_trajectory_with_langevin(self):
    """Plot one Langevin trajectory with an annotated mean protein number.

    Simulates 1500 minutes at the lowered repression threshold (23000),
    checks non-negativity, plots mRNA scaled by 1000 alongside the
    protein trace, annotates the mean protein number, and saves the
    figure to output/hes5_langevin_trajectory.pdf.
    """
    # same plot as before for different transcription ("more_mrna") - not yet
    # our preferred hes5 values
    my_trajectory = hes5.generate_langevin_trajectory( duration = 1500,
                                                       repression_threshold = 23000,
                                                       mRNA_degradation_rate = np.log(2)/30,
                                                       protein_degradation_rate = np.log(2)/90,
                                                       translation_rate = 26,
                                                       basal_transcription_rate = 9,
                                                       transcription_delay = 29,
                                                       initial_mRNA = 3,
                                                       initial_protein = 23000)
    # Langevin dynamics must not produce negative copy numbers
    self.assertGreaterEqual(np.min(my_trajectory),0.0)
    figuresize = (4,2.5)
    my_figure = plt.figure()
    # scale mRNA by 1000 to match the 'mRNA*1000' legend entry; the
    # previous factor of 10000 contradicted both the label and the
    # identical sibling plots in this class
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,1]*1000, label = 'mRNA*1000', color = 'black')
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--')
    plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
             verticalalignment='bottom', horizontalalignment='right',
             transform=plt.gca().transAxes)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.legend()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','hes5_langevin_trajectory.pdf'))
def xest_equlibrate_langevin_trajectory(self):
    """Plot an equilibrated Langevin trajectory and report the runtime.

    Seeds numpy's RNG for reproducibility, simulates 1500 minutes at the
    standard Hes5 parameter point with 1000 minutes of equilibration,
    prints the wall-clock time taken, and saves the plot to
    output/hes5_langevin_trajectory_equilibrated.pdf.
    """
    import time
    np.random.seed(0)
    # time.clock() was deprecated in Python 3.3 and removed in 3.8;
    # time.time() provides the intended elapsed (wall-clock) duration
    # on every Python version
    start = time.time()
    my_trajectory = hes5.generate_langevin_trajectory( duration = 1500,
                                                       repression_threshold = 31400,
                                                       mRNA_degradation_rate = np.log(2)/30,
                                                       protein_degradation_rate = np.log(2)/90,
                                                       translation_rate = 29,
                                                       basal_transcription_rate = 11,
                                                       transcription_delay = 29,
                                                       initial_mRNA = 3,
                                                       initial_protein = 31400,
                                                       equilibration_time = 1000)
    end = time.time()
    # function form of print works identically under Python 2 and 3
    print('needed ' + str(end-start) + ' seconds')
    figuresize = (4,2.5)
    my_figure = plt.figure()
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,1]*1000, label = 'mRNA*1000', color = 'black')
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--')
    plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
             verticalalignment='bottom', horizontalalignment='right',
             transform=plt.gca().transAxes)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.legend()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','hes5_langevin_trajectory_equilibrated.pdf'))
def xest_multiple_equlibrated_langevin_trajectories(self):
    """Simulate 100 equilibrated Langevin trajectories, save and plot them.

    Saves the raw protein and mRNA trajectory arrays to output/, then
    plots an example trace of each species together with the ensemble
    means in output/average_hes5_langevin_behaviour.pdf.

    Bug fixed here: the protein standard deviation was computed from the
    mRNA array (copy-paste error), and the mean-protein legend entry
    claimed a *1000 scaling that was never applied.
    """
    mRNA_trajectories, protein_trajectories = hes5.generate_multiple_langevin_trajectories( number_of_trajectories = 100,
                                                      duration = 1500,
                                                      repression_threshold = 31400,
                                                      mRNA_degradation_rate = np.log(2)/30,
                                                      protein_degradation_rate = np.log(2)/90,
                                                      translation_rate = 29,
                                                      basal_transcription_rate = 11,
                                                      transcription_delay = 29,
                                                      initial_mRNA = 3,
                                                      initial_protein = 31400,
                                                      equilibration_time = 1000)
    np.save(os.path.join(os.path.dirname(__file__),
                         'output','protein_traces.npy'), protein_trajectories)
    np.save(os.path.join(os.path.dirname(__file__),
                         'output','rna_traces.npy'), mRNA_trajectories)
    # ensemble statistics over the trajectory columns (column 0 is time);
    # the protein deviation now correctly uses the protein array
    mean_protein_trajectory = np.mean(protein_trajectories[:,1:], axis = 1)
    protein_deviation = np.std(protein_trajectories[:,1:])
    mean_mRNA_trajectory = np.mean(mRNA_trajectories[:,1:], axis = 1)
    mRNA_deviation = np.std(mRNA_trajectories[:,1:])
    figuresize = (4,2.75)
    my_figure = plt.figure()
    # want to plot: protein and mRNA for stochastic and deterministic system,
    # example stochastic system
    plt.plot( mRNA_trajectories[:,0],
              mRNA_trajectories[:,1]*1000., label = 'mRNA example', color = 'black' )
    plt.plot( protein_trajectories[:,0],
              protein_trajectories[:,1], label = 'Protein example', color = 'black', ls = '--' )
    plt.plot( mRNA_trajectories[:,0],
              mean_mRNA_trajectory*1000, label = 'Mean mRNA*1000', color = 'blue' )
    # the mean protein is plotted unscaled, so the label must not claim *1000
    plt.plot( protein_trajectories[:,0],
              mean_protein_trajectory, label = 'Mean protein', color = 'blue', ls = '--' )
    plt.ylabel('Copy number')
    plt.legend()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','average_hes5_langevin_behaviour.pdf'))
def xest_plot_heterozygous_model(self):
    """Plot one equilibrated heterozygous Langevin trajectory.

    The heterozygous trajectory carries two alleles: columns 1/2 are
    presumably mRNA/protein of the first allele and columns 3/4 of the
    second (GFP-tagged) allele — confirm against
    hes5.generate_heterozygous_langevin_trajectory.  Total mRNA and
    protein (blue) are plotted alongside the GFP allele alone (green) and
    saved to output/hes5_heterozygous_trajectory_equilibrated.pdf.
    """
    my_trajectory = hes5.generate_heterozygous_langevin_trajectory( duration = 1500,
                                                       repression_threshold = 31400,
                                                       mRNA_degradation_rate = np.log(2)/30,
                                                       protein_degradation_rate = np.log(2)/90,
                                                       translation_rate = 29,
                                                       basal_transcription_rate = 11,
                                                       transcription_delay = 29,
                                                       initial_mRNA = 3,
                                                       initial_protein = 31400,
                                                       equilibration_time = 1000)
    figuresize = (4,2.5)
    my_figure = plt.figure()
    # total mRNA across both alleles, scaled to be visible next to protein
    plt.plot(my_trajectory[:,0],
             (my_trajectory[:,1] + my_trajectory[:,3])*1000, label = 'mRNA*1000', color = 'blue')
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,2] + my_trajectory[:,4], label = 'Hes protein', color = 'blue', ls = '--')
    # GFP-tagged allele on its own
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,3]*1000, label = 'GFPmRNA*1000', color = 'green')
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,4], label = 'Hes_GFP', color = 'green', ls = '--')
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.legend()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','hes5_heterozygous_trajectory_equilibrated.pdf'))
def xest_validate_heterozygous_model(self):
    """Compare the heterozygous stochastic model against the deterministic ODE.

    Uses a high-copy-number parameter regime where stochastic fluctuations
    are relatively small, so the ensemble mean of the summed two-allele
    Langevin trajectories should track the deterministic solution.  The
    comparison plot is saved to
    output/stochastic_heterozygous_model_validation.pdf.
    """
    mRNA_trajectories_1, protein_trajectories_1, mRNA_trajectories_2, protein_trajectories_2 = hes5.generate_multiple_heterozygous_langevin_trajectories(
                                                      number_of_trajectories = 10,
                                                      duration = 720,
                                                      repression_threshold = 1000000,
                                                      mRNA_degradation_rate = 0.03,
                                                      protein_degradation_rate = 0.03,
                                                      transcription_delay = 18.5,
                                                      basal_transcription_rate = 10000.0,
                                                      translation_rate = 1.0,
                                                      initial_mRNA = 30000,
                                                      initial_protein = 1000000 )
    # ensemble means of the per-trajectory allele sums (column 0 is time)
    mean_protein_trajectory = np.mean(protein_trajectories_1[:,1:] + protein_trajectories_2[:,1:], axis = 1)
    mean_mRNA_trajectory = np.mean(mRNA_trajectories_1[:,1:] + mRNA_trajectories_2[:,1:], axis = 1)
    # deterministic reference at the same parameter point
    deterministic_trajectory = hes5.generate_deterministic_trajectory( duration = 720,
                                                                      repression_threshold = 1000000,
                                                                      mRNA_degradation_rate = 0.03,
                                                                      protein_degradation_rate = 0.03,
                                                                      transcription_delay = 18.5,
                                                                      basal_transcription_rate = 10000.0,
                                                                      translation_rate = 1.0,
                                                                      initial_mRNA = 30000,
                                                                      initial_protein = 1000000,
                                                                      for_negative_times = 'no_negative' )
    figuresize = (4,2.75)
    my_figure = plt.figure()
    # want to plot: protein and mRNA for stochastic and deterministic system,
    # example stochastic system
    plt.plot( mRNA_trajectories_1[:,0],
              mRNA_trajectories_1[:,1]/1000.
              + mRNA_trajectories_2[:,1]/1000., label = 'mRNA example', color = 'black' )
    plt.plot( protein_trajectories_1[:,0],
              protein_trajectories_1[:,1]/10000. +
              protein_trajectories_2[:,1]/10000., label = 'Protein example', color = 'black', ls = '--' )
    plt.plot( mRNA_trajectories_1[:,0],
              mean_mRNA_trajectory/1000., label = 'Mean mRNA', color = 'blue' )
    plt.plot( protein_trajectories_1[:,0],
              mean_protein_trajectory/10000., label = 'Mean protein', color = 'blue', ls = '--' )
    plt.plot( deterministic_trajectory[:,0],
              deterministic_trajectory[:,1]/1000., label = 'Deterministic mRNA', color = 'green' )
    plt.plot( deterministic_trajectory[:,0],
              deterministic_trajectory[:,2]/10000., label = 'Deterministic Protein', color = 'green', ls = '--' )
    plt.xlabel('Time')
    plt.ylabel('Scaled expression')
    plt.legend()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','stochastic_heterozygous_model_validation.pdf'))
def xest_validate_stochastic_langevin_implementation(self):
    """Compare the Langevin implementation against the deterministic ODE.

    Uses a high-copy-number regime where noise is relatively small, so
    the ensemble mean of 10 Langevin trajectories should track the
    deterministic solution.  The comparison plot is saved to
    output/stochastic_langevin_model_validation.pdf.

    Bug fixed here: the protein standard deviation was computed from the
    mRNA array (copy-paste error).
    """
    mRNA_trajectories, protein_trajectories = hes5.generate_multiple_langevin_trajectories( number_of_trajectories = 10,
                                                      duration = 720,
                                                      repression_threshold = 100000,
                                                      mRNA_degradation_rate = 0.03,
                                                      protein_degradation_rate = 0.03,
                                                      transcription_delay = 18.5,
                                                      basal_transcription_rate = 1000.0,
                                                      translation_rate = 1.0,
                                                      initial_mRNA = 3000,
                                                      initial_protein = 100000 )
    # ensemble statistics over the trajectory columns (column 0 is time);
    # the protein deviation now correctly uses the protein array
    mean_protein_trajectory = np.mean(protein_trajectories[:,1:], axis = 1)
    protein_deviation = np.std(protein_trajectories[:,1:])
    mean_mRNA_trajectory = np.mean(mRNA_trajectories[:,1:], axis = 1)
    mRNA_deviation = np.std(mRNA_trajectories[:,1:])
    # deterministic reference at the same parameter point
    deterministic_trajectory = hes5.generate_deterministic_trajectory( duration = 720,
                                                                      repression_threshold = 100000,
                                                                      mRNA_degradation_rate = 0.03,
                                                                      protein_degradation_rate = 0.03,
                                                                      transcription_delay = 18.5,
                                                                      basal_transcription_rate = 1000.0,
                                                                      translation_rate = 1.0,
                                                                      initial_mRNA = 3000,
                                                                      initial_protein = 100000,
                                                                      for_negative_times = 'no_negative' )
    figuresize = (4,2.75)
    my_figure = plt.figure()
    # want to plot: protein and mRNA for stochastic and deterministic system,
    # example stochastic system
    plt.plot( mRNA_trajectories[:,0],
              mRNA_trajectories[:,1]/1000., label = 'mRNA example', color = 'black' )
    plt.plot( protein_trajectories[:,0],
              protein_trajectories[:,1]/10000., label = 'Protein example', color = 'black', ls = '--' )
    plt.plot( mRNA_trajectories[:,0],
              mean_mRNA_trajectory/1000., label = 'Mean mRNA', color = 'blue' )
    plt.plot( protein_trajectories[:,0],
              mean_protein_trajectory/10000., label = 'Mean protein', color = 'blue', ls = '--' )
    plt.plot( deterministic_trajectory[:,0],
              deterministic_trajectory[:,1]/1000., label = 'Deterministic mRNA', color = 'green' )
    plt.plot( deterministic_trajectory[:,0],
              deterministic_trajectory[:,2]/10000., label = 'Deterministic Protein', color = 'green', ls = '--' )
    plt.xlabel('Time')
    plt.ylabel('Scaled expression')
    plt.legend()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','stochastic_langevin_model_validation.pdf'))
def xest_plot_histogram_for_logarithmic_prior(self):
    """Draw a histogram of 10000 samples from a log-uniform prior on
    [1, 100] and save it as a PDF with a logarithmic x axis."""
    sample_count = 10000
    # A uniform exponent u in [0, 1) makes 100**u log-uniformly distributed.
    uniform_exponents = np.random.rand(sample_count)
    log_uniform_samples = np.power(100, uniform_exponents)
    histogram_figure = plt.figure(figsize = (4.5,2.5))
    plt.hist(log_uniform_samples, bins=np.logspace(0,2, 20))
    plt.gca().set_xscale("log")
    histogram_figure.savefig(os.path.join(os.path.dirname(__file__),
                                          'output','logarithmic_prior.pdf'))
def xest_vary_repression_threshold(self):
    """Plot one stochastic trajectory for a posterior sample whose
    oscillation quality responds strongly to the repression threshold.

    Loads previously sampled model results, filters them to a posterior
    region, intersects with a repression-threshold parameter sweep, picks
    one surviving sample, simulates it, and saves the plot to
    output/hes5_vary_repression_threshold.pdf.
    """
    saving_path = os.path.join(os.path.dirname(__file__), 'output','sampling_results_hill_low_transcription')
    # model_results columns: 0 = mean expression, 1 = relative standard
    # deviation (per the inline comments below) — TODO confirm remaining columns.
    model_results = np.load(saving_path + '.npy' )
    prior_samples = np.load(saving_path + '_parameters.npy')
    accepted_indices = np.where(np.logical_and(model_results[:,0]>55000, #cell number
                                np.logical_and(model_results[:,0]<65000, #cell_number
                                np.logical_and(model_results[:,1]<0.15, #standard deviation
                                               model_results[:,1]>0.05)))) #standard deviation
    my_posterior_samples = prior_samples[accepted_indices]
    accepted_model_results = model_results[accepted_indices]
    # NOTE(review): the two string literals below concatenate to
    # 'hill_relative_sweeps_low_transcriptionrepression_threshold.npy' —
    # verify a separator is not missing between the two parts.
    my_parameter_sweep_results = np.load(os.path.join(os.path.dirname(__file__),
                                                      'output',
                                                      'hill_relative_sweeps_low_transcription' +
                                                      'repression_threshold.npy'))
    # Select samples whose sweep summary (axis-2 column 4; presumably a
    # coherence/quality measure — TODO confirm) drops sharply between sweep
    # index 4 and sweep index 9.
    increase_indices = np.where(np.logical_and(my_parameter_sweep_results[:,9,4] <
                                               my_parameter_sweep_results[:,4 ,4],
#                                               my_parameter_sweep_results[:,reference_indices[parameter_name] -1,4] > 0.2))
                                np.logical_and(my_parameter_sweep_results[:,4,4] >
                                               my_parameter_sweep_results[:,9,4]*8,
                                np.logical_and(my_parameter_sweep_results[:,9,4] < 0.1,
                                               my_parameter_sweep_results[:,4,4] > 0.2))))
    # NOTE(review): my_posterior_results is assigned but never used below.
    my_posterior_results = accepted_model_results[increase_indices]
    my_posterior_samples = my_posterior_samples[increase_indices]
    # Arbitrarily take the third surviving sample for plotting.
    my_parameter = my_posterior_samples[2]
    # Parameter layout: 0 = basal transcription rate, 1 = translation rate,
    # 2 = repression threshold, 3 = transcription delay, 4 = Hill coefficient.
    my_trajectory = hes5.generate_stochastic_trajectory( duration = 3500,
                                                         repression_threshold = my_parameter[2],
                                                         mRNA_degradation_rate = np.log(2)/30,
                                                         protein_degradation_rate = np.log(2)/90,
                                                         translation_rate = my_parameter[1],
                                                         basal_transcription_rate = my_parameter[0],
                                                         transcription_delay = my_parameter[3],
                                                         initial_mRNA = 3,
                                                         initial_protein = my_parameter[2],
                                                         equilibration_time = 1000,
                                                         hill_coefficient = my_parameter[4],
                                                         vary_repression_threshold = False)
    # NOTE(review): figuresize is defined but never used here.
    figuresize = (4,2.5)
    my_figure = plt.figure()
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,1]*1000, label = 'mRNA*1000', color = 'black')
    plt.plot(my_trajectory[:,0],
             my_trajectory[:,2], label = 'Hes protein', color = 'black', ls = '--', dashes = [1, 1])
    plt.axvline(2000)
#     plt.text(0.95, 0.4, 'Mean protein number: ' + str(np.mean(my_trajectory[:,2])),
#              verticalalignment='bottom', horizontalalignment='right',
#              transform=plt.gca().transAxes)
    plt.xlabel('Time')
    plt.ylabel('Copy number')
    plt.legend()
    my_figure.savefig(os.path.join(os.path.dirname(__file__),
                                   'output','hes5_vary_repression_threshold.pdf'))
| 59.030668 | 195 | 0.490871 | 9,902 | 107,790 | 5.059584 | 0.041911 | 0.020399 | 0.017305 | 0.01996 | 0.902375 | 0.881697 | 0.865309 | 0.843932 | 0.831357 | 0.817405 | 0 | 0.052722 | 0.41245 | 107,790 | 1,825 | 196 | 59.063014 | 0.738347 | 0.058735 | 0 | 0.784647 | 0 | 0 | 0.057961 | 0.009568 | 0 | 0 | 0 | 0 | 0.005435 | 0 | null | null | 0 | 0.006793 | null | null | 0.003397 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
18d2ce4862500e7c0b57700f288f94975cf7ba2e | 12,410 | py | Python | SimModel_Python_API/simmodel_swig/Release/SimParameterizedProfileDef_ParameterizedProfile_Rectangle.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | [
"MIT"
] | 3 | 2016-05-30T15:12:16.000Z | 2022-03-22T08:11:13.000Z | SimModel_Python_API/simmodel_swig/Release/SimParameterizedProfileDef_ParameterizedProfile_Rectangle.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | [
"MIT"
] | 21 | 2016-06-13T11:33:45.000Z | 2017-05-23T09:46:52.000Z | SimModel_Python_API/simmodel_swig/Release/SimParameterizedProfileDef_ParameterizedProfile_Rectangle.py | EnEff-BIM/EnEffBIM-Framework | 6328d39b498dc4065a60b5cc9370b8c2a9a1cddf | [
"MIT"
] | null | null | null | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
    # Python >= 2.6: locate the compiled extension next to this file so it
    # is found even when the package directory is not on sys.path.
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimParameterizedProfileDef_ParameterizedProfile_Rectangle', [dirname(__file__)])
        except ImportError:
            # Not next to this file — fall back to a normal import.
            import _SimParameterizedProfileDef_ParameterizedProfile_Rectangle
            return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle
        if fp is not None:
            try:
                _mod = imp.load_module('_SimParameterizedProfileDef_ParameterizedProfile_Rectangle', fp, pathname, description)
            finally:
                # Always release the file handle opened by find_module.
                fp.close()
            return _mod
    _SimParameterizedProfileDef_ParameterizedProfile_Rectangle = swig_import_helper()
    # Remove the helper so it does not leak into the module namespace.
    del swig_import_helper
else:
    # Older interpreters: plain import of the compiled extension.
    import _SimParameterizedProfileDef_ParameterizedProfile_Rectangle
del version_info
try:
    # Alias the builtin property for use by generated accessor code.
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """Dispatch an attribute assignment on a SWIG proxy object.

    Order of resolution: the special "thisown"/"this" pointer attributes,
    then the class' SWIG setter table, then (only when *static* is false)
    a plain instance attribute.  With *static* true, unknown names raise
    AttributeError so proxies stay non-dynamic.
    """
    # Ownership flag is handled by the underlying SwigPyObject.
    if name == "thisown":
        return self.this.own(value)
    # Binding the raw C pointer object bypasses all property dispatch.
    if name == "this" and type(value).__name__ == 'SwigPyObject':
        self.__dict__[name] = value
        return
    setter = class_type.__swig_setmethods__.get(name, None)
    if setter:
        return setter(self, value)
    if static:
        raise AttributeError("You cannot add attributes to %s" % self)
    if _newclass:
        object.__setattr__(self, name, value)
    else:
        self.__dict__[name] = value
def _swig_setattr(self, class_type, name, value):
    """Dynamic attribute set: like _swig_setattr_nondynamic, but unknown
    names fall through to a plain instance attribute."""
    return _swig_setattr_nondynamic(self, class_type, name, value, static=0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    """Dispatch an attribute read on a SWIG proxy object.

    Resolution order: the "thisown" ownership flag, then the class' SWIG
    getter table.  With *static* true, unknown names raise AttributeError;
    otherwise the lookup is delegated to object.__getattr__.
    """
    if name == "thisown":
        return self.this.own()
    getter = class_type.__swig_getmethods__.get(name, None)
    if getter:
        return getter(self)
    if static:
        raise AttributeError(name)
    return object.__getattr__(self, name)
def _swig_getattr(self, class_type, name):
    """Dynamic attribute read: like _swig_getattr_nondynamic, but unknown
    names are delegated to the default object lookup."""
    return _swig_getattr_nondynamic(self, class_type, name, static=0)
def _swig_repr(self):
    """Return "<module.Class; proxy of ...>" for a SWIG proxy object.

    When the proxy is not (yet) bound to a C++ object, the pointer part is
    left empty instead of raising.
    """
    try:
        strthis = "proxy of " + self.this.__repr__()
    # BUGFIX: was a bare "except:", which also swallowed KeyboardInterrupt
    # and SystemExit; only ordinary failures (e.g. missing .this) should
    # fall back to an empty pointer description.
    except Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Compatibility shims emitted by SWIG for very old interpreters.
try:
    _object = object      # new-style classes available
    _newclass = 1
except AttributeError:
    # Python < 2.2: no builtin 'object'; use a classic-class placeholder.
    class _object:
        pass
    _newclass = 0
try:
    import weakref
    weakref_proxy = weakref.proxy
# BUGFIX: was a bare "except:"; only a failed import should trigger the
# fallback, not KeyboardInterrupt/SystemExit.
except ImportError:
    # weakref unavailable (minimal builds): identity fallback keeps call
    # sites working, at the cost of holding strong references.
    weakref_proxy = lambda x: x
import base
class SimParameterizedProfileDef(base.SimProfileDefinition):
    """SWIG proxy for the C++ SimParameterizedProfileDef class."""
    # Merge the base class' SWIG setter table into this class' own, so
    # inherited properties keep working through __setattr__ dispatch.
    __swig_setmethods__ = {}
    for _s in [base.SimProfileDefinition]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimParameterizedProfileDef, name, value)
    # Same merge for the getter table.
    __swig_getmethods__ = {}
    for _s in [base.SimProfileDefinition]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimParameterizedProfileDef, name)
    __repr__ = _swig_repr
    def Position(self, *args):
        # Forwards to the compiled wrapper; presumably the accessor for the
        # profile's placement — overload resolution happens in the C layer.
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_Position(self, *args)
    def __init__(self, *args):
        # Construct the underlying C++ object and attach its SwigPyObject
        # handle to this proxy.
        this = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.new_SimParameterizedProfileDef(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        # Deep/shallow copy hook generated by SWIG.
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef__clone(self, f, c)
    __swig_destroy__ = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.delete_SimParameterizedProfileDef
    __del__ = lambda self: None
# Register the proxy with the C wrapper so returned objects use this class.
SimParameterizedProfileDef_swigregister = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_swigregister
SimParameterizedProfileDef_swigregister(SimParameterizedProfileDef)
class SimParameterizedProfileDef_ParameterizedProfile(SimParameterizedProfileDef):
    """SWIG proxy for the C++ SimParameterizedProfileDef_ParameterizedProfile class."""
    # Merge the parent proxy's SWIG setter/getter tables so inherited
    # properties dispatch correctly.
    __swig_setmethods__ = {}
    for _s in [SimParameterizedProfileDef]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimParameterizedProfileDef_ParameterizedProfile, name, value)
    __swig_getmethods__ = {}
    for _s in [SimParameterizedProfileDef]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimParameterizedProfileDef_ParameterizedProfile, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # Construct the underlying C++ object and attach its handle.
        this = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.new_SimParameterizedProfileDef_ParameterizedProfile(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        # Copy hook generated by SWIG.
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile__clone(self, f, c)
    __swig_destroy__ = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.delete_SimParameterizedProfileDef_ParameterizedProfile
    __del__ = lambda self: None
# Register the proxy with the C wrapper.
SimParameterizedProfileDef_ParameterizedProfile_swigregister = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_swigregister
SimParameterizedProfileDef_ParameterizedProfile_swigregister(SimParameterizedProfileDef_ParameterizedProfile)
class SimParameterizedProfileDef_ParameterizedProfile_Rectangle(SimParameterizedProfileDef_ParameterizedProfile):
    """SWIG proxy for the C++ rectangle profile definition."""
    # Merge the parent proxy's SWIG setter/getter tables.
    __swig_setmethods__ = {}
    for _s in [SimParameterizedProfileDef_ParameterizedProfile]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimParameterizedProfileDef_ParameterizedProfile_Rectangle, name, value)
    __swig_getmethods__ = {}
    for _s in [SimParameterizedProfileDef_ParameterizedProfile]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimParameterizedProfileDef_ParameterizedProfile_Rectangle, name)
    __repr__ = _swig_repr
    def XDim(self, *args):
        # Accessor for the rectangle's X dimension (forwarded to C wrapper).
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_XDim(self, *args)
    def YDim(self, *args):
        # Accessor for the rectangle's Y dimension (forwarded to C wrapper).
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_YDim(self, *args)
    def __init__(self, *args):
        # Construct the underlying C++ object and attach its handle.
        this = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.new_SimParameterizedProfileDef_ParameterizedProfile_Rectangle(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        # Copy hook generated by SWIG.
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle__clone(self, f, c)
    __swig_destroy__ = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.delete_SimParameterizedProfileDef_ParameterizedProfile_Rectangle
    __del__ = lambda self: None
# Register the proxy with the C wrapper.
SimParameterizedProfileDef_ParameterizedProfile_Rectangle_swigregister = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_swigregister
SimParameterizedProfileDef_ParameterizedProfile_Rectangle_swigregister(SimParameterizedProfileDef_ParameterizedProfile_Rectangle)
class SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence(base.sequence_common):
    """SWIG proxy for a C++ sequence (vector-like container) of rectangle
    profile definitions.  The methods below mirror the C++ container API
    and each forwards directly to the compiled wrapper."""
    # Merge the base sequence class' SWIG setter/getter tables.
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # Construct the underlying C++ container and attach its handle.
        this = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.new_SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def assign(self, n, x):
        # Replace the contents with n copies of x.
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_assign(self, n, x)
    def begin(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_begin(self, *args)
    def end(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_end(self, *args)
    def rbegin(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_rbegin(self, *args)
    def rend(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_rend(self, *args)
    def at(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_at(self, *args)
    def front(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_front(self, *args)
    def back(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_back(self, *args)
    def push_back(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_push_back(self, *args)
    def pop_back(self):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_pop_back(self)
    def detach_back(self, pop=True):
        # Release ownership of the last element; optionally also remove it.
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_detach_back(self, pop)
    def insert(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_insert(self, *args)
    def erase(self, *args):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_erase(self, *args)
    def detach(self, position, r, erase=True):
        # Release ownership of the element at position; optionally erase it.
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_detach(self, position, r, erase)
    def swap(self, x):
        return _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_swap(self, x)
    __swig_destroy__ = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.delete_SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence
    __del__ = lambda self: None
# Register the sequence proxy with the C wrapper.
SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_swigregister = _SimParameterizedProfileDef_ParameterizedProfile_Rectangle.SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_swigregister
SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence_swigregister(SimParameterizedProfileDef_ParameterizedProfile_Rectangle_sequence)
# This file is compatible with both classic and new-style classes.
| 48.666667 | 220 | 0.790008 | 1,109 | 12,410 | 8.259693 | 0.13165 | 0.441921 | 0.450328 | 0.229913 | 0.762773 | 0.711135 | 0.667249 | 0.605786 | 0.541812 | 0.441266 | 0 | 0.001605 | 0.146334 | 12,410 | 254 | 221 | 48.858268 | 0.863036 | 0.023691 | 0 | 0.39899 | 1 | 0 | 0.028911 | 0.009582 | 0 | 0 | 0 | 0 | 0 | 1 | 0.156566 | false | 0.010101 | 0.055556 | 0.116162 | 0.540404 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
e13c16ac51b9e02c06f79eda0885c6b54dd69736 | 15,557 | py | Python | tests/advent_2019/day8_test.py | devshawn/advent-of-code | 1cc12900898eabb94f8632984e578665116e5001 | [
"MIT"
] | 1 | 2020-12-07T06:08:54.000Z | 2020-12-07T06:08:54.000Z | tests/advent_2019/day8_test.py | devshawn/advent-of-code-2019 | 1cc12900898eabb94f8632984e578665116e5001 | [
"MIT"
] | null | null | null | tests/advent_2019/day8_test.py | devshawn/advent-of-code-2019 | 1cc12900898eabb94f8632984e578665116e5001 | [
"MIT"
] | null | null | null | from src.advent_2019 import day8
my_input = "22022122222210212222222222022222222122201022222222222222110222222222222222222102222222212220222221222222222220222222222022200220222220212222222222222222122122222211202222222222222222222022202222222222222222000222222222222222222202222222222220222220222222222222222222222122200222222222222222222222222222022122202200212222222222222222222120200222222222222222022222220222222222222202222222222220222220222222222222222222222222212222222221212222222222222222022222212201202222222222022222222122220222222222222222021222222222222222222202222222202220222222222222222221222222222022222221222222202222222222222222122221222211212222222222022222222022200022222222222222100222222222222222222012222222212220222220222222222222222222222222221221222220202222222222222222222121212221222222222222022222222221221122222222222222012222221222222222222012222222212222222221222222222222222222222222212220222220222222222222222222222120202210202222222222222222222022220122222222222222020222220222222222222202222222222221222221222222222222222222222222202222222222212222222222222222122222202211212222222222022222222021221222222222222222002222221222222222222022222222212221222220222222222222222222222122210222222221222222222222222222122020202221202222222222222222222021200122222222222222220222220222222222222002222222222220222222222222222220222222222122220221222222200222222222222222222121202201212222222222022222222222212022222222222222222222220222222222220112222222222220222222222222222222222222222022202220222222202222222222222222022022222222222222222222122222222120201022222222222222122222220222222222222022222222202221222220222222222221222222222122202221222222220222222222222222122221212201202222222222122222222020210022222222222222211222221222220222221022222222212220222222222222222222222222222022201221222221222222222222222222022221212222212222222222122222222221222122222222222222202222222222221222222202222222202221222222222222222222222222222222202220222221211222222222222222122022202200222222222222222222222221
21102222222222222221222222022222222222101222222221222222222022222222222022212222202221122122222020222222222222222202202021221022222222222212222222202222012222222222220221122222222222222222102222202222222022222122222222222022222222222221022022222122122222222222222222202220222121222222222212222222212120102222222222220210122222222222222222222222222221222022222122222222222222212222202220022222222120122222222222022222202221220222222222222212222222202020202222222222220221122222222222022222112222202221222022222022222222222222202222212222122122222122122222222222122222202120221221222222222202222222212120222222222222220222222222222222122222121222212221222022222122222222222122222222212220222222222221122222222222122202212122221222222222222202222222212210022222222222222220122222022222122222201222212222222022222222222222222222202222212220222222222222022212222222222222212021222220222222222212222222222021112222222222221212222222022222022222022222212220222022202022222222222122222222212220122022222220222222222222022202222020220220222222222202222222222110202222222222220222122222022222222222101222222222222122222022222022222122202222212220222222222122122212222221122202212222221222222222222202222022202111102222222222201201022222222222122222022222212222222122222122222022222022202222212220022122222021222202222220122202222222220220222222222222222122222012212222222222220202122222222222122222112222212220222222222222222222222022222222212221122022222022022212222220022222212022222021222222222212222022202110212222222222200222122222122222022222020222222222222022212222222122222022222222222222122222222222122212222221222212222120221120222222222222222022202122202222222222222210122222222222122122100222212220222022222222222222222022212222202222222022222122122202222221022022222220221020222222222222222222202001212222222222211211022222222222222222000222212222222122202022222022222022212222202220122022222222222222222221122102212022220022222222222212222222212002222222222222200202222222222222222122221222222220
22212222212222222222212220222210222212212222202012220222222012220220222222022012222222221222212220211021222222222220220222222202222222222201122220222222212220222222212222202222222220222112202222222002221222222202222220202022012022222222222222102220200022222222222222222002222212222222222220022222222222222222222222212222212222222201222002222222212212220222222002211220212222202112222222221222002220202002222222222220022012222222222212202210222222222022202220202222202222202220222220222112212222222222220222222022221220202122122122222222221222102222211111222222222221020002222212222212212211022221222022222221202222222222222221222212222002202222212112221222222012222222212122202202222222211220112222220220222222222220220202222222222212222202222222222022212222212222212222212221222222222222222222202212221222222122220222212022212102222222202221102220221120222222222220021002222212222212212220022221222022222221212222202222202222222220222122212222212022222222222022211220212222002202222222202202012220220210222222222220221102222202222212212212022221222222202221212222202222222222222201222002222222212022220222202202220220222222202112222222222201112222200011222222222220220202222212222222212211022221221222202222212222222222202022222222222022212222212202222222222202222220222222122022222222220221222221200010222222222220022012222222222212202221022221220022222220222222222222202022222222222002222222222222222222202212200220222122102012202222220221202220212010222222222222121212222212222202222221022221220022112221212222202222202120222211222202222222222202222222202022120222212222222012222222222202212222221121222222222220121202222202222222222202022220222222002222202222212222222221222202222102212222202122222222222202010202222122122222022222212210102222202112222222222221020112222202222222202222022220220122002222222222212222212122222222222102212222202002221222222212211202221022222202012222222212212222221022222222222220120022222212222212222201122221221022012220212222222222202021222200222212202222202212
22122220211221222120022200211222222221021011222021212222222222222202110222221222222220222022222222002202222020222220222220212222221022210220222220220220022220200210220121012212220200222222022002222021110122222222222122120222221222220220221202222222112210222220222221222220202022221022221221222220220222022220222210022120202210212221222220021222222022002222222222222102120222220222222222221002222222112200222022222222222220202122222022212222222222221221022221222202022122202201202211222211021000222121110122222222222122211222220222221221221112222222002201222222222222222222222221220022212220222220220221222222220222121021022201202200222221021221222222121222222222122012112222220222221220220122222122012201222221222220222221222022220222222221222221221220222221202212020120202201200211222210120101222222010122222222122202212222221222220221220002222222112210222120222220222221202020220122211222222222221222222220200221022022012220202200222200012202222120112122222222022022011222222222222220222212222122022220222020222220222222222220222122221220222220200221122221202200122121212211211220222202020210222120012222222222022022011222220222220221222112222122222202222022222221222222222221220122210220222221200220022220220222121022112220202210222222012120222021020022222222022022221222220222221221220202222122212202222221222211222222202020221122222220222222202221022220212202021020122201202200222220212220222220021122222222022102011222220222221222221112222122212220222220222200222220212022220122220222222221200221222220211212022021102201201220222210221100222222010122222222122222211222220222220221221202222122012201222120222201222222220021222122212221122221211220222220202220221120212201200221222200000021222221010022222222122122101222221222220222221102222222122221222221222200222021211121222222220220222222200221122221210212221021022222211212222210021112222022012222222222022112020222220222220221220122122022202212222221222210222121221020221222200220122220202220122222020211120222102202211220222212110211222121
00102222222222210210122222122222022022201202212202222122222222222222202021002022202220022202122021122222222102120022012011221221122222220022212222212022112222222212222211222222022222122122222212212202222222222022221022212022222122222222122202022220222022222121022222012121222222120122222020122222222101012222222222212220222222022222022222211212202210221022202022222022202020002022022220022012122022222212222020022122122102221120221122220111101022212201202222222202211222222202122222122222100202202201222222212022220122212122012222222222022102122122222222222112122022202022221122122022221012001122202111202222222202211211122202122222222022002212202201221222212122220222220020112202212222122022022022122022222002120002202022222122120022222112011122222020102222222212222201022212122222022222001202212221220122222222221022200022212212212220222202222021222022222021021112022020221021020022221210020122212220112222222212212122222212222222022222222222202212221022212222220022210021202202012221122002122120222222222220222102222222222121021022220020001122202201212222222222212100022202222222222022222212202201222222222122222222200020222022202222022102022120122002222212220222222011222102122022221112221222202012022222222202220122022202022220022122122222202210221222202022220122211221002212222222022112022021222012222220022022102010220011122222220002121022222021122222222222202011022222122222222122201222212111220022212222220122211120202122002220022212022222222102222120121012202010221022120122220122012122222221002222222212212110122202122221022022101202202002222022212022221022211020102212222221122022022221022022222220221122002222222200221122222021122122222211102222222212210100122212122220122122220222202002221222012122220122200122122112122220122212122020022212222220221012122211221001212022220110012022222112022222222222200021202212222222222122222202212110222222122122220022211022112202222221022212122020122212222000022012022110222022111222222220021122212211012222222222221201212212022222022122211212222020
22112220222222112221112102211000222202212222222022202222201002122221221022020210222222121211102020210011222222221222201102222212222202212202121222211122012210212222112222202120221211222002202212222222202222201212120202222022221102112222210012222021222121222222020221211120222222222122202220221221220022022212202222212200012221021002222122220202222112212222202102000221220222212122222222022210012120210212222202021220010120222212222102222212021022200022212210212222012202212201120212222102211212222012222222212122221202221022201001122222110202002120220011222202220221120020222212222202202202021121202222012210222222022102212202122001222212201222222222222222220022122220202222102102222222002222102120200000222222221221121010220222222122222222021121220122102222212222202212022202220200222012220212222122221222210002221220220022211022202222122101112222212212222202222221020011221222222112222201121222200022112212202222112110122200122102222012220222202102220222222202102211222222121212201222000222202021212001222212021220020111222222222002202222020221200222212221222222022100122012201210222012021212212202200220210212120212200022111021000022021000122201220011222202222221222100220222222002202222220020220220222210202222212012212211020121222112102222212102221221221112221202200022111101011122201220102211220212222222122222210200220212222202222121002120222220022200212222012121022102111002222202210222222122200221222102010200210122021101111122210210202111201221222212221220101222222222222012212110100022221122102201212222122202222102110102222222001222212122200221210202022221201122001012100122211120122110210211220202220221012201222212222102212120000120202022112212202222022110002220010221222022020222212012200221201112012201211222012002102222200201222022200000220212020222102222220202222122212200011022220221012221222222202012022200121012222022100222222102220222221102010212212222212020121122121100112110201201220222021222000001220222222012202100101022202222022221002222212001112212020002222102222202222112
21212020110200220022012210112010212212020121200020212122120212122122012022222222202220210022022220212220220020222212220020202100112122222221120221221222212222100200112221012201210102212201111010211220001122021222122022222122121222220220210001222121202202222110222220202221210000010122221220022221211210122022101201212020002221020120112201122010212120110222020202022020022022221222220222202220112122012021222110222202220012202202111022200222222202200221222121000222200020102210011222220202111000221020210122120212122220220122220222221220222012212222012121221201222222221220222201022222220202021222221212212121100211122220212220210222101211201101221020001122011202022201112222122222211221202011012120122001220102222201200022202102121022210200121212202210012122002200222121112210002111111220200020202220020122111212022120122022121222202221202221012022212220222211222220202222202102010222220211021220202200202020200211011022022222222002101211011001212221000122101212122222012022222221200221201011112020012210220112022201210201211222001022221200201220220211202021021202002120212222100221100200200212202120010222120202022012211122222221202220222110202221202122222211122222212110201102021022220211220001200201022122100212122120112212211111020201200202212021200022201222022020201122220221200222201111222021122020222011122210221020212121012222201201021112210211022220102200210022112200220200120212101102211020101022112202022120101222021221212222221211122121222210221022222210221111221120112102221202101020201220202222010211121120222212102212100210222200210021202022011202120220111222121222202220210011002221102221222000222221210200201212020120222202010221202002102122222222200022202202010211102210122111222220100022102212021211222022022222201222212012002221112110221221102010201011201021120102201222222112221202202222012222220122112222001022120221020001221222221022000002222212201222221221210220221202002221112102221101202200202020220210002120221211110220210111022022010211110222222212121010121222200201222221
0212222112021211200220222212122122222022100220210121202222200120222010222101001202022202101201012202010120220022002110211222211222112202202121022102201202222111022202010200222202022112222211112221202220022212212121012112002012211120012222200111012112002120210022222221222022000002002212220002112020212001222121221100012022221202202002222221002222222122112222012022222002222201201020212222021121002100102021220002012210210022020002110002221122022212211112222112020121102010221222102202202020212020221021012210001120222210222002122101002122111200102102102221212012002000221122212211121022221212022020202212221201220211220101222202110102212201022020221120022202111121022010112101111211012211020101222211112122221022102001212022022020222022000110202012211020220020121120211220120222221202222122001102210220212210222221102220222212110121212100001020222221211020201022110210100101012101100020122100110201011122012110012202212122001121011211000120220101002211020100220012020222221002111121001210002021200210011111022010"
# Expected part-2 rendering (letters drawn with full-block glyphs).
# NOTE(review): the inter-glyph spacing may have been mangled during
# extraction — verify against the original file before trusting a diff.
part2_output = """
█ █ ████ ██ ███ █ █
█ █ █ █ █ █ █ █ █
██ ███ █ █ ███ █ █
█ █ █ ████ █ █ █
█ █ █ █ █ █ █ █
█ █ █ █ █ ███ █
"""
def test_for_corruption():
    """Sanity-check part 1 against the worked example from the puzzle text
    (3x2 layers over the digits 123456789012)."""
    expected = 1
    assert day8.calculate_part_1("123456789012", 3, 2) == expected
def test_part1():
    """Part 1 answer for the real puzzle input (25x6 image layers)."""
    assert day8.calculate_part_1(my_input, 25, 6) == 1716
def test_part2():
    # Compare the rendered images modulo leading/trailing blank lines.
    rendered = day8.calculate_part_2(my_input, 25, 6)
    assert rendered.strip() == part2_output.strip()
| 555.607143 | 15,013 | 0.981745 | 104 | 15,557 | 147.288462 | 0.307692 | 0.004048 | 0.005092 | 0.006006 | 0.008487 | 0.005353 | 0.001763 | 0.001763 | 0.001763 | 0.001763 | 0 | 0.977512 | 0.010992 | 15,557 | 27 | 15,014 | 576.185185 | 0.014104 | 0 | 0 | 0.105263 | 0 | 0 | 0.975059 | 0.964196 | 0 | 1 | 0 | 0 | 0.157895 | 1 | 0.157895 | false | 0 | 0.052632 | 0 | 0.210526 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e13e45969f46571f009533bd1029dc6938ab80e7 | 16,545 | py | Python | youtube/youtube-dl.py | adam0306/personal | f387fc3aa13f038114a6cc11e45d8ae9f17a5bc4 | [
"Unlicense"
] | null | null | null | youtube/youtube-dl.py | adam0306/personal | f387fc3aa13f038114a6cc11e45d8ae9f17a5bc4 | [
"Unlicense"
] | null | null | null | youtube/youtube-dl.py | adam0306/personal | f387fc3aa13f038114a6cc11e45d8ae9f17a5bc4 | [
"Unlicense"
] | null | null | null |
#!/bin/bash
# Download the last 7 days of uploads from each subscribed channel.
#
# --download-archive download.txt records every fetched video id, so
# already-downloaded videos are skipped on subsequent runs.
# The format selector prefers mp4 video + m4a audio, falling back to the
# best available pair, merged into an mp4 container.
#
# This replaces 79 copy-pasted invocations that differed only in the URL:
# the shared options now live in one array, and the channel list is data.

ytdl_opts=(
  --dateafter now-7days
  --download-archive download.txt
  -f 'bestvideo[ext=mp4]+bestaudio[ext=m4a]/bestvideo+bestaudio'
  --merge-output-format mp4
)

channels=(
  "https://www.youtube.com/channel/UCOQ1cSf37ags3wnn9XEOC6Q/videos"
  "https://www.youtube.com/user/JustinRhodesVlog/videos"
  "https://www.youtube.com/user/TheSmokingTire/videos"
  "https://www.youtube.com/channel/UCqdlPPL3kxVRT9hbGhq5GCw/videos"
  "https://www.youtube.com/user/viadmin/videos"
  "https://www.youtube.com/user/VMwareCareers/videos"
  "https://www.youtube.com/user/TheHeroLance/videos"
  "https://www.youtube.com/user/TheVMwareTips/videos"
  "https://www.youtube.com/user/VMwareTechPubs/videos"
  "https://www.youtube.com/user/AlabamaShakespeare/videos"
  "https://www.youtube.com/user/MoreFPSRussia/videos"
  "https://www.youtube.com/user/channelintel/videos"
  "https://www.youtube.com/user/DOITWITHDAN/videos"
  "https://www.youtube.com/user/cosroman/videos"
  "https://www.youtube.com/channel/UCQu452k816654zZoo1iT9SA/videos"
  "https://www.youtube.com/user/wilsontech1/videos"
  "https://www.youtube.com/user/BaconDonutTV/videos"
  "https://www.youtube.com/user/barnacules1/videos"
  "https://www.youtube.com/user/TheSyndicateProject/videos"
  "https://www.youtube.com/user/YouVeeam/videos"
  "https://www.youtube.com/user/vmwaretv/videos"
  "https://www.youtube.com/user/ThatRacingChannel/videos"
  "https://www.youtube.com/channel/UC8HeWUOGOc1EVHl0fALuNlQ/videos"
  "https://www.youtube.com/user/AFBlueTube/videos"
  "https://www.youtube.com/user/Walterrificonline/videos"
  "https://www.youtube.com/channel/UCXBtIJBzUZtgaBdVkcCOMnw/videos"
  "https://www.youtube.com/channel/UCeU05pwtEAreeF81saVb9XQ/videos"
  "https://www.youtube.com/user/defenseupdate/videos"
  "https://www.youtube.com/user/MrNicksmith82/videos"
  "https://www.youtube.com/channel/UCN8FHFshMw-15AtFKWSLczA/videos"
  "https://www.youtube.com/channel/UCK9JEqf7LBBx3tkrPx2xvbQ/videos"
  "https://www.youtube.com/user/TheSharkDaymond/videos"
  "https://www.youtube.com/user/CERNTV/videos"
  "https://www.youtube.com/user/patrickbetdavid/videos"
  "https://www.youtube.com/channel/UCaZf13iWhwnBdpIkrEmHLbA/videos"
  "https://www.youtube.com/user/thegentlemansgazette/videos"
  "https://www.youtube.com/user/Airforceproud95/videos"
  "https://www.youtube.com/user/DemolitionRanch/videos"
  "https://www.youtube.com/user/VMwareLearning/videos"
  "https://www.youtube.com/user/TheUkstrongest/videos"
  "https://www.youtube.com/user/Boeing/videos"
  "https://www.youtube.com/user/hickok45/videos"
  "https://www.youtube.com/user/americanmusclevideos/videos"
  "https://www.youtube.com/user/SyndicateCentral/videos"
  "https://www.youtube.com/user/trickstutorials/videos"
  "https://www.youtube.com/channel/UCOWcZ6Wicl-1N34H0zZe38w/videos"
  "https://www.youtube.com/user/LockheedMartinVideos/videos"
  "https://www.youtube.com/user/DeviantOllam/videos"
  "https://www.youtube.com/user/unboxtherapy/videos"
  "https://www.youtube.com/user/Jayztwocents/videos"
  "https://www.youtube.com/user/duncan33303/videos"
  "https://www.youtube.com/user/AwesomeSauceNews/videos"
  "https://www.youtube.com/user/paulshardware/videos"
  "https://www.youtube.com/channel/UCQN2DsjnYH60SFBIA6IkNwg/videos"
  "https://www.youtube.com/channel/UCVS6ejD9NLZvjsvhcbiDzjw/videos"
  "https://www.youtube.com/user/SHAWSTRENGTH/videos"
  "https://www.youtube.com/channel/UCVYamHliCI9rw1tHR1xbkfw/videos"
  "https://www.youtube.com/user/UrAvgConsumer/videos"
  "https://www.youtube.com/channel/UCT3EznhW_CNFcfOlyDNTLLw/videos"
  "https://www.youtube.com/channel/UCrUL8K81R4VBzm-KOYwrcxQ/videos"
  "https://www.youtube.com/channel/UC9ZKDGCc5R67fVvLFSv-OLA/videos"
  "https://www.youtube.com/user/Ryanfun1/videos"
  "https://www.youtube.com/user/engineer775/videos"
  "https://www.youtube.com/channel/UCgTNupxATBfWmfehv21ym-g/videos"
  "https://www.youtube.com/user/tldtoday/videos"
  "https://www.youtube.com/user/marquesbrownlee/videos"
  "https://www.youtube.com/user/Hak5Darren/videos"
  "https://www.youtube.com/channel/UCwO_xoYm2vjhu4kZSxFO5mA/videos"
  # NOTE(review): this entry had no /videos suffix in the original;
  # preserved as-is.
  "https://www.youtube.com/channel/UCsgzmECky2Q9lQMWzDwMhYw"
  "https://www.youtube.com/user/letsdig18/videos"
  "https://www.youtube.com/channel/UCEFRKVwtp_ZYU1TbDqA9nzQ/videos"
  "https://www.youtube.com/user/RealMenRealStyle/videos"
  "https://www.youtube.com/user/TWiTSecurityNow/videos"
  "https://www.youtube.com/user/LinusTechTips/videos"
  "https://www.youtube.com/user/TheTecknowledge/videos"
  "https://www.youtube.com/user/MoneyGuyShow/videos"
  "https://www.youtube.com/user/teksyndicate/videos"
  "https://www.youtube.com/user/SecurityWeeklyTV/videos"
  "https://www.youtube.com/user/DaveRamseyShow/videos"
)

for url in "${channels[@]}"; do
  youtube-dl "${ytdl_opts[@]}" "$url"
done
| 204.259259 | 219 | 0.789604 | 2,298 | 16,545 | 5.683638 | 0.047868 | 0.054437 | 0.108874 | 0.127019 | 0.904908 | 0.904908 | 0.904908 | 0.904908 | 0.904908 | 0.904908 | 0 | 0.025835 | 0.047809 | 16,545 | 80 | 220 | 206.8125 | 0.803225 | 0 | 0 | 0 | 0 | 0 | 0.522485 | 0.272183 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e14ec5506d86cef48aa883f6818000771ae03f09 | 28,365 | py | Python | kaleidoscope/kaleidoscope/server/server.py | UAMS-DBMI/PosdaTools | 7d33605da1b88e4787a1368dbecaffda1df95e5b | [
"Apache-2.0"
] | 6 | 2019-01-17T15:47:44.000Z | 2022-02-02T16:47:25.000Z | kaleidoscope/kaleidoscope/server/server.py | UAMS-DBMI/PosdaTools | 7d33605da1b88e4787a1368dbecaffda1df95e5b | [
"Apache-2.0"
] | 23 | 2016-06-08T21:51:36.000Z | 2022-03-02T08:11:44.000Z | kaleidoscope/kaleidoscope/server/server.py | UAMS-DBMI/PosdaTools | 7d33605da1b88e4787a1368dbecaffda1df95e5b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3.6
import sys
import os
import logging
from sanic import Sanic
from sanic.response import json, text, HTTPResponse
import aiofiles
import uuid
import asyncio
import uvloop
import datetime
from urllib.parse import unquote
import asyncpg
# Global debug flag.
DEBUG=False
# Session inactivity limit — presumably compared against User.age() by the
# user_watch task defined elsewhere; confirm against that code.
LOGIN_TIMEOUT = datetime.timedelta(seconds=2*60*60) # 2 hours
# Active login sessions, keyed by session token.
sessions = {} # token => username
app = Sanic()
# Shared asyncpg connection pool; populated in the before_server_start
# listener below.
pool = None
# Event loop handle; not assigned anywhere in the code visible here.
eventloop = None
class User(object):
    """A logged-in user session identified by a random hex token.

    Tracks the last time the session was touched so idle sessions can be
    detected (see ``age``).
    """

    def __init__(self, name):
        self.name = name                # login name
        self.token = uuid.uuid4().hex   # opaque 32-char session token
        self.touch()                    # sets self.last_updated

    def age(self):
        """Return the time elapsed since the session was last touched."""
        return datetime.datetime.now() - self.last_updated

    def is_elderly(self):
        # TODO(review): empty stub in the original, so it always returns
        # None (falsy). Presumably meant to test age() against
        # LOGIN_TIMEOUT; left unimplemented so the expiry behaviour of
        # the (unseen) user_watch task is unchanged.
        pass

    def touch(self):
        """Reset the inactivity clock to now."""
        self.last_updated = datetime.datetime.now()

    def __str__(self):
        return f"<User: {self.name}, {self.age()}, {self.token}>"

    def __unicode__(self):
        # Bug fix: the original called self.str(), which does not exist
        # and raised AttributeError on every call.
        return self.__str__()
@app.listener("before_server_start")
async def connect_to_db(sanic, loop):
    """Create the shared asyncpg pool and launch the user_watch task.

    Runs once before Sanic starts serving. user_watch is defined
    elsewhere in this file — presumably the session-expiry loop; confirm
    against its definition.
    """
    global pool
    pool = await asyncpg.create_pool(database='posda_files',
                                     loop=loop)
    loop.create_task(user_watch())
@app.route("/api/details/<iec>")
async def get_details(request, iec):
    """Return the full detail record of one image equivalence class (IEC).

    Includes the output-image path, the input-file count, and the
    body part / patient id looked up from the first matching series row.
    Raises IndexError (-> 500) if the IEC id does not exist.
    """
    query = """
        select
            image_equivalence_class_id,
            series_instance_uid,
            equivalence_class_number,
            processing_status,
            review_status,
            projection_type,
            file_id,
            root_path || '/' || rel_path as path,
            update_user,
            to_char(update_date, 'YYYY-MM-DD HH:MI:SS AM') as update_date,
            (select count(file_id)
               from image_equivalence_class_input_image i
              where i.image_equivalence_class_id =
                    image_equivalence_class.image_equivalence_class_id) as file_count,
            (select body_part_examined
               from file_series
              where file_series.series_instance_uid = image_equivalence_class.series_instance_uid limit 1) as body_part_examined,
            (select patient_id
               from file_patient
               natural join file_series
              where file_series.series_instance_uid = image_equivalence_class.series_instance_uid limit 1) as patient_id
        from image_equivalence_class
        natural join image_equivalence_class_out_image
        natural join file_location
        natural join file_storage_root
        where image_equivalence_class_id = $1
    """
    # async with returns the connection to the pool even when fetch
    # raises; the original manual acquire/release leaked on error.
    async with pool.acquire() as conn:
        records = await conn.fetch(query, int(iec))
    return json(dict(records[0]))
@app.route("/api/hide/collection/<collection>/<site>")
async def hide_collection(request, collection, site):
    """Mark every IEC in <collection>/<site> as hidden.

    The action is recorded in log_iec_hide for auditing; the acting user
    comes from the 'user' header injected by the login middleware.
    """
    collection = unquote(collection)
    site = unquote(site)
    user = request.headers['user']  # was injected by login middleware
    logging.debug(f"Hiding: {collection}|{site}")
    # Security fix: the original interpolated the URL-supplied values
    # straight into the SQL text, which allowed SQL injection. asyncpg
    # cannot prepare multi-statement strings, so the audit insert and the
    # update run as two parameterized statements inside one transaction
    # (preserving the atomicity of the original single round-trip).
    async with pool.acquire() as conn:
        async with conn.transaction():
            await conn.execute(
                """
                insert into log_iec_hide (user_name, project, site, hidden)
                values ($1, $2, $3, true)
                """,
                user.name, collection, site)
            status = await conn.execute(
                """
                update image_equivalence_class
                set hidden = true
                where image_equivalence_class_id in (
                    select image_equivalence_class_id
                    from image_equivalence_class
                    natural join image_equivalence_class_input_image
                    natural join ctp_file
                    where project_name = $1
                      and site_name = $2
                )
                """,
                collection, site)
    logging.debug(f"Updated {status} rows?")
    return json({'status': 'success'})
@app.route("/api/unhide/collection/<collection>/<site>")
async def unhide_collection(request, collection, site):
    """Clear the hidden flag on every IEC in <collection>/<site>.

    The action is recorded in log_iec_hide for auditing; the acting user
    comes from the 'user' header injected by the login middleware.
    """
    collection = unquote(collection)
    site = unquote(site)
    user = request.headers['user']  # was injected by login middleware
    logging.debug(f"Unhiding: {collection}|{site}")
    # Security fix: parameterized statements replace the original f-string
    # SQL, which was injectable through the URL-supplied collection/site.
    # Two statements in one transaction, since asyncpg cannot prepare a
    # multi-statement string.
    async with pool.acquire() as conn:
        async with conn.transaction():
            await conn.execute(
                """
                insert into log_iec_hide (user_name, project, site, hidden)
                values ($1, $2, $3, false)
                """,
                user.name, collection, site)
            status = await conn.execute(
                """
                update image_equivalence_class
                set hidden = false
                where image_equivalence_class_id in (
                    select image_equivalence_class_id
                    from image_equivalence_class
                    natural join image_equivalence_class_input_image
                    natural join ctp_file
                    where project_name = $1
                      and site_name = $2
                )
                """,
                collection, site)
    logging.debug(f"Updated {status} rows?")
    return json({'status': 'success'})
@app.route("/api/hide/patient/<collection>/<site>/<patient>")
async def hide_patient(request, collection, site, patient):
    """Mark every IEC of one patient in <collection>/<site> as hidden.

    The action is recorded in log_iec_hide for auditing; the acting user
    comes from the 'user' header injected by the login middleware.
    """
    collection = unquote(collection)
    site = unquote(site)
    patient = unquote(patient)
    user = request.headers['user']  # was injected by login middleware
    logging.debug(f"Hiding: {collection}|{site}[{patient}]")
    # Security fix: parameterized statements replace the original f-string
    # SQL, which was injectable through the URL-supplied values. Two
    # statements in one transaction (asyncpg cannot prepare a
    # multi-statement string).
    async with pool.acquire() as conn:
        async with conn.transaction():
            await conn.execute(
                """
                insert into log_iec_hide (user_name, project, site, patient, hidden)
                values ($1, $2, $3, $4, true)
                """,
                user.name, collection, site, patient)
            status = await conn.execute(
                """
                update image_equivalence_class
                set hidden = true
                where image_equivalence_class_id in (
                    select image_equivalence_class_id
                    from image_equivalence_class
                    natural join image_equivalence_class_input_image
                    natural join ctp_file
                    natural join file_patient
                    where project_name = $1
                      and site_name = $2
                      and patient_id = $3
                )
                """,
                collection, site, patient)
    logging.debug(f"Updated {status} rows?")
    return json({'status': 'success'})
@app.route("/api/unhide/patient/<collection>/<site>/<patient>")
async def unhide_patient(request, collection, site, patient):
    """Clear the hidden flag on every IEC of one patient in <collection>/<site>.

    The action is recorded in log_iec_hide for auditing; the acting user
    comes from the 'user' header injected by the login middleware.
    """
    collection = unquote(collection)
    site = unquote(site)
    patient = unquote(patient)
    user = request.headers['user']  # was injected by login middleware
    # Bug fix: the original debug message said "Hiding:" in this unhide
    # handler.
    logging.debug(f"Unhiding: {collection}|{site}[{patient}]")
    # Security fix: parameterized statements replace the original f-string
    # SQL, which was injectable through the URL-supplied values. Two
    # statements in one transaction (asyncpg cannot prepare a
    # multi-statement string).
    async with pool.acquire() as conn:
        async with conn.transaction():
            await conn.execute(
                """
                insert into log_iec_hide (user_name, project, site, patient, hidden)
                values ($1, $2, $3, $4, false)
                """,
                user.name, collection, site, patient)
            status = await conn.execute(
                """
                update image_equivalence_class
                set hidden = false
                where image_equivalence_class_id in (
                    select image_equivalence_class_id
                    from image_equivalence_class
                    natural join image_equivalence_class_input_image
                    natural join ctp_file
                    natural join file_patient
                    where project_name = $1
                      and site_name = $2
                      and patient_id = $3
                )
                """,
                collection, site, patient)
    logging.debug(f"Updated {status} rows?")
    return json({'status': 'success'})
@app.route("/api/patients/<collection>/<site>/<state>")
async def get_patients(request, collection, site, state):
    """List distinct patient ids in <collection>/<site>, filtered by
    hidden state ('hidden' or 'unhidden'). Unknown states raise KeyError
    (-> 500).
    """
    collection = unquote(collection)
    site = unquote(site)
    logging.debug(f"State: {state} {collection}|{site}")
    # Whitelist lookup: only these two literal fragments can reach the
    # SQL text, so interpolating {where_clause} below is safe.
    where_clause = {
        'hidden': ("hidden"),
        'unhidden': ("not hidden"),
    }[state.lower()]
    query = f"""
        select distinct patient_id
        from ctp_file
        natural join file_patient
        natural join image_equivalence_class
        natural join image_equivalence_class_input_image
        where {where_clause}
          and project_name = $1
          and site_name = $2
    """
    # async with returns the connection to the pool even when fetch
    # raises; the original manual acquire/release leaked on error.
    async with pool.acquire() as conn:
        records = await conn.fetch(query, collection, site)
    return json([i[0] for i in records])
@app.route("/api/projects/<state>")
async def get_projects(request, state):
    """Count IECs per (project, site) for the requested review state.

    Returns a JSON list of {project_name, site_name, count} rows ordered
    by descending count. Unknown states raise KeyError (-> 500).
    """
    logging.debug(f"State: {state}")
    # Whitelist lookup: the URL state maps to one of these literal SQL
    # fragments, so interpolating {where_clause} below is safe.
    where_clause = {
        'unreviewed': "not hidden and processing_status = 'ReadyToReview'",
        'good': ("not hidden "
                 "and processing_status = 'Reviewed' "
                 "and review_status='Good'"),
        'bad': ("not hidden "
                "and processing_status = 'Reviewed' "
                "and review_status='Bad'"),
        'blank': ("not hidden "
                  "and processing_status = 'Reviewed' "
                  "and review_status='Blank'"),
        'scout': ("not hidden "
                  "and processing_status = 'Reviewed' "
                  "and review_status='Scout'"),
        'other': ("not hidden "
                  "and processing_status = 'Reviewed' "
                  "and review_status='Other'"),
        'hidden': ("hidden"),
        'unhidden': ("not hidden"),
    }[state.lower()]
    query = f"""
    /*
    This query gets a list of what Project/Site combos have IECs waiting
    to be reviewed, along with a count for each.

    It is somewhat complex, as it attempts to figure out the project/site
    of a given IEC based on only the first input image. This is much faster
    than the original simpler query.

    This could be improved further by storing the project/site info
    on an IEC level, either in the image_equivalence_class table or in a
    seperate table.
    */
    select
        project_name,
        site_name,
        count(image_equivalence_class_id)
    from (
        select
            image_equivalence_class_id,
            (select project_name from ctp_file
              where ctp_file.file_id =
                (
                  select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) project_name,
            (select site_name from ctp_file
              where ctp_file.file_id =
                (
                  select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) site_name
        from image_equivalence_class iec
        where {where_clause}
    ) a
    group by project_name, site_name
    order by count desc
    """
    # async with returns the connection to the pool even when fetch
    # raises; the original manual acquire/release leaked on error.
    async with pool.acquire() as conn:
        records = await conn.fetch(query)
    return json([dict(i.items()) for i in records])
@app.route("/api/set/<state>")
async def get_set(request, state):
    """Return (as JSON) the next IEC for *state*, honouring the optional
    offset / project / site / dicom_file_type / visual_review_instance_id
    query parameters. Unknown states raise KeyError (-> 500).
    """
    args = request.args
    after = int(args.get('offset') or 0)
    collection = args.get('project')
    site = args.get('site')
    dicom_file_type = args.get('dicom_file_type')
    visual_review_instance_id = args.get('visual_review_instance_id')
    logging.debug(f"get_set:state={state},site={site},collection={collection},dicom_file_type={dicom_file_type},visual_review_instance_id={visual_review_instance_id}")
    # Dispatch table: each review state maps to its fetcher coroutine.
    dispatch = {
        'unreviewed': get_unreviewed_data,
        'good': get_good_data,
        'bad': get_bad_data,
        'blank': get_blank_data,
        'scout': get_scout_data,
        'other': get_other_data,
    }
    handler = dispatch[state.lower()]
    logging.debug(f"handler chosen: {handler}")
    records = await handler(after, collection, site, dicom_file_type, visual_review_instance_id)
    logging.debug("get_set:request handled, emitting response now")
    return json([dict(i.items()) for i in records])
async def new_get_reviewed_data(state, after, dicom_file_type, visual_review_instance_id):
    """Fetch the next reviewed IEC (review_status == *state*) belonging to
    a visual review, with id strictly greater than *after*.

    Note: Currently this method is only called if a
    visual_review_instance_id was given, so we can assume it exists.
    Returns at most one asyncpg Record.
    """
    logging.debug(f"new_get_reviewed_data called with vr {visual_review_instance_id} "
                  f"and after: {after}")
    # Security fix: the original interpolated visual_review_instance_id,
    # state and dicom_file_type directly into the SQL text, which was
    # injectable. All request-derived values are now bound as query
    # parameters ($1..$4); int() additionally validates the review id.
    args = [after, int(visual_review_instance_id), state]
    dicom_where = ""
    if dicom_file_type is not None:
        dicom_where = "where dicom_file_type = $4"
        args.append(dicom_file_type)
    query = f"""
    with one_file_per_iec as (
        /*
        For each IEC in the given Visual Review, select
        one input file.

        "distinct on()" allows us to return only 1 row
        from within the distinct set. It is not really defined
        which file we get, but it doesn't matter for this query.
        */
        select distinct on (image_equivalence_class_id)
            image_equivalence_class_id,
            file_id
        from
            image_equivalence_class_input_image
            natural join image_equivalence_class
        where
            visual_review_instance_id = $2
    ), iec_with_filetype as (
        select
            image_equivalence_class_id,
            file_id
        from
            one_file_per_iec
            natural join dicom_file
        {dicom_where}
    ), iec_with_project as (
        select
            image_equivalence_class_id,
            project_name,
            trial_name,
            site_name,
            site_id,
            visibility,
            file_id as input_file_id
        from
            iec_with_filetype
            natural join ctp_file
    )
    select * from (
        select distinct on (image_equivalence_class_id)
            image_equivalence_class_id,
            image_equivalence_class_id::int as id2,
            image_equivalence_class.series_instance_uid,
            equivalence_class_number,
            processing_status,
            review_status,
            projection_type,
            image_equivalence_class_out_image.file_id,
            root_path || '/' || rel_path as path,
            (
                select count(file_id)
                from image_equivalence_class_input_image i
                where i.image_equivalence_class_id =
                    image_equivalence_class.image_equivalence_class_id
            ) as file_count,
            body_part_examined,
            patient_id
        from iec_with_project
        natural join image_equivalence_class
        natural join image_equivalence_class_out_image
        natural join file_location
        natural join file_storage_root
        join file_series on file_series.file_id = input_file_id
        join file_patient on file_patient.file_id = input_file_id
        where not hidden -- this hidden is the IEC-level hidden, NOT file-level
        and processing_status = 'Reviewed'
        and review_status = $3
        order by image_equivalence_class_id
    ) foo
    where id2 > $1
    limit 1
    """
    logging.debug(query)
    # async with returns the connection to the pool even when fetch
    # raises; the original manual acquire/release leaked on error.
    async with pool.acquire() as conn:
        records = await conn.fetch(query, *args)
    return records
async def new_get_unreviewed_data(after, dicom_file_type, visual_review_instance_id):
    """Fetch the next IEC awaiting review in a visual review, with id
    strictly greater than *after*.

    Note: Currently this method is only called if a
    visual_review_instance_id was given, so we can assume it exists.
    Returns at most one asyncpg Record.
    """
    logging.debug(f"new_get_unreviewed_data called with vr {visual_review_instance_id} "
                  f"and after: {after}")
    # Security fix: the original interpolated visual_review_instance_id
    # and dicom_file_type directly into the SQL text, which was
    # injectable. Both are now bound as query parameters ($2, $3); int()
    # additionally validates the review id.
    args = [after, int(visual_review_instance_id)]
    dicom_where = ""
    if dicom_file_type is not None:
        dicom_where = "where dicom_file_type = $3"
        args.append(dicom_file_type)
    query = f"""
    with one_file_per_iec as (
        /*
        For each IEC in the given Visual Review, select
        one input file.

        "distinct on()" allows us to return only 1 row
        from within the distinct set. It is not really defined
        which file we get, but it doesn't matter for this query.
        */
        select distinct on (image_equivalence_class_id)
            image_equivalence_class_id,
            file_id
        from
            image_equivalence_class_input_image
            natural join image_equivalence_class
        where
            visual_review_instance_id = $2
    ), iec_with_filetype as (
        select
            image_equivalence_class_id,
            file_id
        from
            one_file_per_iec
            natural join dicom_file
        {dicom_where}
    ), iec_with_project as (
        select
            image_equivalence_class_id,
            project_name,
            trial_name,
            site_name,
            site_id,
            visibility,
            file_id as input_file_id
        from
            iec_with_filetype
            natural join ctp_file
    )
    select distinct on (image_equivalence_class_id)
        image_equivalence_class_id,
        image_equivalence_class.series_instance_uid,
        equivalence_class_number,
        processing_status,
        review_status,
        projection_type,
        image_equivalence_class_out_image.file_id,
        root_path || '/' || rel_path as path,
        (
            select count(file_id)
            from image_equivalence_class_input_image i
            where i.image_equivalence_class_id =
                image_equivalence_class.image_equivalence_class_id
        ) as file_count,
        body_part_examined,
        patient_id
    from iec_with_project
    natural join image_equivalence_class
    natural join image_equivalence_class_out_image
    natural join file_location
    natural join file_storage_root
    join file_series on file_series.file_id = input_file_id
    join file_patient on file_patient.file_id = input_file_id
    where not hidden -- this hidden is the IEC-level hidden, NOT file-level
    and processing_status = 'ReadyToReview'
    and image_equivalence_class_id > $1
    order by image_equivalence_class_id
    limit 1
    """
    logging.debug(query)
    # async with returns the connection to the pool even when fetch
    # raises; the original manual acquire/release leaked on error.
    async with pool.acquire() as conn:
        records = await conn.fetch(query, *args)
    return records
async def get_unreviewed_data(after, collection, site, dicom_file_type, visual_review_instance_id):
    """Fetch at most one 'ReadyToReview' IEC, with optional filters.

    When visual_review_instance_id is given, the work is delegated to
    new_get_unreviewed_data(); otherwise optional where-clauses are built
    from collection / site / dicom_file_type.  `after` is a pagination
    cursor: only IECs with image_equivalence_class_id > after qualify
    (bound as $1).
    """
    if visual_review_instance_id is not None:
        return await new_get_unreviewed_data(after, dicom_file_type, visual_review_instance_id)

    def esc(value):
        # Escape single quotes: the values are interpolated into the SQL
        # text below (the pinned asyncpg cannot use bind vars everywhere,
        # see the note in save()), so this prevents SQL injection.
        return value.replace("'", "''")

    where_text = ""
    if collection is not None:
        where_text += f"and project_name = '{esc(collection)}' "
    if site is not None:
        where_text += f"and site_name = '{esc(site)}' "
    if dicom_file_type is not None:
        where_text += f"and dicom_file_type = '{esc(dicom_file_type)}' "
    # NOTE: a visual_review_instance_id clause used to be appended here as
    # well, but that branch was unreachable -- the function returns above
    # whenever visual_review_instance_id is not None.
    logging.debug(f"get_unreviewed_data where_text: {where_text}")
    logging.debug(f"after: {after}")
    query = f"""
    select
        image_equivalence_class_id,
        series_instance_uid,
        equivalence_class_number,
        processing_status,
        review_status,
        projection_type,
        image_equivalence_class_out_image.file_id,
        root_path || '/' || rel_path as path,
        (select count(file_id)
         from image_equivalence_class_input_image i
         where i.image_equivalence_class_id =
               image_equivalence_class.image_equivalence_class_id) as file_count,
        (select body_part_examined
         from file_series
         where file_series.series_instance_uid = image_equivalence_class.series_instance_uid limit 1) as body_part_examined,
        (select patient_id
         from file_patient
         natural join file_series
         where file_series.series_instance_uid = image_equivalence_class.series_instance_uid limit 1) as patient_id
    from (
        /*
           Acquire the project_name and site_name associated with each IEC
           by looking only at the first file_id of it's input image set.
           This is pretty ugly, but is more than 100x faster than other
           solutions.
           It could probably be sped up even more by storing project/site name
           at the IEC level (say, in image_equivalence_class table)
           Quasar, 2017-04-27
        */
        select
            image_equivalence_class_id,
            (select project_name from ctp_file
             where ctp_file.file_id =
                 (
                  select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) project_name,
            (select site_name from ctp_file
             where ctp_file.file_id =
                 (
                  select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) site_name,
            (select dicom_file_type from dicom_file
             where dicom_file.file_id =
                 (select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) dicom_file_type,
            processing_status
        from image_equivalence_class iec
        where not hidden
            and processing_status = 'ReadyToReview'
        order by image_equivalence_class_id
    ) iecs
    natural join image_equivalence_class
    natural join image_equivalence_class_out_image
    natural join file_location
    natural join file_storage_root
    where 1 = 1
        and image_equivalence_class_id > $1
        {where_text}
    order by image_equivalence_class_id
    limit 1
    """
    logging.debug(query)
    conn = await pool.acquire()
    try:
        # release the connection even if the query raises
        return await conn.fetch(query, after)
    finally:
        await pool.release(conn)
async def get_good_data(after, collection, site, dicom_file_type, visual_review_instance_id):
    """Return one reviewed IEC whose review_status is 'Good' (delegates to get_reviewed_data)."""
    return await get_reviewed_data('Good', after, collection, site, dicom_file_type, visual_review_instance_id)
async def get_bad_data(after, collection, site, dicom_file_type, visual_review_instance_id):
    """Return one reviewed IEC whose review_status is 'Bad' (delegates to get_reviewed_data)."""
    return await get_reviewed_data('Bad', after, collection, site, dicom_file_type, visual_review_instance_id)
async def get_blank_data(after, collection, site, dicom_file_type, visual_review_instance_id):
    """Return one reviewed IEC whose review_status is 'Blank' (delegates to get_reviewed_data)."""
    return await get_reviewed_data('Blank', after, collection, site, dicom_file_type, visual_review_instance_id)
async def get_scout_data(after, collection, site, dicom_file_type, visual_review_instance_id):
    """Return one reviewed IEC whose review_status is 'Scout' (delegates to get_reviewed_data)."""
    return await get_reviewed_data('Scout', after, collection, site, dicom_file_type, visual_review_instance_id)
async def get_other_data(after, collection, site, dicom_file_type, visual_review_instance_id):
    """Return one reviewed IEC whose review_status is 'Other' (delegates to get_reviewed_data)."""
    return await get_reviewed_data('Other', after, collection, site, dicom_file_type, visual_review_instance_id)
async def get_reviewed_data(state, after, collection, site, dicom_file_type, visual_review_instance_id):
    """Fetch at most one reviewed IEC whose review_status equals *state*.

    When visual_review_instance_id is given, the work is delegated to
    new_get_reviewed_data(); otherwise optional where-clauses are built
    from collection / site / dicom_file_type.  `after` is a pagination
    cursor: only IECs with image_equivalence_class_id > after qualify
    (bound as $1).
    """
    if visual_review_instance_id is not None:
        return await new_get_reviewed_data(state, after, dicom_file_type, visual_review_instance_id)

    def esc(value):
        # Escape single quotes: the values are interpolated into the SQL
        # text below (the pinned asyncpg cannot use bind vars everywhere,
        # see the note in save()), so this prevents SQL injection.
        return value.replace("'", "''")

    where_text = ""
    if collection is not None:
        where_text += f"and project_name = '{esc(collection)}' "
    if site is not None:
        where_text += f"and site_name = '{esc(site)}' "
    if dicom_file_type is not None:
        where_text += f"and dicom_file_type = '{esc(dicom_file_type)}' "
    # NOTE: a visual_review_instance_id clause used to be appended here as
    # well, but that branch was unreachable -- the function returns above
    # whenever visual_review_instance_id is not None.
    # (log label fixed: it previously said "get_unreviewed_data")
    logging.debug(f"get_reviewed_data where_text: {where_text}")
    logging.debug(f"after: {after}")
    state_sql = esc(state)
    query = f"""
    select
        image_equivalence_class_id,
        series_instance_uid,
        equivalence_class_number,
        processing_status,
        review_status,
        projection_type,
        image_equivalence_class_out_image.file_id,
        root_path || '/' || rel_path as path,
        (select count(file_id)
         from image_equivalence_class_input_image i
         where i.image_equivalence_class_id =
               image_equivalence_class.image_equivalence_class_id) as file_count,
        (select body_part_examined
         from file_series
         where file_series.series_instance_uid = image_equivalence_class.series_instance_uid limit 1) as body_part_examined,
        (select patient_id
         from file_patient
         natural join file_series
         where file_series.series_instance_uid = image_equivalence_class.series_instance_uid limit 1) as patient_id
    from (
        /*
           Acquire the project_name and site_name associated with each IEC
           by looking only at the first file_id of it's input image set.
           This is pretty ugly, but is more than 100x faster than other
           solutions.
           It could probably be sped up even more by storing project/site name
           at the IEC level (say, in image_equivalence_class table)
           Quasar, 2017-04-27
        */
        select
            image_equivalence_class_id,
            (select project_name from ctp_file
             where ctp_file.file_id =
                 (
                  select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) project_name,
            (select site_name from ctp_file
             where ctp_file.file_id =
                 (
                  select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) site_name,
            (select dicom_file_type from dicom_file
             where dicom_file.file_id =
                 (select file_id
                  from image_equivalence_class_input_image i
                  where i.image_equivalence_class_id = iec.image_equivalence_class_id
                  limit 1) limit 1
            ) dicom_file_type,
            processing_status
        from image_equivalence_class iec
        where not hidden
            and processing_status = 'Reviewed'
            and review_status = '{state_sql}'
        order by image_equivalence_class_id
    ) iecs
    natural join image_equivalence_class
    natural join image_equivalence_class_out_image
    natural join file_location
    natural join file_storage_root
    where 1 = 1
        and image_equivalence_class_id > $1
        {where_text}
    order by image_equivalence_class_id
    limit 1
    """
    logging.debug(query)
    conn = await pool.acquire()
    try:
        # release the connection even if the query raises
        return await conn.fetch(query, after)
    finally:
        await pool.release(conn)
@app.route("/api/img")
async def image_from_id(request):
    """Serve the file named by the ?path= query argument as a JPEG.

    NOTE(review): `path` is taken verbatim from the request, so any file
    readable by the server process can be fetched (path traversal).
    Confirm whether access should be restricted to the image storage roots.
    """
    path = request.args['path'][0]
    async with aiofiles.open(path, 'rb') as img:
        payload = await img.read()
    return HTTPResponse(status=200,
                        headers=None,
                        content_type="image/jpeg",
                        body_bytes=payload)
@app.route("/api/new_token/<user>")
async def new_token(request, user):
    """Create a login session for *user* and return its token as JSON."""
    session_user = User(user)
    sessions[session_user.token] = session_user
    logging.debug(f"Creating new session for {session_user.name}: {session_user.token}")
    return json({'token': session_user.token})
@app.route("/test", methods=["GET", "POST"])
def slash_test(request):
    """Echo basic request attributes back as JSON (debugging aid)."""
    payload = {
        "args": request.args,
        "url": request.url,
        "headers": request.headers,
        "query_string": request.query_string,
    }
    return json(payload)
@app.middleware('request')
async def login_check(request):
    """Request middleware: attach the logged-in User to request.headers.

    Returns None to let the request proceed, or a 401 text response when
    no valid token is presented.
    """
    # Authentication is currently disabled: every request runs as a
    # synthetic user.  The original code used a bare `if True:` here; the
    # named flag makes the intent explicit.  Set to False to re-enable
    # the token checks below.
    AUTH_DISABLED = True
    if AUTH_DISABLED:
        request.headers["user"] = User('__auth_disabled__')
        return None
    logging.debug(f"### {request.url}?{request.query_string}")
    if 'new_token' in request.url:
        # token creation endpoint is exempt from the check
        return None
    # get token from args, or from json body
    token = request.args.get('token', None)
    if token is None:
        try:
            details = request.json
            token = details['token']
        except Exception:
            logging.debug("Rejecting request because no token")
            return text("not logged in", status=401)
    try:
        user = sessions[token]
    except KeyError:
        logging.debug("Rejecting request because invalid token")
        return text("not logged in", status=401)
    user.touch()  # refresh the session's idle timer
    request.headers["user"] = user
    return None
@app.route("/api/save", methods=["POST"])
async def save(request):
    """Persist a review decision for one image equivalence class (IEC).

    Expects a JSON body with 'iec' (the IEC id) and 'state' (the review
    status); records the decision together with the requesting user.
    """
    user = request.headers['user']  # was injected by login middleware
    details = request.json
    # TODO: There is a problem with conn.execute() and bind vars
    # in version 0.5.1 of asyncpg, which we are currently
    # pinned to because of using old postgres 8!
    # Values are therefore interpolated into the SQL text; sanitize them
    # to prevent SQL injection from the request body.
    iec = int(details['iec'])
    state = details['state'].title()
    logging.debug(f"Setting {iec} to {state}, by {user.name}")
    safe_state = state.replace("'", "''")
    safe_user = user.name.replace("'", "''")
    query = f"""
    update image_equivalence_class
    set processing_status = 'Reviewed',
        review_status = '{safe_state}',
        update_user = '{safe_user}',
        update_date = now()
    where image_equivalence_class_id = {iec}
    """
    conn = await pool.acquire()
    try:
        records = await conn.execute(query)
        logging.debug(f"Updated {records} rows?")
    finally:
        await pool.release(conn)
    return json({'status': 'success'})
async def user_watch():
    """Drop login sessions idle for longer than LOGIN_TIMEOUT, then reschedule."""
    await asyncio.sleep(10)
    expired = [token for token, user in sessions.items()
               if user.age() > LOGIN_TIMEOUT]
    for token in expired:
        logging.debug(f"Dropping login session for user: {sessions[token].name}")
        del sessions[token]
    # put ourselves back on the event loop
    asyncio.get_event_loop().create_task(user_watch())
if __name__ == "__main__":
    # os.environ values are strings: the old `!= 0` comparison was true
    # whenever the variable was set at all, even for DEBUG=0.  Compare
    # against string values instead.
    if os.environ.get('DEBUG', '0') not in ('0', '', 'false', 'False'):
        DEBUG = True
    if len(sys.argv) > 1 and sys.argv[1].lower() == 'debug':
        DEBUG = True
    if DEBUG:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.ERROR)
    logging.info("Starting up...")
    app.run(host="0.0.0.0", port=8089, debug=DEBUG)
| 31.41196 | 167 | 0.68306 | 3,773 | 28,365 | 4.856348 | 0.09568 | 0.117885 | 0.148993 | 0.081591 | 0.788299 | 0.768542 | 0.751514 | 0.744038 | 0.732522 | 0.702341 | 0 | 0.004733 | 0.232787 | 28,365 | 902 | 168 | 31.446785 | 0.837239 | 0.034338 | 0 | 0.702592 | 0 | 0.001364 | 0.621369 | 0.183433 | 0 | 0 | 0 | 0.001109 | 0 | 1 | 0.00955 | false | 0.002729 | 0.016371 | 0.005457 | 0.069577 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e19f302b94d2d2b9e6c1a3e54fa043e1ed95bcf3 | 100,737 | py | Python | pydra/engine/tests/test_workflow.py | ValHayot/pydra | b2740f7e7c7b3023e2862f617966b17d9965e30a | [
"Apache-2.0"
] | null | null | null | pydra/engine/tests/test_workflow.py | ValHayot/pydra | b2740f7e7c7b3023e2862f617966b17d9965e30a | [
"Apache-2.0"
] | null | null | null | pydra/engine/tests/test_workflow.py | ValHayot/pydra | b2740f7e7c7b3023e2862f617966b17d9965e30a | [
"Apache-2.0"
] | null | null | null | import pytest
import shutil, os
import time
import attr
from pathlib import Path
from .utils import (
add2,
add2_wait,
multiply,
power,
ten,
identity,
list_output,
fun_addvar3,
add2_sub2_res,
fun_addvar_none,
fun_addvar_default,
fun_write_file,
fun_write_file_list,
fun_write_file_list2dict,
)
from ..submitter import Submitter
from ..core import Workflow
# Run the "cf" (concurrent.futures) plugin everywhere; add "slurm" only
# when an sbatch executable is available on this host.
Plugins = ["cf", "slurm"] if shutil.which("sbatch") else ["cf"]
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_1(plugin):
    """Single add2 task, no splitter: 2 + 2 == 4."""
    wf = Workflow(name="wf_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.plugin = plugin

    checksum_before = wf.checksum
    with Submitter(plugin=plugin) as runner:
        runner(wf)

    # running the workflow must not alter its checksum
    assert wf.checksum == checksum_before
    res = wf.result()
    assert res.output.out == 4
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_1a_outpastuple(plugin):
    """Single-task workflow; set_output also accepts a bare tuple."""
    wf = Workflow(name="wf_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.set_output(("out", wf.add2.lzout.out))
    wf.inputs.x = 2
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    assert res.output.out == 4
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_1_call_subm(plugin):
    """Run the workflow via wf.__call__ with an explicit submitter."""
    wf = Workflow(name="wf_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        wf(submitter=runner)

    res = wf.result()
    assert res.output.out == 4
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_1_call_plug(plugin):
    """Run the workflow via wf.__call__ with a plugin name only."""
    wf = Workflow(name="wf_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.plugin = plugin

    wf(plugin=plugin)

    res = wf.result()
    assert res.output.out == 4
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_1_call_exception(plugin):
    """Passing BOTH a submitter and a plugin to wf.__call__ must raise."""
    wf = Workflow(name="wf_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        with pytest.raises(Exception) as excinfo:
            wf(submitter=runner, plugin=plugin)
        assert "Specify submitter OR plugin" in str(excinfo.value)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_2(plugin):
    """Two chained tasks (multiply -> add2), no splitter: 2*3 + 2 == 8."""
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.output_dir.exists()
    res = wf.result()
    assert res.output.out == 8
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_2a(plugin):
    """Like test_wf_2, but the add2 task object is built before wf.add()."""
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    second = add2(name="add2")
    second.inputs.x = wf.mult.lzout.out  # connect before adding
    wf.add(second)
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    assert res.output.out == 8
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_2b(plugin):
    """Like test_wf_2a, but the task's input is connected AFTER wf.add()."""
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    second = add2(name="add2")
    wf.add(second)
    second.inputs.x = wf.mult.lzout.out  # connect after adding
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    assert res.output.out == 8
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_2c_multoutp(plugin):
    """Two tasks, no splitter; expose one output from EACH node (list form)."""
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    second = add2(name="add2")
    second.inputs.x = wf.mult.lzout.out
    wf.add(second)
    # one workflow output per node
    wf.set_output([("out_add2", wf.add2.lzout.out), ("out_mult", wf.mult.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    assert res.output.out_mult == 6
    assert res.output.out_add2 == 8
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_2d_outpasdict(plugin):
    """Two tasks, no splitter; set_output also accepts a dictionary."""
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    second = add2(name="add2")
    second.inputs.x = wf.mult.lzout.out
    wf.add(second)
    # one workflow output per node, dict form
    wf.set_output({"out_add2": wf.add2.lzout.out, "out_mult": wf.mult.lzout.out})
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    assert res.output.out_mult == 6
    assert res.output.out_add2 == 8
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3(plugin):
    """A None value for one workflow input is accepted."""
    wf = Workflow(name="wf_3", input_spec=["x", "y"])
    wf.add(fun_addvar_none(name="addvar", a=wf.lzin.x, b=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.addvar.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = None
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.output_dir.exists()
    res = wf.result()
    assert res.output.out == 4
@pytest.mark.xfail(reason="the task error doesn't propagate")
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3a_exception(plugin):
    """An input left as attr.NOTHING should make the task raise TypeError."""
    wf = Workflow(name="wf_3", input_spec=["x", "y"])
    wf.add(fun_addvar_none(name="addvar", a=wf.lzin.x, b=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.addvar.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = attr.NOTHING
    wf.plugin = plugin

    with pytest.raises(TypeError) as excinfo:
        with Submitter(plugin=plugin) as runner:
            runner(wf)
    assert "unsupported" in str(excinfo.value)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_4(plugin):
    """A task input left unset falls back to the function's default value."""
    wf = Workflow(name="wf_4", input_spec=["x", "y"])
    wf.add(fun_addvar_default(name="addvar", a=wf.lzin.x))
    wf.add(add2(name="add2", x=wf.addvar.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.output_dir.exists()
    res = wf.result()
    assert res.output.out == 5
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_4a(plugin):
    """An unset workflow input forwarded to the task still uses the
    function's default value."""
    wf = Workflow(name="wf_4a", input_spec=["x", "y"])
    wf.add(fun_addvar_default(name="addvar", a=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.addvar.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.output_dir.exists()
    res = wf.result()
    assert res.output.out == 5
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_1(plugin):
    """One task; the splitter lives on the workflow level."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.split("x")
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    checksum_before = wf.checksum
    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.checksum == checksum_before
    res = wf.result()
    # one result per split value of x
    assert res[0].output.out == 3
    assert res[1].output.out == 4
    # every split gets its own output directory
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_1_call_subm(plugin):
    """Workflow-level splitter, run via wf.__call__ with a submitter."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.split("x")
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        wf(submitter=runner)

    res = wf.result()
    assert res[0].output.out == 3
    assert res[1].output.out == 4
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_1_call_plug(plugin):
    """Workflow-level splitter, run via wf.__call__ with a plugin name."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.split("x")
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    wf(plugin=plugin)

    res = wf.result()
    assert res[0].output.out == 3
    assert res[1].output.out == 4
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_noinput_1(plugin):
    """Workflow-level splitter over an empty input list yields no results."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.split("x")
    wf.inputs.x = []
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    checksum_before = wf.checksum
    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.checksum == checksum_before
    assert wf.result() == []
    # no splits, so no output directories either
    assert wf.output_dir == []
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_1(plugin):
    """One task; the splitter lives on the task level."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x).split("x"))
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    checksum_before = wf.checksum
    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.checksum == checksum_before
    res = wf.result()
    assert res.output.out == [3, 4]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_updatespl_1(plugin):
    """A task-level splitter can be added *after* calling wf.add()."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    # splitter set on the node after it was added to the workflow
    wf.add2.split("x")
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [3, 4]
    # (a duplicated output_dir assertion was removed)
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_updatespl_1a(plugin):
    """Task created before wf.add(); its splitter is set afterwards."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    task_add2 = add2(name="add2", x=wf.lzin.x)
    wf.add(task_add2)
    # splitter set on the already-added task object
    task_add2.split("x")
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [3, 4]
    # (a duplicated output_dir assertion was removed)
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_updateinp_1(plugin):
    """Task input rebound (x -> wf.lzin.y) after the task was added."""
    wf = Workflow(name="wf_spl_1", input_spec=["x", "y"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.inputs.x = [1, 2]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    wf.add2.split("x")
    # reconnect the task input to the other workflow input
    wf.add2.inputs.x = wf.lzin.y
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [13, 14]
    # (a duplicated output_dir assertion was removed)
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_noinput_1(plugin):
    """Task-level splitter over an empty input list yields an empty output."""
    wf = Workflow(name="wf_spl_1", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x).split("x"))
    wf.inputs.x = []
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    checksum_before = wf.checksum
    with Submitter(plugin=plugin) as runner:
        runner(wf)

    assert wf.checksum == checksum_before
    res = wf.result()
    assert res.output.out == []
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_2(plugin):
    """One task; workflow-level splitter plus combiner over the same input."""
    wf = Workflow(name="wf_st_2", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x))
    wf.split("x").combine(combiner="x")
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    assert res[0].output.out == 3
    assert res[1].output.out == 4
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_2(plugin):
    """One task; splitter plus combiner both on the task level."""
    wf = Workflow(name="wf_ndst_2", input_spec=["x"])
    wf.add(add2(name="add2", x=wf.lzin.x).split("x").combine(combiner="x"))
    wf.inputs.x = [1, 2]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    assert res.output.out == [3, 4]
    assert wf.output_dir.exists()
# workflows with structures A -> B
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_3(plugin):
    """Two tasks (A -> B); scalar splitter on the workflow level."""
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.inputs.x = [1, 2]
    wf.inputs.y = [11, 12]
    wf.split(("x", "y"))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # (1*11)+2 == 13 and (2*12)+2 == 26
    assert res[0].output.out == 13
    assert res[1].output.out == 26
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_3(plugin):
    """Two tasks (A -> B); scalar splitter on the first task."""
    wf = Workflow(name="wf_ndst_3", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split(("x", "y")))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.inputs.x = [1, 2]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # (1*11)+2 == 13 and (2*12)+2 == 26
    assert res.output.out == [13, 26]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_4(plugin):
    """Two tasks; workflow-level scalar splitter plus combiner."""
    wf = Workflow(name="wf_st_4", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.split(("x", "y"), x=[1, 2], y=[11, 12])
    wf.combine("x")
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # (1*11)+2 == 13 and (2*12)+2 == 26
    assert res[0].output.out == 13
    assert res[1].output.out == 26
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_4(plugin):
    """Two tasks; scalar splitter and combiner both on the task level."""
    wf = Workflow(name="wf_ndst_4", input_spec=["a", "b"])
    wf.add(multiply(name="mult", x=wf.lzin.a, y=wf.lzin.b).split(("x", "y")))
    wf.add(add2(name="add2", x=wf.mult.lzout.out).combine("mult.x"))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    wf.inputs.a = [1, 2]
    wf.inputs.b = [11, 12]

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # (1*11)+2 == 13 and (2*12)+2 == 26
    assert res.output.out == [13, 26]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_5(plugin):
    """Two tasks; workflow-level outer splitter, no combiner."""
    wf = Workflow(name="wf_st_5", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.split(["x", "y"], x=[1, 2], y=[11, 12])
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # x*y + 2 for every (x, y) pair in the outer product
    assert res[0].output.out == 13
    assert res[1].output.out == 14
    assert res[2].output.out == 24
    assert res[3].output.out == 26
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_5(plugin):
    """Two tasks; outer splitter on the first task, no combiner."""
    wf = Workflow(name="wf_ndst_5", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split(["x", "y"]))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.inputs.x = [1, 2]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # x*y + 2 for every (x, y) pair in the outer product
    assert res.output.out[:4] == [13, 14, 24, 26]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_6(plugin):
    """Two tasks; workflow-level outer splitter plus combiner over x."""
    wf = Workflow(name="wf_st_6", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.split(["x", "y"], x=[1, 2, 3], y=[11, 12])
    wf.combine("x")
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # outer index: y value; inner index: x value (combined dimension)
    expected = {(0, 0): 13, (0, 1): 24, (0, 2): 35,
                (1, 0): 14, (1, 1): 26, (1, 2): 38}
    for (i, j), value in expected.items():
        assert res[i][j].output.out == value
    assert wf.output_dir
    assert all(d.exists() for d in wf.output_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_6(plugin):
    """Two tasks; outer splitter and combiner both on the task level."""
    wf = Workflow(name="wf_ndst_6", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split(["x", "y"]))
    wf.add(add2(name="add2", x=wf.mult.lzout.out).combine("mult.x"))
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(wf)

    res = wf.result()
    # one list per y value, combined over x
    assert res.output.out[0] == [13, 24, 35]
    assert res.output.out[1] == [14, 26, 38]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_7(plugin):
    """Workflow with two tasks, splitter and (full) combiner on the first task only."""
    # NOTE(review): workflow name "wf_ndst_6" looks copy-pasted from the previous
    # test — confirm whether a distinct name was intended
    wf = Workflow(name="wf_ndst_6", input_spec=["x", "y"])
    # split and fully combine on the same field, so "iden" receives a plain list
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split("x").combine("x"))
    wf.add(identity(name="iden", x=wf.mult.lzout.out))
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = 11
    wf.set_output([("out", wf.iden.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [11, 22, 33]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_8(plugin):
    """Workflow with two tasks, outer splitter and (partial) combiner on the first task only."""
    # NOTE(review): workflow name "wf_ndst_6" looks copy-pasted — confirm intended
    wf = Workflow(name="wf_ndst_6", input_spec=["x", "y"])
    # outer split over (x, y) then partial combine over x: "iden" receives
    # one list of x-results per remaining y value
    wf.add(
        multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split(["x", "y"]).combine("x")
    )
    wf.add(identity(name="iden", x=wf.mult.lzout.out))
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.iden.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out[0] == [11, 22, 33]
    assert results.output.out[1] == [12, 24, 36]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_9(plugin):
    """Workflow with two tasks, outer splitter and (full) combiner on the first task only."""
    # NOTE(review): workflow name "wf_ndst_6" looks copy-pasted — confirm intended
    wf = Workflow(name="wf_ndst_6", input_spec=["x", "y"])
    # full combine over both split fields flattens all 6 states into one list
    wf.add(
        multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y)
        .split(["x", "y"])
        .combine(["x", "y"])
    )
    wf.add(identity(name="iden", x=wf.mult.lzout.out))
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.iden.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [11, 12, 22, 24, 33, 36]
    # checking the output directory
    assert wf.output_dir.exists()
# workflows with structures A -> C, B -> C
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_st_1(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter on the workflow level.

    Structure: add2x -> mult <- add2y; outer split over (x, y) yields 6 states.
    """
    wf = Workflow(name="wf_st_7", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x))
    wf.add(add2(name="add2y", x=wf.lzin.y))
    wf.add(multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out))
    wf.split(["x", "y"], x=[1, 2, 3], y=[11, 12])
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results) == 6
    # e.g. (1+2) * (11+2) == 39 for the first state
    assert results[0].output.out == 39
    assert results[1].output.out == 42
    assert results[5].output.out == 70
    # checking all directories (one per state)
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_ndst_1(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitters on the task level.

    Each upstream add2 task splits over its own input; "mult" inherits the
    combined state (3 * 2 = 6 values) without a splitter of its own.
    """
    wf = Workflow(name="wf_ndst_7", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x).split("x"))
    wf.add(add2(name="add2y", x=wf.lzin.y).split("x"))
    wf.add(multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out))
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results.output.out) == 6
    assert results.output.out == [39, 42, 52, 56, 65, 70]
    # checking the output directory (workflow itself is stateless)
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_st_2(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter and partial combiner on the workflow level.

    Combining over "x" leaves the y dimension: 2 groups of 3 results.
    """
    wf = Workflow(name="wf_st_8", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x))
    wf.add(add2(name="add2y", x=wf.lzin.y))
    wf.add(multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out))
    wf.split(["x", "y"], x=[1, 2, 3], y=[11, 12]).combine("x")
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results) == 2
    # first index: y value kept after combining; second index: combined x values
    assert results[0][0].output.out == 39
    assert results[0][1].output.out == 52
    assert results[0][2].output.out == 65
    assert results[1][0].output.out == 42
    assert results[1][1].output.out == 56
    assert results[1][2].output.out == 70
    # checking all directories
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_ndst_2(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter and partial combiner on the task level.

    "mult" combines over the first upstream field ("add2x.x"), leaving one
    list per add2y state.
    """
    wf = Workflow(name="wf_ndst_8", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x).split("x"))
    wf.add(add2(name="add2y", x=wf.lzin.y).split("x"))
    wf.add(
        multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out).combine(
            "add2x.x"
        )
    )
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results.output.out) == 2
    assert results.output.out[0] == [39, 52, 65]
    assert results.output.out[1] == [42, 56, 70]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_st_3(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter and partial combiner (over the second field) on the workflow level.

    Combining over "y" leaves the x dimension: 3 groups of 2 results.
    """
    wf = Workflow(name="wf_st_9", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x))
    wf.add(add2(name="add2y", x=wf.lzin.y))
    wf.add(multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out))
    wf.split(["x", "y"], x=[1, 2, 3], y=[11, 12]).combine("y")
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results) == 3
    # first index: x value kept after combining; second index: combined y values
    assert results[0][0].output.out == 39
    assert results[0][1].output.out == 42
    assert results[1][0].output.out == 52
    assert results[1][1].output.out == 56
    assert results[2][0].output.out == 65
    assert results[2][1].output.out == 70
    # checking all directories
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_ndst_3(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter and partial combiner (over the second upstream field) on the task level.
    """
    wf = Workflow(name="wf_ndst_9", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x).split("x"))
    wf.add(add2(name="add2y", x=wf.lzin.y).split("x"))
    # combine over "add2y.x": one list of y-results per add2x state
    wf.add(
        multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out).combine(
            "add2y.x"
        )
    )
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results.output.out) == 3
    assert results.output.out[0] == [39, 42]
    assert results.output.out[1] == [52, 56]
    assert results.output.out[2] == [65, 70]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_st_4(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter and full combiner on the workflow level.

    Combining over both fields flattens all 6 states into a flat result list.
    """
    wf = Workflow(name="wf_st_10", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x))
    wf.add(add2(name="add2y", x=wf.lzin.y))
    wf.add(multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out))
    wf.split(["x", "y"], x=[1, 2, 3], y=[11, 12]).combine(["x", "y"])
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results) == 6
    assert results[0].output.out == 39
    assert results[1].output.out == 42
    assert results[2].output.out == 52
    assert results[3].output.out == 56
    assert results[4].output.out == 65
    assert results[5].output.out == 70
    # checking all directories
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_3nd_ndst_4(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter and full combiner on the task level.
    """
    wf = Workflow(name="wf_ndst_10", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x).split("x"))
    wf.add(add2(name="add2y", x=wf.lzin.y).split("x"))
    # full combine over both upstream split fields -> flat list of 6 values
    wf.add(
        multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out).combine(
            ["add2x.x", "add2y.x"]
        )
    )
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # assert wf.output_dir.exists()
    results = wf.result()
    assert len(results.output.out) == 6
    assert results.output.out == [39, 42, 52, 56, 65, 70]
    # checking the output directory
    assert wf.output_dir.exists()
# workflows with Left and Right part in splitters A -> B (L&R parts of the splitter)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstLR_1(plugin):
    """Test workflow with 2 tasks, splitters on the task level.

    The second task declares only its own simple splitter ("y"); the Left part
    inherited from the first task ("_add2") should be prepended automatically.
    """
    # NOTE(review): workflow name "wf_ndst_3" looks copy-pasted — confirm intended
    wf = Workflow(name="wf_ndst_3", input_spec=["x", "y"])
    wf.add(add2(name="add2", x=wf.lzin.x).split("x"))
    wf.add(multiply(name="mult", x=wf.add2.lzout.out, y=wf.lzin.y).split("y"))
    wf.inputs.x = [1, 2]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # checking if the splitter is created properly (Left "_add2" auto-added)
    assert wf.mult.state.splitter == ["_add2", "mult.y"]
    assert wf.mult.state.splitter_rpn == ["add2.x", "mult.y", "*"]
    results = wf.result()
    # expected: [({"add2.x": 1, "mult.y": 11}, 33), ({"add2.x": 1, "mult.y": 12}, 36),
    # ({"add2.x": 2, "mult.y": 11}, 44), ({"add2.x": 2, "mult.y": 12}, 48)]
    assert results.output.out == [33, 36, 44, 48]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstLR_1a(plugin):
    """Test workflow with 2 tasks, splitters on the task level.

    The second task spells out both the Left part (from the previous state,
    "_add2") and the Right part (its own splitter, "y") explicitly; the result
    must match the auto-derived splitter of test_wf_ndstLR_1.
    """
    wf = Workflow(name="wf_ndst_3", input_spec=["x", "y"])
    wf.add(add2(name="add2", x=wf.lzin.x).split("x"))
    wf.add(
        multiply(name="mult", x=wf.add2.lzout.out, y=wf.lzin.y).split(["_add2", "y"])
    )
    wf.inputs.x = [1, 2]
    wf.inputs.y = [11, 12]
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # checking if the splitter is created properly
    assert wf.mult.state.splitter == ["_add2", "mult.y"]
    assert wf.mult.state.splitter_rpn == ["add2.x", "mult.y", "*"]
    results = wf.result()
    # expected: [({"add2.x": 1, "mult.y": 11}, 33), ({"add2.x": 1, "mult.y": 12}, 36),
    # ({"add2.x": 2, "mult.y": 11}, 44), ({"add2.x": 2, "mult.y": 12}, 48)]
    assert results.output.out == [33, 36, 44, 48]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstLR_2(plugin):
    """Test workflow with 2 tasks, splitters on the task level.

    The second task has its own outer splitter over (b, c); the Left part from
    the first task ("_add2") should be added automatically, giving
    3 * 2 * 2 = 12 states.
    """
    wf = Workflow(name="wf_ndst_3", input_spec=["x", "y", "z"])
    wf.add(add2(name="add2", x=wf.lzin.x).split("x"))
    wf.add(
        fun_addvar3(name="addvar", a=wf.add2.lzout.out, b=wf.lzin.y, c=wf.lzin.z).split(
            ["b", "c"]
        )
    )
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [10, 20]
    wf.inputs.z = [100, 200]
    wf.set_output([("out", wf.addvar.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # checking if the splitter is created properly (Left part auto-prepended)
    assert wf.addvar.state.splitter == ["_add2", ["addvar.b", "addvar.c"]]
    assert wf.addvar.state.splitter_rpn == ["add2.x", "addvar.b", "addvar.c", "*", "*"]
    results = wf.result()
    # expected: [({"add2.x": 1, "mult.b": 10, "mult.c": 100}, 113),
    # ({"add2.x": 1, "mult.b": 10, "mult.c": 200}, 213),
    # ({"add2.x": 1, "mult.b": 20, "mult.c": 100}, 123),
    # ({"add2.x": 1, "mult.b": 20, "mult.c": 200}, 223),
    # ...]
    assert results.output.out == [
        113,
        213,
        123,
        223,
        114,
        214,
        124,
        224,
        115,
        215,
        125,
        225,
    ]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstLR_2a(plugin):
    """Test workflow with 2 tasks, splitters on the task level.

    The second task spells out the Left part (from the previous state) and the
    Right part (its own outer splitter) explicitly; the derived state must
    match test_wf_ndstLR_2.
    """
    wf = Workflow(name="wf_ndst_3", input_spec=["x", "y", "z"])
    wf.add(add2(name="add2", x=wf.lzin.x).split("x"))
    wf.add(
        fun_addvar3(name="addvar", a=wf.add2.lzout.out, b=wf.lzin.y, c=wf.lzin.z).split(
            ["_add2", ["b", "c"]]
        )
    )
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = [10, 20]
    wf.inputs.z = [100, 200]
    wf.set_output([("out", wf.addvar.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # checking if the splitter is created properly
    assert wf.addvar.state.splitter == ["_add2", ["addvar.b", "addvar.c"]]
    assert wf.addvar.state.splitter_rpn == ["add2.x", "addvar.b", "addvar.c", "*", "*"]
    results = wf.result()
    # expected: [({"add2.x": 1, "mult.b": 10, "mult.c": 100}, 113),
    # ({"add2.x": 1, "mult.b": 10, "mult.c": 200}, 213),
    # ({"add2.x": 1, "mult.b": 20, "mult.c": 100}, 123),
    # ({"add2.x": 1, "mult.b": 20, "mult.c": 200}, 223),
    # ...]
    assert results.output.out == [
        113,
        213,
        123,
        223,
        114,
        214,
        124,
        224,
        115,
        215,
        125,
        225,
    ]
    # checking the output directory
    assert wf.output_dir.exists()
# workflows with inner splitters A -> B (inner spl)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstinner_1(plugin):
    """Workflow with 2 tasks; the second task has an inner splitter.

    "list" produces a list from a scalar input and "add2" splits over that
    (runtime-sized) list.
    """
    wf = Workflow(name="wf_st_3", input_spec=["x"])
    wf.add(list_output(name="list", x=wf.lzin.x))
    # inner splitter: the split field is an upstream output, not a workflow input
    wf.add(add2(name="add2", x=wf.list.lzout.out).split("x"))
    wf.inputs.x = 1
    wf.set_output([("out_list", wf.list.lzout.out), ("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.add2.state.splitter == "add2.x"
    assert wf.add2.state.splitter_rpn == ["add2.x"]
    results = wf.result()
    assert results.output.out_list == [1, 2, 3]
    assert results.output.out == [3, 4, 5]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstinner_2(plugin):
    """Workflow with 2 tasks; the second task has two inputs and an inner
    splitter over one of them (the upstream list output).
    """
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wf.add(list_output(name="list", x=wf.lzin.x))
    # split only over "x" (inner, from the list output); "y" stays a scalar
    wf.add(multiply(name="mult", x=wf.list.lzout.out, y=wf.lzin.y).split("x"))
    wf.inputs.x = 1
    wf.inputs.y = 10
    wf.set_output([("out_list", wf.list.lzout.out), ("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.mult.state.splitter == "mult.x"
    assert wf.mult.state.splitter_rpn == ["mult.x"]
    results = wf.result()
    assert results.output.out_list == [1, 2, 3]
    assert results.output.out == [10, 20, 30]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstinner_3(plugin):
    """Workflow with 2 tasks; the second task has two inputs and an outer
    splitter that includes an inner field (the upstream list output).
    """
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wf.add(list_output(name="list", x=wf.lzin.x))
    # outer split over (inner x) * y -> 3 * 2 = 6 states
    wf.add(multiply(name="mult", x=wf.list.lzout.out, y=wf.lzin.y).split(["x", "y"]))
    wf.inputs.x = 1
    wf.inputs.y = [10, 100]
    wf.set_output([("out_list", wf.list.lzout.out), ("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.mult.state.splitter == ["mult.x", "mult.y"]
    assert wf.mult.state.splitter_rpn == ["mult.x", "mult.y", "*"]
    results = wf.result()
    assert results.output.out_list == [1, 2, 3]
    assert results.output.out == [10, 100, 20, 200, 30, 300]
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstinner_4(plugin):
    """Workflow with 3 tasks; the second task has an inner splitter over one of
    its inputs, and the third task has no splitter of its own (it inherits the
    state from "mult").
    """
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wf.add(list_output(name="list", x=wf.lzin.x))
    wf.add(multiply(name="mult", x=wf.list.lzout.out, y=wf.lzin.y).split("x"))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.inputs.x = 1
    wf.inputs.y = 10
    wf.set_output([("out_list", wf.list.lzout.out), ("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.mult.state.splitter == "mult.x"
    assert wf.mult.state.splitter_rpn == ["mult.x"]
    # "add2" gets a Left-only splitter inherited from "mult"
    assert wf.add2.state.splitter == "_mult"
    assert wf.add2.state.splitter_rpn == ["mult.x"]
    results = wf.result()
    assert results.output.out_list == [1, 2, 3]
    assert results.output.out == [12, 22, 32]
    assert wf.output_dir.exists()
# workflow that have some single values as the input
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_singl_1(plugin):
    """Workflow with two tasks; only one input is in the splitter and combiner,
    the other input ("y") stays a single value shared by all states.
    """
    wf = Workflow(name="wf_st_5", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.split("x", x=[1, 2], y=11)
    wf.combine("x")
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results[0].output.out == 13
    assert results[1].output.out == 24
    # checking all directories
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_singl_1(plugin):
    """Workflow with two tasks, splitter and combiner on the task level;
    only one input is part of the splitter, the other is a single value.
    """
    wf = Workflow(name="wf_ndst_5", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split("x"))
    wf.add(add2(name="add2", x=wf.mult.lzout.out).combine("mult.x"))
    wf.inputs.x = [1, 2]
    wf.inputs.y = 11
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [13, 24]
    # checking the output directory (stateless workflow -> single dir)
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_st_singl_2(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter on the workflow level; only one input is part of the splitter,
    the other is a single value.
    """
    wf = Workflow(name="wf_st_6", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x))
    wf.add(add2(name="add2y", x=wf.lzin.y))
    wf.add(multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out))
    wf.split("x", x=[1, 2, 3], y=11)
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results) == 3
    # (x+2) * (11+2) for x in [1, 2, 3]
    assert results[0].output.out == 39
    assert results[1].output.out == 52
    assert results[2].output.out == 65
    # checking all directories
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndst_singl_2(plugin):
    """Workflow with three tasks, third one connected to the two previous tasks,
    splitter on the task level; only one input is part of the splitter,
    the other is a single value.
    """
    wf = Workflow(name="wf_ndst_6", input_spec=["x", "y"])
    wf.add(add2(name="add2x", x=wf.lzin.x).split("x"))
    wf.add(add2(name="add2y", x=wf.lzin.y))
    wf.add(multiply(name="mult", x=wf.add2x.lzout.out, y=wf.add2y.lzout.out))
    wf.inputs.x = [1, 2, 3]
    wf.inputs.y = 11
    wf.set_output([("out", wf.mult.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert len(results.output.out) == 3
    assert results.output.out == [39, 52, 65]
    # checking the output directory
    assert wf.output_dir.exists()
# workflows with structures wf(A)
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_1(plugin):
    """Workflow used as a node: the inner workflow-node holds one task and has
    no splitter; its input is set directly on the inner workflow.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x"])
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    wfnd.inputs.x = 2
    wf = Workflow(name="wf", input_spec=["x"])
    # the inner workflow is added like any other task
    wf.add(wfnd)
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == 4
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_wfinp_1(plugin):
    """Workflow used as a node: the inner workflow-node holds one task and has
    no splitter; the input is set on the MAIN workflow and connected lazily.
    """
    wf = Workflow(name="wf", input_spec=["x"])
    # inner workflow input wired to the outer workflow's lazy input
    wfnd = Workflow(name="wfnd", input_spec=["x"], x=wf.lzin.x)
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    wf.add(wfnd)
    wf.inputs.x = 2
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    # the checksum must be stable across execution
    checksum_before = wf.checksum
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.checksum == checksum_before
    results = wf.result()
    assert results.output.out == 4
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_wfndupdate(plugin):
    """Workflow used as a node: the inner workflow-node initially carries a
    concrete input (x=2) which is then overwritten with a lazy connection to
    the main workflow's input (x=3) before being added.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x"], x=2)
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    wf = Workflow(name="wf", input_spec=["x"], x=3)
    # rebind the inner input to the outer workflow's lazy input
    wfnd.inputs.x = wf.lzin.x
    wf.add(wfnd)
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    # 3 + 2: outer x wins over the inner workflow's original x=2
    assert results.output.out == 5
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_wfndupdate_rerun(plugin):
    """Workflow used as a node: the inner workflow is RUN first on its own,
    then its input is rebound to the main workflow's input and the main
    workflow is executed; finally one more workflow layer is wrapped on top.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x"], x=2)
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    # first run the inner workflow standalone (so it already has results)
    with Submitter(plugin=plugin) as sub:
        sub(wfnd)
    wf = Workflow(name="wf", input_spec=["x"], x=3)
    # trying to set before
    wfnd.inputs.x = wf.lzin.x
    wf.add(wfnd)
    # trying to set after add...
    wf.wfnd.inputs.x = wf.lzin.x
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    # 3 + 2: the outer input replaces the previously-run inner input
    assert results.output.out == 5
    assert wf.output_dir.exists()
    # adding another layer of workflow
    wf_o = Workflow(name="wf_o", input_spec=["x"], x=4)
    wf.inputs.x = wf_o.lzin.x
    wf_o.add(wf)
    wf_o.set_output([("out", wf_o.wf.lzout.out)])
    wf_o.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf_o)
    results = wf_o.result()
    # 4 + 2: the outermost input propagates through both layers
    assert results.output.out == 6
    assert wf_o.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_st_1(plugin):
    """Workflow used as a node with one task; the splitter is set on the inner
    workflow-node (wfnd), not on the main workflow.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x"])
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    # split set on the inner workflow before it is added to the outer one
    wfnd.split("x")
    wfnd.inputs.x = [2, 4]
    wf = Workflow(name="wf", input_spec=["x"])
    wf.add(wfnd)
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    # checksum must be stable across execution
    checksum_before = wf.checksum
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.checksum == checksum_before
    results = wf.result()
    assert results.output.out == [4, 6]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_st_updatespl_1(plugin):
    """Workflow used as a node with one task; the splitter for the inner
    workflow-node is set AFTER it has been added to the main workflow.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x"])
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    wfnd.inputs.x = [2, 4]
    wf = Workflow(name="wf", input_spec=["x"])
    wf.add(wfnd)
    # split applied after wf.add — order is the point of this test
    wfnd.split("x")
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [4, 6]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_ndst_1(plugin):
    """Workflow used as a node with one task; the splitter is set on the task
    INSIDE the inner workflow-node.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x"])
    wfnd.add(add2(name="add2", x=wfnd.lzin.x).split("x"))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    # TODO: without this the test is failing
    wfnd.plugin = plugin
    wfnd.inputs.x = [2, 4]
    wf = Workflow(name="wf", input_spec=["x"])
    wf.add(wfnd)
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [4, 6]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_ndst_updatespl_1(plugin):
    """Workflow used as a node with one task; the splitter for the inner task
    is added AFTER the inner workflow has been added to the main workflow.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x"])
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    # TODO: without this the test is failing
    wfnd.plugin = plugin
    wfnd.inputs.x = [2, 4]
    wf = Workflow(name="wf", input_spec=["x"])
    wf.add(wfnd)
    # split applied to the inner task after wf.add — order is the point
    wfnd.add2.split("x")
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    results = wf.result()
    assert results.output.out == [4, 6]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_wfst_1(plugin):
    """Workflow used as a node with one task; the splitter is set on the MAIN
    workflow, so each state runs the whole inner workflow.
    """
    wf = Workflow(name="wf", input_spec=["x"])
    wfnd = Workflow(name="wfnd", input_spec=["x"], x=wf.lzin.x)
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    wf.add(wfnd)
    wf.split("x")
    wf.inputs.x = [2, 4]
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # assert wf.output_dir.exists()
    results = wf.result()
    assert results[0].output.out == 4
    assert results[1].output.out == 6
    # checking all directories (one per workflow state)
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
# workflows with structures wf(A) -> B
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_st_2(plugin):
    """Workflow used as the first node; the main workflow has two tasks and the
    scalar splitter is set on the inner workflow-node.
    """
    wfnd = Workflow(name="wfnd", input_spec=["x", "y"])
    wfnd.add(multiply(name="mult", x=wfnd.lzin.x, y=wfnd.lzin.y))
    wfnd.set_output([("out", wfnd.mult.lzout.out)])
    # scalar (dot-product) splitter: pairs (2, 1) and (4, 10)
    wfnd.split(("x", "y"))
    wfnd.inputs.x = [2, 4]
    wfnd.inputs.y = [1, 10]
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wf.add(wfnd)
    wf.add(add2(name="add2", x=wf.wfnd.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # assert wf.output_dir.exists()
    results = wf.result()
    # (2*1)+2 and (4*10)+2
    assert results.output.out == [4, 42]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_wfst_2(plugin):
    """Workflow used as the first node; the main workflow has two tasks and the
    scalar splitter is set on the MAIN workflow.
    """
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wfnd = Workflow(name="wfnd", input_spec=["x", "y"], x=wf.lzin.x, y=wf.lzin.y)
    wfnd.add(multiply(name="mult", x=wfnd.lzin.x, y=wfnd.lzin.y))
    wfnd.set_output([("out", wfnd.mult.lzout.out)])
    wf.add(wfnd)
    wf.add(add2(name="add2", x=wf.wfnd.lzout.out))
    # scalar (dot-product) splitter on the outer workflow
    wf.split(("x", "y"))
    wf.inputs.x = [2, 4]
    wf.inputs.y = [1, 10]
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # assert wf.output_dir.exists()
    results = wf.result()
    assert results[0].output.out == 4
    assert results[1].output.out == 42
    # checking all directories
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
# workflows with structures A -> wf(B)
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_ndst_3(plugin):
    """Workflow used as the SECOND node; the main workflow has two tasks and
    the scalar splitter is set on the first (regular) task.
    """
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split(("x", "y")))
    wf.inputs.x = [2, 4]
    wf.inputs.y = [1, 10]
    # the inner workflow consumes the split output of "mult"
    wfnd = Workflow(name="wfnd", input_spec=["x"], x=wf.mult.lzout.out)
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    wf.add(wfnd)
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # assert wf.output_dir.exists()
    results = wf.result()
    assert results.output.out == [4, 42]
    # checking the output directory
    assert wf.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wfasnd_wfst_3(plugin):
    """Workflow used as the SECOND node; the main workflow has two tasks and
    the scalar splitter is set on the MAIN workflow.
    """
    wf = Workflow(name="wf_st_3", input_spec=["x", "y"])
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.inputs.x = [2, 4]
    wf.inputs.y = [1, 10]
    wf.split(("x", "y"))
    wfnd = Workflow(name="wfnd", input_spec=["x"], x=wf.mult.lzout.out)
    wfnd.add(add2(name="add2", x=wfnd.lzin.x))
    wfnd.set_output([("out", wfnd.add2.lzout.out)])
    wf.add(wfnd)
    wf.set_output([("out", wf.wfnd.lzout.out)])
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    # assert wf.output_dir.exists()
    results = wf.result()
    assert results[0].output.out == 4
    assert results[1].output.out == 42
    # checking all directories
    assert wf.output_dir
    for odir in wf.output_dir:
        assert odir.exists()
# Testing caching
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachedir(plugin, tmpdir):
    """Stateless workflow with an explicit cache_dir (under pytest's tmpdir)."""
    cache_dir = tmpdir.mkdir("test_wf_cache_1")
    wf = Workflow(name="wf_2", input_spec=["x", "y"], cache_dir=cache_dir)
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.output_dir.exists()
    results = wf.result()
    # (2*3) + 2
    assert 8 == results.output.out
    # clean up the explicit cache dir so it can't leak into other tests
    shutil.rmtree(cache_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachedir_relativepath(tmpdir, plugin):
    """Stateless workflow with cache_dir given as a RELATIVE path.

    chdir into tmpdir first so the relative path resolves somewhere disposable.
    """
    tmpdir.chdir()
    cache_dir = "test_wf_cache_2"
    wf = Workflow(name="wf_2", input_spec=["x", "y"], cache_dir=cache_dir)
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2(name="add2", x=wf.mult.lzout.out))
    wf.set_output([("out", wf.add2.lzout.out)])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin
    with Submitter(plugin=plugin) as sub:
        sub(wf)
    assert wf.output_dir.exists()
    results = wf.result()
    assert 8 == results.output.out
    # clean up the relative cache dir created in the current tmpdir
    shutil.rmtree(cache_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachelocations(plugin, tmpdir):
    """
    Two identical wfs with provided cache_dir;
    the second wf has cache_locations pointing at the first wf's cache and
    should reuse its results instead of recomputing.

    Timing is used as the signal: add2_wait sleeps, so a real run takes > 3 s
    while a cache hit returns almost immediately.
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")
    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin
    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0
    results1 = wf1.result()
    assert 8 == results1.output.out
    # second, identical workflow: same name/inputs so the checksum matches wf1
    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.inputs.y = 3
    wf2.plugin = plugin
    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0
    results2 = wf2.result()
    assert 8 == results2.output.out
    # checking execution time: first run does real work, second is a cache hit
    # NOTE(review): wall-clock thresholds may be flaky on slow/loaded CI hosts
    assert t1 > 3
    assert t2 < 0.5
    # checking if the second wf didn't run again
    assert wf1.output_dir.exists()
    assert not wf2.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachelocations_forcererun(plugin, tmpdir):
    """
    Two identical wfs with provided cache_dir;
    the second wf has cache_locations,
    but submitter is called with rerun=True, so should recompute
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 8 == results1.output.out

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.inputs.y = 3
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        # rerun=True overrides the cache_locations lookup
        sub(wf2, rerun=True)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 8 == results2.output.out

    # checking execution time: both runs do the real work
    assert t1 > 3
    assert t2 > 3

    # checking that the second wf ran again (rerun=True)
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachelocations_wftaskrerun_propagateTrue(plugin, tmpdir):
    """
    Two identical wfs with provided cache_dir and cache_locations for the second one;
    submitter doesn't have rerun, but the second wf has rerun=True,
    propagate_rerun is True as default, so everything should be rerun
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 8 == results1.output.out

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
        rerun=True,  # wf has to be rerun (default for propagate_rerun is True)
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.inputs.y = 3
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 8 == results2.output.out

    # checking if the second wf runs again
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()

    # everything has to be recomputed: each cache dir holds both task results
    assert len(list(Path(cache_dir1).glob("F*"))) == 2
    assert len(list(Path(cache_dir2).glob("F*"))) == 2

    # both runs do the real work (add2_wait sleeps a few seconds)
    assert t1 > 3
    assert t2 > 3
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachelocations_wftaskrerun_propagateFalse(plugin, tmpdir):
    """
    Two identical wfs with provided cache_dir and cache_locations for the second one;
    submitter doesn't have rerun, but the second wf has rerun=True,
    propagate_rerun is set to False, so wf will be triggered,
    but tasks will not have rerun, so will use the previous results
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 8 == results1.output.out

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
        rerun=True,  # wf has to be rerun
        propagate_rerun=False,  # but rerun doesn't propagate to the tasks
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.inputs.y = 3
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 8 == results2.output.out

    # checking if the second wf runs again
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()

    # tasks should not be recomputed: the second cache dir stays empty
    assert len(list(Path(cache_dir1).glob("F*"))) == 2
    assert len(list(Path(cache_dir2).glob("F*"))) == 0
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachelocations_taskrerun_wfrerun_propagateFalse(plugin, tmpdir):
    """
    Two identical wfs with provided cache_dir, and cache_locations for the second wf;
    submitter doesn't have rerun, but wf has rerun=True,
    since propagate_rerun=False, only tasks that have rerun=True will be rerun
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 8 == results1.output.out

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
        rerun=True,
        propagate_rerun=False,  # rerun will not be propagated to each task
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    # rerun on the task level needed (wf.propagate_rerun is False)
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out, rerun=True))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.inputs.y = 3
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 8 == results2.output.out

    # checking that the second wf ran again
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()

    # only the second task (with task-level rerun=True) should be recomputed
    assert len(list(Path(cache_dir1).glob("F*"))) == 2
    assert len(list(Path(cache_dir2).glob("F*"))) == 1

    # add2_wait reruns in both cases, so both runs take a few seconds
    assert t1 > 3
    assert t2 > 3
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_nodecachelocations(plugin, tmpdir):
    """
    Two wfs with different input, but the second node has the same input;
    the second wf has cache_locations and should recompute the wf,
    but without recomputing the second node
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x"], cache_dir=cache_dir1)
    wf1.add(ten(name="ten", x=wf1.lzin.x))
    wf1.add(add2_wait(name="add2", x=wf1.ten.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 12 == results1.output.out

    # different wf input (x=2 vs x=3), but ten presumably yields the same
    # output for both, so the add2 node's input hash matches the cache
    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(ten(name="ten", x=wf2.lzin.x))
    wf2.add(add2_wait(name="add2", x=wf2.ten.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 12 == results2.output.out

    # checking if the second wf runs again, but runs only one task
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()

    # the second wf should rerun one task only (the "ten" node)
    assert len(list(Path(cache_dir1).glob("F*"))) == 2
    assert len(list(Path(cache_dir2).glob("F*"))) == 1
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_nodecachelocations_upd(plugin, tmpdir):
    """
    Two wfs with different input, but the second node has the same input;
    the second wf has cache_locations (set after adding tasks) and should recompute,
    but without recomputing the second node
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x"], cache_dir=cache_dir1)
    wf1.add(ten(name="ten", x=wf1.lzin.x))
    wf1.add(add2_wait(name="add2", x=wf1.ten.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 12 == results1.output.out

    wf2 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir2)
    wf2.add(ten(name="ten", x=wf2.lzin.x))
    wf2.add(add2_wait(name="add2", x=wf2.ten.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.plugin = plugin
    # updating cache_locations after adding the tasks
    wf2.cache_locations = cache_dir1

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 12 == results2.output.out

    # checking if the second wf runs again, but runs only one task
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()

    # the second wf should have only one task run (the "ten" node)
    assert len(list(Path(cache_dir1).glob("F*"))) == 2
    assert len(list(Path(cache_dir2).glob("F*"))) == 1
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_state_cachelocations(plugin, tmpdir):
    """
    Two identical wfs (with states) with provided cache_dir;
    the second wf has cache_locations and should not recompute the results
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 4]
    # scalar splitter: elements are paired, (2, 3) and (20, 4)
    wf1.split(splitter=("x", "y"))
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert results1[0].output.out == 8
    assert results1[1].output.out == 82

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.split(splitter=("x", "y"))
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert results2[0].output.out == 8
    assert results2[1].output.out == 82

    # checking execution time: second run only reuses cached results
    assert t1 > 3
    assert t2 < 0.5

    # checking all directories
    assert wf1.output_dir
    for odir in wf1.output_dir:
        assert odir.exists()
    # checking if the second wf didn't run again
    # checking all directories
    assert wf2.output_dir
    for odir in wf2.output_dir:
        assert not odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_state_cachelocations_forcererun(plugin, tmpdir):
    """
    Two identical wfs (with states) with provided cache_dir;
    the second wf has cache_locations,
    but submitter is called with rerun=True, so should recompute
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 4]
    wf1.split(splitter=("x", "y"))
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert results1[0].output.out == 8
    assert results1[1].output.out == 82

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.split(splitter=("x", "y"))
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        # rerun=True overrides the cache_locations lookup
        sub(wf2, rerun=True)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert results2[0].output.out == 8
    assert results2[1].output.out == 82

    # checking execution time: both runs do the real work
    assert t1 > 3
    assert t2 > 3

    # checking all directories
    assert wf1.output_dir
    for odir in wf1.output_dir:
        assert odir.exists()
    # checking that the second wf ran again
    # checking all directories
    assert wf2.output_dir
    for odir in wf2.output_dir:
        assert odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_state_cachelocations_updateinp(plugin, tmpdir):
    """
    Two identical wfs (with states) with provided cache_dir;
    the second wf has cache_locations and should not recompute the results
    (the lazy input of the node is updated to the correct one,
    i.e. the same as in wf1, after adding the node to the wf)
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 4]
    wf1.split(splitter=("x", "y"))
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert results1[0].output.out == 8
    assert results1[1].output.out == 82

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    # note: y is deliberately wired to lzin.x here; it is corrected below
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.x))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.split(splitter=("x", "y"))
    wf2.plugin = plugin
    # updating the lazy input after the node was added to the wf,
    # making wf2 identical to wf1
    wf2.mult.inputs.y = wf2.lzin.y

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert results2[0].output.out == 8
    assert results2[1].output.out == 82

    # checking execution time: second run only reuses cached results
    assert t1 > 3
    assert t2 < 0.5

    # checking all directories
    assert wf1.output_dir
    for odir in wf1.output_dir:
        assert odir.exists()
    # checking if the second wf didn't run again
    # checking all directories
    assert wf2.output_dir
    for odir in wf2.output_dir:
        assert not odir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_state_n_nostate_cachelocations(plugin, tmpdir):
    """
    Two wfs with provided cache_dir, the first one has no state, the second has;
    the second wf has cache_locations; only the element with new inputs
    should be recomputed, the element matching the first wf is reused
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    with Submitter(plugin=plugin) as sub:
        sub(wf1)

    results1 = wf1.result()
    assert results1.output.out == 8

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    # element (2, 3) matches wf1's inputs, element (20, 4) is new
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.split(splitter=("x", "y"))
    wf2.plugin = plugin

    with Submitter(plugin=plugin) as sub:
        sub(wf2)

    results2 = wf2.result()
    assert results2[0].output.out == 8
    assert results2[1].output.out == 82

    # checking the directory from the first wf
    assert wf1.output_dir.exists()
    # checking directories from the second wf, only second element should be recomputed
    assert not wf2.output_dir[0].exists()
    assert wf2.output_dir[1].exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachelocations_updated(plugin, tmpdir):
    """
    Two identical wfs with provided cache_dir;
    the second wf has cache_locations in init,
    that is later overwritten in Submitter.__call__;
    the cache_locations from call doesn't exist so the second task should run again
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir1_empty = tmpdir.mkdir("test_wf_cache3_empty")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 8 == results1.output.out

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.inputs.y = 3
    wf2.plugin = plugin

    t0 = time.time()
    # changing cache_locations to an empty dir, so the cached results from
    # cache_dir1 are no longer visible
    with Submitter(plugin=plugin) as sub:
        sub(wf2, cache_locations=cache_dir1_empty)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 8 == results2.output.out

    # checking execution time: both runs do the real work
    assert t1 > 3
    assert t2 > 3

    # checking if both wf run
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_cachelocations_recompute(plugin, tmpdir):
    """
    Two wfs with the same inputs but slightly different graph;
    the second wf should recompute the results,
    but the second node should use the results from the first wf (has the same input)
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert 8 == results1.output.out

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    # different argument assignment (x/y swapped), so mult has a different
    # input hash, but its numeric output is the same
    wf2.add(multiply(name="mult", x=wf2.lzin.y, y=wf2.lzin.x))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = 2
    wf2.inputs.y = 3
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert 8 == results2.output.out

    # checking if both dir exists
    assert wf1.output_dir.exists()
    assert wf2.output_dir.exists()

    # the second wf should have only one task run (the swapped "mult")
    assert len(list(Path(cache_dir1).glob("F*"))) == 2
    assert len(list(Path(cache_dir2).glob("F*"))) == 1
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstate_cachelocations(plugin, tmpdir):
    """
    Two wfs with identical inputs and node states;
    the second wf has cache_locations and should not recompute the results
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    # the state lives on the node (mult), not on the workflow
    wf1.add(
        multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y).split(splitter=("x", "y"))
    )
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 4]
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert results1.output.out == [8, 82]

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(
        multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y).split(splitter=("x", "y"))
    )
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert results2.output.out == [8, 82]

    # checking execution time: second run only reuses cached results
    assert t1 > 3
    assert t2 < 0.5

    # checking all directories
    assert wf1.output_dir.exists()

    # checking if the second wf didn't run again
    # checking all directories
    assert not wf2.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstate_cachelocations_forcererun(plugin, tmpdir):
    """
    Two wfs with identical inputs and node states;
    the second wf has cache_locations,
    but submitter is called with rerun=True, so should recompute
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(
        multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y).split(splitter=("x", "y"))
    )
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 4]
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert results1.output.out == [8, 82]

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(
        multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y).split(splitter=("x", "y"))
    )
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        # rerun=True overrides the cache_locations lookup
        sub(wf2, rerun=True)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert results2.output.out == [8, 82]

    # checking execution time: both runs do the real work
    assert t1 > 3
    assert t2 > 3

    # checking all directories
    assert wf1.output_dir.exists()
    # checking that the second wf ran again
    assert wf2.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstate_cachelocations_updatespl(plugin, tmpdir):
    """
    Two wfs with identical inputs and node state (that is set after adding the node!);
    the second wf has cache_locations and should not recompute the results
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(
        multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y).split(splitter=("x", "y"))
    )
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 4]
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert results1.output.out == [8, 82]

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    wf2.add(multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y))
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    # the splitter is set after the node was added to the wf,
    # which should be equivalent to splitting before adding (as in wf1)
    wf2.mult.split(splitter=("x", "y"))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    assert results2.output.out == [8, 82]

    # checking execution time: second run only reuses cached results
    assert t1 > 3
    assert t2 < 0.5

    # checking all directories
    assert wf1.output_dir.exists()

    # checking if the second wf didn't run again
    # checking all directories
    assert not wf2.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_ndstate_cachelocations_recompute(plugin, tmpdir):
    """
    Two wfs (with nodes with states) with provided cache_dir;
    the second wf has cache_locations, but its node uses a different splitter
    (outer ["x", "y"] instead of scalar ("x", "y")),
    so the results should be recomputed
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")
    cache_dir2 = tmpdir.mkdir("test_wf_cache4")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(
        multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y).split(splitter=("x", "y"))
    )
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 4]
    wf1.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf1)
    t1 = time.time() - t0

    results1 = wf1.result()
    assert results1.output.out == [8, 82]

    wf2 = Workflow(
        name="wf",
        input_spec=["x", "y"],
        cache_dir=cache_dir2,
        cache_locations=cache_dir1,
    )
    # outer splitter here (cross product), vs the scalar splitter in wf1
    wf2.add(
        multiply(name="mult", x=wf2.lzin.x, y=wf2.lzin.y).split(splitter=["x", "y"])
    )
    wf2.add(add2_wait(name="add2", x=wf2.mult.lzout.out))
    wf2.set_output([("out", wf2.add2.lzout.out)])
    wf2.inputs.x = [2, 20]
    wf2.inputs.y = [3, 4]
    wf2.plugin = plugin

    t0 = time.time()
    with Submitter(plugin=plugin) as sub:
        sub(wf2)
    t2 = time.time() - t0

    results2 = wf2.result()
    # four elements from the cross product of x and y
    assert results2.output.out == [8, 10, 62, 82]

    # checking execution time: both runs do the real work
    assert t1 > 3
    assert t2 > 3

    # checking all directories
    assert wf1.output_dir.exists()

    # checking that the second wf ran again
    # checking all directories
    assert wf2.output_dir.exists()
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_nostate_runtwice_usecache(plugin, tmpdir):
    """
    running workflow (without state) twice,
    the second run should use the results from the first one
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    wf1.inputs.x = 2
    wf1.inputs.y = 3
    wf1.plugin = plugin

    with Submitter(plugin=plugin) as sub:
        sub(wf1)

    results1 = wf1.result()
    assert 8 == results1.output.out
    # checking output_dir after the first run
    assert wf1.output_dir.exists()
    # saving the content of the cache dir after the first run
    cache_dir_content = os.listdir(wf1.cache_dir)

    # running workflow the second time
    with Submitter(plugin=plugin) as sub:
        sub(wf1)

    results1 = wf1.result()
    assert 8 == results1.output.out
    # checking that no new directory was created by the second run
    assert cache_dir_content == os.listdir(wf1.cache_dir)
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_state_runtwice_usecache(plugin, tmpdir):
    """
    running workflow with a state twice,
    the second run should use the results from the first one
    """
    cache_dir1 = tmpdir.mkdir("test_wf_cache3")

    wf1 = Workflow(name="wf", input_spec=["x", "y"], cache_dir=cache_dir1)
    wf1.add(multiply(name="mult", x=wf1.lzin.x, y=wf1.lzin.y))
    wf1.add(add2_wait(name="add2", x=wf1.mult.lzout.out))
    wf1.set_output([("out", wf1.add2.lzout.out)])
    # scalar splitter: elements are paired, (2, 3) and (20, 30)
    wf1.split(splitter=("x", "y"))
    wf1.inputs.x = [2, 20]
    wf1.inputs.y = [3, 30]
    wf1.plugin = plugin

    with Submitter(plugin=plugin) as sub:
        sub(wf1)

    results1 = wf1.result()
    assert 8 == results1[0].output.out
    assert 602 == results1[1].output.out
    # checking output_dir after the first run
    assert [odir.exists() for odir in wf1.output_dir]
    # saving the content of the cache dir after the first run
    cache_dir_content = os.listdir(wf1.cache_dir)

    # running workflow the second time
    with Submitter(plugin=plugin) as sub:
        sub(wf1)

    results1 = wf1.result()
    assert 8 == results1[0].output.out
    assert 602 == results1[1].output.out
    # checking that no new directory was created by the second run
    assert cache_dir_content == os.listdir(wf1.cache_dir)
@pytest.fixture
def create_tasks():
    """Build a two-node workflow (add2 -> multiply) and return it together
    with the two task objects, so tests can inspect the tasks directly."""
    wf = Workflow(name="wf", input_spec=["x"])
    wf.inputs.x = 1
    wf.add(add2(name="t1", x=wf.lzin.x))
    wf.add(multiply(name="t2", x=wf.t1.lzout.out, y=2))
    wf.set_output([("out", wf.t2.lzout.out)])
    return wf, wf.name2obj["t1"], wf.name2obj["t2"]
def test_cache_propagation1(tmpdir, create_tasks):
    """Tasks inherit the workflow cache_dir, both before and after it is set."""
    wf, t1, t2 = create_tasks
    for cache_dir in (None, (tmpdir / "shared").strpath):
        if cache_dir is not None:
            wf.cache_dir = cache_dir
        wf(plugin="cf")
        assert wf.cache_dir == t1.cache_dir == t2.cache_dir
def test_cache_propagation2(tmpdir, create_tasks):
    """A task with allow_cache_override=False keeps its own cache_dir."""
    workflow, task1, task2 = create_tasks
    workflow.cache_dir = (tmpdir / "shared").strpath
    task2.allow_cache_override = False
    workflow(plugin="cf")
    # task1 inherits the workflow cache_dir, task2 opted out
    assert workflow.cache_dir == task1.cache_dir
    assert task1.cache_dir != task2.cache_dir
def test_cache_propagation3(tmpdir, create_tasks):
    """A shared cache_dir is propagated even when the workflow is split."""
    workflow, task1, task2 = create_tasks
    workflow.inputs.x = [1, 2]
    workflow.split("x")
    workflow.cache_dir = (tmpdir / "shared").strpath
    workflow(plugin="cf")
    assert workflow.cache_dir == task1.cache_dir == task2.cache_dir
def test_workflow_combine1(tmpdir):
    """Chained combines: first over ``power.a``, then over ``power.b``."""
    wf = Workflow(name="wf1", input_spec=["a", "b"], a=[1, 2], b=[2, 3])
    wf.cache_dir = tmpdir
    wf.add(power(name="power", a=wf.lzin.a, b=wf.lzin.b).split(["a", "b"]))
    wf.add(identity(name="identity1", x=wf.power.lzout.out).combine("power.a"))
    wf.add(identity(name="identity2", x=wf.identity1.lzout.out).combine("power.b"))
    wf.set_output(
        {
            "out_pow": wf.power.lzout.out,
            "out_iden1": wf.identity1.lzout.out,
            "out_iden2": wf.identity2.lzout.out,
        }
    )

    result = wf(plugin="cf")

    assert result.output.out_pow == [1, 1, 4, 8]
    assert result.output.out_iden1 == [[1, 4], [1, 8]]
    assert result.output.out_iden2 == [[1, 4], [1, 8]]
def test_workflow_combine2(tmpdir):
    """Combine over ``a`` directly on the split node, then over ``power.b``."""
    wf = Workflow(name="wf1", input_spec=["a", "b"], a=[1, 2], b=[2, 3])
    wf.cache_dir = tmpdir
    wf.add(
        power(name="power", a=wf.lzin.a, b=wf.lzin.b).split(["a", "b"]).combine("a")
    )
    wf.add(identity(name="identity", x=wf.power.lzout.out).combine("power.b"))
    wf.set_output({"out_pow": wf.power.lzout.out, "out_iden": wf.identity.lzout.out})

    result = wf(plugin="cf")

    assert result.output.out_pow == [[1, 4], [1, 8]]
    assert result.output.out_iden == [[1, 4], [1, 8]]
# testing lzout.all to collect all of the results and let FunctionTask deal with it
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_lzoutall_1(plugin):
    """Workflow with two tasks and no splitter.

    The entire result object of ``mult`` is passed to ``add2_sub2_res``
    via the ``lzout.all_`` syntax.
    """
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2_sub2_res(name="add_sub", res=wf.mult.lzout.all_))
    wf.set_output([("out", wf.add_sub.lzout.out_add)])

    with Submitter(plugin=plugin) as sub:
        sub(wf)

    results = wf.result()
    assert wf.output_dir.exists()
    assert results.output.out == 8
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_lzoutall_1a(plugin):
    """Workflow with two tasks and no splitter.

    The entire result object is passed via ``lzout.all_`` both in the node
    connection and in the workflow output.
    """
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.inputs.x = 2
    wf.inputs.y = 3
    wf.plugin = plugin
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y))
    wf.add(add2_sub2_res(name="add_sub", res=wf.mult.lzout.all_))
    wf.set_output([("out_all", wf.add_sub.lzout.all_)])

    with Submitter(plugin=plugin) as sub:
        sub(wf)

    results = wf.result()
    assert wf.output_dir.exists()
    assert results.output.out_all == {"out_add": 8, "out_sub": 4}
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_lzoutall_st_1(plugin):
    """Workflow with two tasks and an outer splitter on the first task.

    The entire result object of ``mult`` is passed to ``add2_sub2_res``
    via the ``lzout.all_`` syntax.
    """
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.inputs.x = [2, 20]
    wf.inputs.y = [3, 30]
    wf.plugin = plugin
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split(["x", "y"]))
    wf.add(add2_sub2_res(name="add_sub", res=wf.mult.lzout.all_))
    wf.set_output([("out_add", wf.add_sub.lzout.out_add)])

    with Submitter(plugin=plugin) as sub:
        sub(wf)

    results = wf.result()
    assert wf.output_dir.exists()
    # four elements from the outer (cross-product) splitter
    assert results.output.out_add == [8, 62, 62, 602]
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_lzoutall_st_1a(plugin):
    """Workflow with two tasks and an outer splitter on the first task.

    The entire result object is passed via ``lzout.all_`` both in the node
    connection and in the workflow output.
    """
    wf = Workflow(name="wf_2", input_spec=["x", "y"])
    wf.inputs.x = [2, 20]
    wf.inputs.y = [3, 30]
    wf.plugin = plugin
    wf.add(multiply(name="mult", x=wf.lzin.x, y=wf.lzin.y).split(["x", "y"]))
    wf.add(add2_sub2_res(name="add_sub", res=wf.mult.lzout.all_))
    wf.set_output([("out_all", wf.add_sub.lzout.all_)])

    with Submitter(plugin=plugin) as sub:
        sub(wf)

    results = wf.result()
    assert wf.output_dir.exists()
    # one dict per element of the outer (cross-product) splitter
    assert results.output.out_all == [
        {"out_add": 8, "out_sub": 4},
        {"out_add": 62, "out_sub": 58},
        {"out_add": 62, "out_sub": 58},
        {"out_add": 602, "out_sub": 598},
    ]
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_lzoutall_st_2(plugin):
    """Two-task workflow whose first task splits over x and y and combines over x.

    The combined result objects are handed to ``add2_sub2_res`` through
    the ``lzout.all_`` syntax.
    """
    workflow = Workflow(name="wf_2", input_spec=["x", "y"])
    workflow.add(
        multiply(name="mult", x=workflow.lzin.x, y=workflow.lzin.y)
        .split(["x", "y"])
        .combine("x")
    )
    workflow.add(add2_sub2_res(name="add_sub", res=workflow.mult.lzout.all_))
    workflow.set_output([("out_add", workflow.add_sub.lzout.out_add)])
    workflow.inputs.x = [2, 20]
    workflow.inputs.y = [3, 30]
    workflow.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(workflow)

    assert workflow.output_dir.exists()
    res = workflow.result()
    # one group per y value, each combined over x
    assert res.output.out_add[0] == [8, 62]
    assert res.output.out_add[1] == [62, 602]
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_lzoutall_st_2a(plugin):
    """Two-task workflow whose first task splits over x and y and combines over x.

    Both the node connection and the workflow output use the
    ``lzout.all_`` syntax.
    """
    workflow = Workflow(name="wf_2", input_spec=["x", "y"])
    workflow.add(
        multiply(name="mult", x=workflow.lzin.x, y=workflow.lzin.y)
        .split(["x", "y"])
        .combine("x")
    )
    workflow.add(add2_sub2_res(name="add_sub", res=workflow.mult.lzout.all_))
    workflow.set_output([("out_all", workflow.add_sub.lzout.all_)])
    workflow.inputs.x = [2, 20]
    workflow.inputs.y = [3, 30]
    workflow.plugin = plugin

    with Submitter(plugin=plugin) as runner:
        runner(workflow)

    assert workflow.output_dir.exists()
    res = workflow.result()
    # one dict per y value, each field combined over x
    assert res.output.out_all == [
        {"out_add": [8, 62], "out_sub": [4, 58]},
        {"out_add": [62, 602], "out_sub": [58, 598]},
    ]
# workflows that have files in the result; the files should be copied to the wf dir
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_resultfile_1(plugin):
    """A single file produced by a task is copied into the workflow directory."""
    workflow = Workflow(name="wf_file_1", input_spec=["x"])
    workflow.add(fun_write_file(name="writefile", filename=workflow.lzin.x))
    workflow.inputs.x = "file_1.txt"
    workflow.plugin = plugin
    workflow.set_output([("wf_out", workflow.writefile.lzout.out)])

    with Submitter(plugin=plugin) as runner:
        runner(workflow)

    res = workflow.result()
    # the file must exist and live inside the workflow's output directory
    assert res.output.wf_out.exists()
    assert res.output.wf_out == workflow.output_dir / "file_1.txt"
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_resultfile_2(plugin):
    """Every file in a list-valued result is copied into the workflow directory."""
    workflow = Workflow(name="wf_file_1", input_spec=["x"])
    workflow.add(fun_write_file_list(name="writefile", filename_list=workflow.lzin.x))
    filenames = ["file_1.txt", "file_2.txt", "file_3.txt"]
    workflow.inputs.x = filenames
    workflow.plugin = plugin
    workflow.set_output([("wf_out", workflow.writefile.lzout.out)])

    with Submitter(plugin=plugin) as runner:
        runner(workflow)

    res = workflow.result()
    # each returned path must exist inside the workflow's output directory
    for idx, path in enumerate(res.output.wf_out):
        assert path.exists()
        assert path == workflow.output_dir / filenames[idx]
@pytest.mark.parametrize("plugin", Plugins)
def test_wf_resultfile_3(plugin):
    """Files inside a dict-valued result are copied into the workflow directory."""
    workflow = Workflow(name="wf_file_1", input_spec=["x"])
    workflow.add(fun_write_file_list2dict(name="writefile", filename_list=workflow.lzin.x))
    filenames = ["file_1.txt", "file_2.txt", "file_3.txt"]
    workflow.inputs.x = filenames
    workflow.plugin = plugin
    workflow.set_output([("wf_out", workflow.writefile.lzout.out)])

    with Submitter(plugin=plugin) as runner:
        runner(workflow)

    res = workflow.result()
    for key, val in res.output.wf_out.items():
        if key == "random_int":
            # the only non-file entry in the result dict
            assert val == 20
        else:
            # keys look like "file_<n>"; <n> indexes into ``filenames``
            assert val.exists()
            idx = int(key.split("_")[1])
            assert val == workflow.output_dir / filenames[idx]
| 30.957898 | 93 | 0.627098 | 15,448 | 100,737 | 3.993786 | 0.026994 | 0.045805 | 0.021071 | 0.047815 | 0.93489 | 0.921632 | 0.90544 | 0.901112 | 0.886994 | 0.863476 | 0 | 0.036515 | 0.211878 | 100,737 | 3,253 | 94 | 30.967415 | 0.740582 | 0.167873 | 0 | 0.825868 | 0 | 0 | 0.051949 | 0 | 0 | 0 | 0 | 0.000615 | 0.185101 | 1 | 0.047989 | false | 0 | 0.003656 | 0 | 0.052102 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8342d1411e96ad0ba14d59541cbcfb31ca9b39da | 10,200 | py | Python | car_side/can_message_type.py | LucienMorey/pytelemgui | 4e83d8d8b6fa011fc492f0e55d1c9adf18c7c113 | [
"MIT"
] | null | null | null | car_side/can_message_type.py | LucienMorey/pytelemgui | 4e83d8d8b6fa011fc492f0e55d1c9adf18c7c113 | [
"MIT"
] | null | null | null | car_side/can_message_type.py | LucienMorey/pytelemgui | 4e83d8d8b6fa011fc492f0e55d1c9adf18c7c113 | [
"MIT"
] | null | null | null |
class can_msg_types:
    """Decode raw CAN bus frames for the telemetry database.

    ``CAN_BUS_IDS`` maps an arbitration id (rendered as a zero-padded,
    upper-case hex string such as ``"0x03A"``) to a message name, and
    ``_FIELD_LAYOUT`` describes, per message, which record field each
    payload byte belongs to.
    """

    # Per-message record layout: message name -> [(field_name, byte_index)].
    # Multi-byte quantities are stored byte-by-byte, exactly as received
    # (e.g. "Throttle_byte_0" / "Throttle_byte_1").
    _FIELD_LAYOUT = {
        "OrionBMS_Set1": [
            ("Rolling Counter", 0),
            ("custom_flag_1", 1),
            ("custom_flag_2", 2),
            # NOTE(review): byte 3 reuses the "custom_flag_2" key (so it
            # overwrites byte 2) -- probably meant "custom_flag_3".  Kept
            # as-is to preserve the original record keys; confirm intent.
            ("custom_flag_2", 3),
            ("checksum", 4),
        ],
        "OrionBMS_Set2": [
            ("High_temperature", 0),
            ("High_temp_id", 1),
            ("Low_temperature", 2),
            ("Low_temp_id", 3),
            ("checksum", 4),
        ],
        "OrionBMS_Set3": [
            ("High_cell_voltage_byte_0", 0),
            ("High_cell_voltage_byte_1", 1),
            ("High_cell_volt_id", 2),
            ("Low_cell_voltage_byte_0", 3),
            ("Low_cell_voltage_byte_1", 4),
            ("Low_cell_volt_id", 5),
            ("checksum", 6),
        ],
        "OrionBMS_Set4": [
            ("Pack_inst_voltage_byte_0", 0),
            ("Pack_inst_voltage_byte_1", 1),
            ("Pack_current_byte_0", 2),
            ("Pack_current_byte_1", 3),
            ("Pack_summed_volt_byte_0", 4),
            ("Pack_summed_volt_byte_1", 5),
            ("checksum", 6),
        ],
        "OrionBMS_Set5": [
            # NOTE(review): the original stored bytes 0 and 1 under the
            # same "Avg_cell_resistance_byte_0" key (byte 1 wins); the
            # second entry was probably meant to be "..._byte_1".  Same
            # for "Pack_dcl_byte_0" below.  Preserved verbatim.
            ("Avg_cell_resistance_byte_0", 0),
            ("Avg_cell_resistance_byte_0", 1),
            ("Pack_ccl_byte_0", 2),
            ("Pack_ccl_byte_1", 3),
            ("Pack_dcl_byte_0", 4),
            ("Pack_dcl_byte_0", 5),
            ("checksum", 6),
        ],
        "PDM15_STD": [("Byte", 0)],
        "PDM15_MSG0": [
            ("F_fault_hvs_bms", 0),
            ("F_fault_hvs_pdoc", 1),
            ("F_fault_hvs_imd", 2),
            ("F_fault_imd_imd", 3),
            ("F_hv_precharge", 4),
            ("F_hv_precharged", 5),
            ("F_ready_to_drive", 6),
            ("F_drive_enable", 7),
        ],
        # NOTE(review): all five MSG1 flags read byte 0 in the original
        # code; that looks like a copy/paste slip (bytes 0-4 expected)
        # but is preserved until confirmed against the PDM firmware.
        "PDM15_MSG1": [
            ("F_brake_trigger", 0),
            ("F_bcm_control_on", 0),
            ("F_bcm_control", 0),
            ("F_shutdown_lock", 0),
            ("F_standby", 0),
        ],
        "PDM15_MSG2": [("F_reset", 0)],
        "BSPD_FAULT": [
            ("F_bspd_current_brake_fault", 0),
            ("sensor_failure", 1),
            ("throttle_brake_check", 2),
            ("throttle_implausbility", 3),
            ("torque_multi", 4),
        ],
        "BSPD_START": [("start_btn_state", 0)],
        "BSPD_THROTTLE": [
            ("byte_1", 0),
            ("Throttle_byte_0", 1),
            ("Throttle_byte_1", 2),
        ],
        # BUG FIX: this branch was labelled "BSPD_THROTTLE" a second time
        # in the original (unreachable); the brake fields it decodes
        # belong to the 0x245 "BSPD_BRAKE" message.
        "BSPD_BRAKE": [
            ("brake_avg_byte_0", 0),
            ("brake_avg_byte_1", 1),
            ("brake_rear_byte_0", 2),
            ("brake_rear_byte_1", 3),
            ("brake_front_byte_0", 4),
            ("brake_front_byte_1", 5),
        ],
        "UNITEK": [
            ("unitek_byte_0", 0),
            ("unitek_byte_1", 1),
            ("unitek_byte_2", 2),
        ],
        "M150_REGEN": [("regen_flag", 0)],
    }

    def __init__(self, debug=False):
        # Verbose-diagnostics flag; not consulted internally yet.
        self.debug = debug
        # Arbitration id (zero-padded upper-case hex) -> message name.
        # BUG FIX: "PDM115_MSG1"/"PDM115_MSG2" were typos that could never
        # match the "PDM15_MSG1"/"PDM15_MSG2" decode branches.
        self.CAN_BUS_IDS = {"0x03A": "OrionBMS_Set1",
                            "0x03B": "OrionBMS_Set2",
                            "0x03C": "OrionBMS_Set3",
                            "0x03D": "OrionBMS_Set4",
                            "0x03E": "OrionBMS_Set5",
                            "0x500": "PDM15_STD",
                            "0x520": "PDM15_MSG0",
                            "0x521": "PDM15_MSG1",
                            "0x522": "PDM15_MSG2",
                            "0x250": "BSPD_FAULT",
                            "0x650": "BSPD_START",
                            "0x210": "BSPD_THROTTLE",
                            "0x245": "BSPD_BRAKE",
                            "0x190": "UNITEK",
                            "0x400": "M150_REGEN"}

    def intepretID(self, id):
        """Return the message name for arbitration id *id*, or "NULL".

        BUG FIXES vs the original:
        * ``self`` was missing from the signature, so the method could
          never run when called on an instance.
        * ``hex(id)`` yields lower-case, unpadded strings ("0x3a") that
          never matched the "0x03A"-style keys, so every lookup returned
          "NULL"; the id is now formatted explicitly.
        (The misspelled name is kept for caller compatibility.)
        """
        return self.CAN_BUS_IDS.get("0x{:03X}".format(id), "NULL")

    def make_db_data(self, id_name, msg, db=None):
        """Build the ``(collection, record)`` pair to store for one CAN frame.

        Parameters
        ----------
        id_name : str
            Message name as returned by :meth:`intepretID`.
        msg :
            Frame object exposing ``data`` (indexable payload bytes) and
            ``timestamp``.
        db : mapping, optional
            Mapping of message name -> collection.  Defaults to the
            module-level ``database`` the original code referenced, which
            must be defined elsewhere -- TODO confirm.

        Returns
        -------
        tuple
            ``(collection, record)`` where ``record`` holds every decoded
            field plus a "Timestamp" entry, or ``(None, None)`` for an
            unknown ``id_name``.

        BUG FIXES vs the original:
        * ``self`` was missing from the signature.
        * string identity (``is``) was used instead of equality to match
          ``id_name``.
        * each branch rebound ``data`` for every field, so only the last
          field of each frame survived; all fields are now merged into a
          single record.
        """
        fields = self._FIELD_LAYOUT.get(id_name)
        if fields is None:
            # Unknown frame type: nothing to store (the original raised
            # NameError here because ``collection``/``data`` were unbound).
            return (None, None)
        store = db if db is not None else database
        collection = store[id_name]
        record = {name: msg.data[index] for name, index in fields}
        record["Timestamp"] = msg.timestamp
        return (collection, record)
| 36.428571 | 125 | 0.543235 | 1,191 | 10,200 | 4.38623 | 0.102435 | 0.080398 | 0.195253 | 0.234303 | 0.782542 | 0.751149 | 0.734877 | 0.72856 | 0.72856 | 0.672282 | 0 | 0.036631 | 0.338922 | 10,200 | 279 | 126 | 36.55914 | 0.738099 | 0 | 0 | 0.559809 | 0 | 0 | 0.184662 | 0.028244 | 0 | 0 | 0.007355 | 0 | 0 | 1 | 0.014354 | false | 0 | 0 | 0 | 0.028708 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
55c1a6f81636567f42399dbdf459bbe8fc11b167 | 35 | py | Python | libs/coda/testing/__init__.py | viridia/coda | 40671b655c2d2368bdc5f3a9ae1a45b57305f7a3 | [
"Apache-2.0"
] | 1 | 2017-09-06T16:50:13.000Z | 2017-09-06T16:50:13.000Z | libs/coda/testing/__init__.py | viridia/coda | 40671b655c2d2368bdc5f3a9ae1a45b57305f7a3 | [
"Apache-2.0"
] | null | null | null | libs/coda/testing/__init__.py | viridia/coda | 40671b655c2d2368bdc5f3a9ae1a45b57305f7a3 | [
"Apache-2.0"
] | null | null | null | from .assertions import Assertions
| 17.5 | 34 | 0.857143 | 4 | 35 | 7.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114286 | 35 | 1 | 35 | 35 | 0.967742 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
55c65ff0f9718cb25373c89631c6e56161db1730 | 10,052 | py | Python | Lab_Dash/models.py | SimonSchubotz/Electronic-Laboratory-Notebook | a5dc3daa76b07370c1ee5b7e74fb6c780c3d3c97 | [
"Apache-2.0"
] | null | null | null | Lab_Dash/models.py | SimonSchubotz/Electronic-Laboratory-Notebook | a5dc3daa76b07370c1ee5b7e74fb6c780c3d3c97 | [
"Apache-2.0"
] | null | null | null | Lab_Dash/models.py | SimonSchubotz/Electronic-Laboratory-Notebook | a5dc3daa76b07370c1ee5b7e74fb6c780c3d3c97 | [
"Apache-2.0"
] | null | null | null | from django.db import models
from datetime import datetime
# Create your models here.
class OCA(models.Model):
"""OCA Saves all dash properties of the OCA measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(unique=True, blank=True, null=True)
CA_high_degree = models.FloatField(blank=True, null=True)
CA_low_degree = models.FloatField(blank=True, null=True)
BD_high_mm = models.FloatField(blank=True, null=True)
BD_low_mm = models.FloatField(blank=True, null=True)
Time_high_sec = models.FloatField(blank=True, null=True)
Time_low_sec = models.FloatField(blank=True, null=True)
Time_diff_pump = models.FloatField(blank=True, null=True)
Cycle_drop_1_sec = models.FloatField(blank=True, null=True)
Cycle_drop_2_sec = models.FloatField(blank=True, null=True)
Cycle_drop_3_sec = models.FloatField(blank=True, null=True)
Cycle_drop_4_sec = models.FloatField(blank=True, null=True)
Cycle_drop_5_sec = models.FloatField(blank=True, null=True)
Cycle_drop_6_sec = models.FloatField(blank=True, null=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(OCA, self).save(*args, **kwargs)
class RSD(models.Model):
"""RSD Saves all dash properties of the RSD measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(unique=True, blank=True, null=True)
CA_high_degree = models.FloatField(blank=True, null=True)
CA_low_degree = models.FloatField(blank=True, null=True)
BD_high_mm = models.FloatField(blank=True, null=True)
BD_low_mm = models.FloatField(blank=True, null=True)
Time_high_sec = models.FloatField(blank=True, null=True)
Time_low_sec = models.FloatField(blank=True, null=True)
Time_diff_pump = models.FloatField(blank=True, null=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(RSD, self).save(*args, **kwargs)
class SEL(models.Model):
"""SEL Saves all dash properties of the SEL measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(unique=True, blank=True, null=True)
Start_datetime_elli = models.DateTimeField(default=datetime.now(), null=True, blank=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(SEL, self).save(*args, **kwargs)
class ComparisonEntry(models.Model):
"""SEL Saves all dash properties of the SEL measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(blank=True, null=True)
Label = models.TextField(blank=True, null=True)
ExpBaseID = models.IntegerField(blank=True, null=True)#Foreign key not possible because of circular import
X_high = models.FloatField(blank=True, null=True)
X_low = models.FloatField(blank=True, null=True)
Y_high = models.FloatField(blank=True, null=True)
Y_low = models.FloatField(blank=True, null=True)
X_shift = models.FloatField(blank=True, null=True)
Y_shift = models.FloatField(blank=True, null=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(ComparisonEntry, self).save(*args, **kwargs)
class Comparison(models.Model):
"""SEL Saves all dash properties of the SEL measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(blank=True, null=True)
Title = models.TextField(blank=True, null=True)
Entry = models.ManyToManyField(ComparisonEntry, blank=True)
X_shift = models.FloatField(blank=True, null=True)
Y_shift = models.FloatField(blank=True, null=True)
X_high = models.FloatField(blank=True, null=True)
X_low = models.FloatField(blank=True, null=True)
Y_high = models.FloatField(blank=True, null=True)
Y_low = models.FloatField(blank=True, null=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(Comparison, self).save(*args, **kwargs)
class OszAnalysisEntry(models.Model):
"""SEL Saves all dash properties of the SEL measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(blank=True, null=True)
Label = models.TextField(blank=True, null=True)
OszAnalysisID = models.IntegerField(blank=True, null=True)#Foreign key not possible because of circular import
X_high = models.FloatField(blank=True, null=True)
X_low = models.FloatField(blank=True, null=True)
Y_high = models.FloatField(blank=True, null=True)
Y_low = models.FloatField(blank=True, null=True)
X_shift = models.FloatField(blank=True, null=True)
Y_shift = models.FloatField(blank=True, null=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(OszAnalysisEntry, self).save(*args, **kwargs)
class OszAnalysis(models.Model):
"""SEL Saves all dash properties of the SEL measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(blank=True, null=True)
Title = models.TextField(blank=True, null=True)
Entry = models.ManyToManyField(OszAnalysisEntry, blank=True)
X_shift = models.FloatField(blank=True, null=True)
Y_shift = models.FloatField(blank=True, null=True)
X_high = models.FloatField(blank=True, null=True)
X_low = models.FloatField(blank=True, null=True)
Y_high = models.FloatField(blank=True, null=True)
Y_low = models.FloatField(blank=True, null=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(OszAnalysis, self).save(*args, **kwargs)
class SFG(models.Model):
"""SEL Saves all dash properties of the SEL measurements
Parameters
----------
models : [type]
[description]
"""
Name = models.TextField(unique=True, blank=True, null=True)
def __str__(self):
return str(self.Name)
def save(self, *args, **kwargs):#saves '' as none
if not self.Name:
self.Name = None
super(SFG, self).save(*args, **kwargs)
class GRP(models.Model):
Name = models.TextField(unique=True, blank=True, null=True)
PossibleTyps = [('SFG_kin_3D', 'Sum frequency generation kinetic'), ('SFG_kin_drive', 'Sum frequency generation kinetic while changing the Position'),
('SFG_abrastern', 'Sum frequency generation at different locations'), ('SFG_cycle', 'Sum frequency generation cycle drops')]
Typ = models.TextField(choices=PossibleTyps, blank=True, null=True)
def __str__(self):
return str(self.Name)
class SFG_cycle(models.Model):
Name = models.TextField(unique=True)
Graph_distance = models.FloatField(blank=True, null=True)
Signal_high = models.FloatField(blank=True, null=True)
Signal_low = models.FloatField(blank=True, null=True)
Wavenumber_high = models.FloatField(blank=True, null=True)
Wavenumber_low = models.FloatField(blank=True, null=True)
Group = models.ForeignKey(GRP, on_delete=models.CASCADE, blank=True, null=True)
def __str__(self):
return str(self.Name)
class SFG_abrastern(models.Model):
Name = models.TextField(unique=True)
Graph_distance = models.FloatField(blank=True, null=True)
Signal_high = models.FloatField(blank=True, null=True)
Signal_low = models.FloatField(blank=True, null=True)
Wavenumber_high = models.FloatField(blank=True, null=True)
Wavenumber_low = models.FloatField(blank=True, null=True)
Group = models.ForeignKey(GRP, on_delete=models.CASCADE, blank=True, null=True)
def __str__(self):
return str(self.Name)
class SFG_kin_3D(models.Model):
Name = models.TextField(unique=True)
Time_high_sec = models.FloatField(blank=True, null=True)
Time_low_sec = models.FloatField(blank=True, null=True)
Signal_high = models.FloatField(blank=True, null=True)
Signal_low = models.FloatField(blank=True, null=True)
Wavenumber_high = models.FloatField(blank=True, null=True)
Wavenumber_low = models.FloatField(blank=True, null=True)
Group = models.ForeignKey(GRP, on_delete=models.CASCADE, blank=True, null=True)
def __str__(self):
return str(self.Name)
class SFG_kin_drive(models.Model):
Name = models.TextField(unique=True)
Time_high_sec = models.FloatField(blank=True, null=True)
Time_low_sec = models.FloatField(blank=True, null=True)
Signal_high = models.FloatField(blank=True, null=True)
Signal_low = models.FloatField(blank=True, null=True)
Wavenumber_high = models.FloatField(blank=True, null=True)
Wavenumber_low = models.FloatField(blank=True, null=True)
Group = models.ForeignKey(GRP, on_delete=models.CASCADE, blank=True, null=True)
def __str__(self):
return str(self.Name)
class KUR(models.Model):
Name = models.TextField(unique=True)
CA_high_degree = models.FloatField(blank=True, null=True)
CA_low_degree = models.FloatField(blank=True, null=True)
BD_high_mm = models.FloatField(blank=True, null=True)
BD_low_mm = models.FloatField(blank=True, null=True)
Time_high_sec = models.FloatField(blank=True, null=True)
Time_low_sec = models.FloatField(blank=True, null=True)
def __str__(self):
return str(self.Name) | 40.532258 | 154 | 0.679467 | 1,328 | 10,052 | 5.006024 | 0.086596 | 0.12861 | 0.179904 | 0.235259 | 0.904633 | 0.876955 | 0.868833 | 0.861763 | 0.860409 | 0.819645 | 0 | 0.000988 | 0.194389 | 10,052 | 248 | 155 | 40.532258 | 0.819956 | 0.113709 | 0 | 0.786517 | 0 | 0 | 0.025314 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.123596 | false | 0 | 0.011236 | 0.078652 | 0.859551 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
55d9fd77cee81daf4745b22df03173e2af292afe | 149 | py | Python | torch_ac/__init__.py | Nikunj-Gupta/torch-ac | af26e45f25326edea4efe51b855c1bb597a63611 | [
"MIT"
] | null | null | null | torch_ac/__init__.py | Nikunj-Gupta/torch-ac | af26e45f25326edea4efe51b855c1bb597a63611 | [
"MIT"
] | null | null | null | torch_ac/__init__.py | Nikunj-Gupta/torch-ac | af26e45f25326edea4efe51b855c1bb597a63611 | [
"MIT"
] | null | null | null | from torch_ac.algos import A2CAlgo, PPOAlgo, PPOAlgoOriginal
from torch_ac.model import ACModel, RecurrentACModel
from torch_ac.utils import DictList | 49.666667 | 60 | 0.865772 | 21 | 149 | 6 | 0.619048 | 0.214286 | 0.261905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007407 | 0.09396 | 149 | 3 | 61 | 49.666667 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
55dfbf52f2aa92b9314e8fd3d1e61dde72ec22c1 | 33,134 | py | Python | Server/elasticsearch.py | cbabs/netDiag | 59a74acd9edcb6c7fb90d222e96c63077b53c99c | [
"MIT"
] | 1 | 2020-07-26T02:02:57.000Z | 2020-07-26T02:02:57.000Z | Server/elasticsearch.py | cbabs/netDiag | 59a74acd9edcb6c7fb90d222e96c63077b53c99c | [
"MIT"
] | null | null | null | Server/elasticsearch.py | cbabs/netDiag | 59a74acd9edcb6c7fb90d222e96c63077b53c99c | [
"MIT"
] | null | null | null | from Server import app
import requests
import json
# http://628205e71c04.sn.mynetname.net:9201/
class NetDiagElastic(object):
    """Minimal Elasticsearch REST client for the netdiag index.

    On construction the class probes every configured server's
    ``/_cluster/health`` endpoint and keeps the first one that answers
    HTTP 200; all later requests are sent to that server.

    Raises
    ------
    Exception
        If none of the configured servers responds with 200.
    """

    def __init__(self):
        self.dbServers = app.config['DB_HOST']   # candidate ES hosts
        self.dbPort = app.config['DB_PORT']
        self.dbIndex = app.config['DB_NAME']
        self.srv = None
        for srv in self.dbServers:
            req = r"http://{}:{}/_cluster/health?pretty".format(srv, self.dbPort)
            try:
                r = requests.get(req)
            except requests.exceptions.RequestException:
                # Host unreachable / timed out -- try the next candidate.
                # (Was a bare ``except: pass`` that swallowed everything,
                # including KeyboardInterrupt.)
                continue
            if r.status_code == 200:
                print('Success ' + srv)
                self.srv = srv
                break
        if self.srv is None:
            raise Exception("Could not connect to any ES servers")

    def getAvailSrv(self):
        """Print the server selected during construction."""
        print(self.srv)

    def procClntData(self, jsnData):
        """Process client diagnostic data (not implemented yet)."""
        pass

    def getData(self, url):
        """GET ``url`` (path relative to the server root); print and return the response."""
        req = r"http://{}:{}/{}".format(self.srv, self.dbPort, url)
        r = requests.get(req)
        print(r.text)
        return r

    def postData(self, url, data=None):
        """POST to ``url``; if ``data`` is given it is sent as a JSON body."""
        urlReq = r"http://{}:{}/{}".format(self.srv, self.dbPort, url)
        if data:
            headers = {"Content-Type": "application/json"}
            # BUG FIX: the original posted to the bare path ``url`` here
            # instead of the fully qualified ``urlReq``.
            r = requests.post(urlReq, data=json.dumps(data), headers=headers)
        else:
            r = requests.post(urlReq)
        print(r.text)
        return r

    def putData(self, url, data):
        """PUT ``data`` as JSON to ``url``; print and return the response."""
        urlReq = r"http://{}:{}/{}".format(self.srv, self.dbPort, url)
        headers = {"Content-Type": "application/json"}
        print(urlReq)
        r = requests.put(urlReq, data=json.dumps(data), headers=headers)
        print(r.text)
        return r

    def delData(self, url):
        """DELETE ``url``; print the body and status code, return the response."""
        req = r"http://{}:{}/{}".format(self.srv, self.dbPort, url)
        r = requests.delete(req)
        print(r.text)
        print(r.status_code)
        return r
def main():
    """Ad-hoc maintenance driver: enable one replica on every graylog_* index.

    The previous version carried several large unused locals (static and
    dynamic mapping definitions plus two multi-kilobyte sample client
    diagnostic payloads) and blocks of commented-out one-off calls; none
    of them were read by the live code, so they have been removed
    (recover them from version control if needed).
    """
    # Bug fix: NetDiagElastic() takes no positional arguments — it reads
    # its server list, port, and index from app.config. The old call
    # NetDiagElastic(["10.8.4.128"], 30434) raised a TypeError.
    db = NetDiagElastic()

    # Elasticsearch index-settings payload: one replica per shard.
    addReplToEs = {
        "index": {
            "number_of_replicas": 1
        }
    }
    urlPut = r"graylog_*/_settings"
    db.putData(urlPut, addReplToEs)
# Allow running this module directly as a one-off maintenance script.
if __name__ == "__main__":
    main()
| 193.766082 | 11,875 | 0.622352 | 4,188 | 33,134 | 4.917383 | 0.11724 | 0.042245 | 0.048946 | 0.062931 | 0.8249 | 0.8047 | 0.799262 | 0.78149 | 0.743663 | 0.733612 | 0 | 0.118884 | 0.121627 | 33,134 | 170 | 11,876 | 194.905882 | 0.588716 | 0.012706 | 0 | 0.20202 | 0 | 0.151515 | 0.679705 | 0.052633 | 0 | 0 | 0 | 0 | 0 | 1 | 0.080808 | false | 0.020202 | 0.030303 | 0 | 0.121212 | 0.080808 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
55e8a766376fbbbd50714838d008ec679dba09bf | 52,210 | py | Python | samples/fields/field.py | zoho/zohocrm-python-sdk-2.1 | cde6fcd1c5c8f7a572154ebb2b947ec697c24209 | [
"Apache-2.0"
] | null | null | null | samples/fields/field.py | zoho/zohocrm-python-sdk-2.1 | cde6fcd1c5c8f7a572154ebb2b947ec697c24209 | [
"Apache-2.0"
] | null | null | null | samples/fields/field.py | zoho/zohocrm-python-sdk-2.1 | cde6fcd1c5c8f7a572154ebb2b947ec697c24209 | [
"Apache-2.0"
] | null | null | null | from zcrmsdk.src.com.zoho.crm.api.fields import *
from zcrmsdk.src.com.zoho.crm.api import ParameterMap
class Field(object):
@staticmethod
def get_fields(module_api_name):
"""
This method is used to get metadata about all the fields of a module and print the response.
:param module_api_name: The API Name of the module to get fields
"""
"""
example
module_api_name = "Leads";
"""
# Get instance of FieldsOperations Class that takes module_api_name as parameter
fields_operations = FieldsOperations(module_api_name)
# Get instance of ParameterMap Class
param_instance = ParameterMap()
# Possible parameters for get_fields operation
# param_instance.add(GetFieldsParam.type, "unused")
# Call get_fields method that takes paramInstance as parameter
response = fields_operations.get_fields(param_instance)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
if response.get_status_code() in [204, 304]:
print('No Content' if response.get_status_code()
== 204 else 'Not Modified')
return
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ResponseWrapper instance is received
if isinstance(response_object, ResponseWrapper):
# Get the list of obtained Field instances
fields_list = response_object.get_fields()
for field in fields_list:
# Get the Webhook of each Field
print("Webhook: " + str(field.get_webhook()))
# Get the JsonType of each Field
print("JsonType: " + str(field.get_json_type()))
# Get the DisplayLabel of each Field
print("DisplayLabel: " + field.get_display_label())
# Get the SystemMandatory of each Field
print("SystemMandatory: " +
str(field.get_system_mandatory()))
print("\n Field is Private :")
print(field.get_private())
print("\n Field is UiType :")
print(field.get_ui_type())
print("\n Field PickListValuesSortedLexically :")
print(field.get_pick_list_values_sorted_lexically())
# Get the DataType of each Field
print("DataType: " + field.get_data_type())
# Get the ColumnName of each Field
print("ColumnName: " + str(field.get_column_name()))
# Get the PersonalityName of each Field
print("PersonalityName: " +
str(field.get_personality_name()))
# Get the ID of each Field
print("ID: " + str(field.get_id()))
# Get the Sortable of each Field
print("Sortable: " + str(field.get_sortable()))
# Get the TransitionSequence of each Field
print("TransitionSequence: " +
str(field.get_transition_sequence()))
if field.get_mandatory() is not None:
# Get the Mandatory of each Field
print("Mandatory: " +
str(field.get_mandatory()))
if field.get_external() is not None:
external = field.get_external()
# Get the External Show of each Field
print("External Show: " +
str(external.get_show()))
# Get the External Type of each Field
print("External Type: " +
str(external.get_type()))
# Get the External Type of each Field
print("External Type: " +
str(external.get_allow_multiple_config()))
if field.get_unique() is not None:
# Get the Mandatory of each Field
print(
"Mandatory: ")
print(field.get_unique().get_casesensitive())
if field.get_history_tracking() is not None:
# Get the HistoryTracking of each Field
history_tracking = field.get_history_tracking()
module = history_tracking.get_module()
if module is not None:
module_layout = module.get_layout()
if module_layout is not None:
print("Module layout id: " +
str(module_layout.get_id()))
print("Module display label: " +
str(module.get_api_name()))
print("Module api name: " +
str(module.get_id()))
print("Module module: " +
str(module.get_module()))
print("Module module name: " +
str(module.get_module_name()))
duration_configured = history_tracking.get_duration_configured_field()
if duration_configured is not None:
print(
"historytracking duration configured field: " + str(duration_configured.get_id()))
# Get the obtained Layout instance
layout = field.get_layouts()
# Check if layout is not null
if layout is not None:
# Get the ID of the Layout
print("Layout ID: " + str(layout.get_id()))
# Get the Name of the Layout
print("Layout Name: " + str(layout.get_name()))
# Get the APIName of each Field
print("APIName : " + str(field.get_api_name()))
# Get the Content of each Field
print("Content: " + str(field.get_content()))
# Get the Message of each Field
print("Message :" + str(field.get_message()))
# Get the obtained Crypt instance
crypt = field.get_crypt()
if crypt is not None:
print("Crypt Details")
# Get the Crypt Mode
print("Mode: " + crypt.get_mode())
# Get the Crypt Column
print("Column: ")
print(crypt.get_column())
# Get the Crypt Table
print("Table: ")
print(crypt.get_table())
# Get the Crypt Status
print("Status: " )
print(crypt.get_status())
print("\n Crypt Notify:")
print(crypt.get_notify())
enc_fld_ids = crypt.get_encfldids()
if enc_fld_ids is not None:
print("\nEncFldIds : ")
for enc_fld_id in enc_fld_ids:
print(enc_fld_id)
# Get the FieldLabel of each Field
print("FieldLabel: " + str(field.get_field_label()))
tool_tip = field.get_tooltip()
if tool_tip is not None:
# Get the Name of the ToolTip
print("ToolTip Name: " + tool_tip.get_name())
# Get the Value of the ToolTip
print("ToolTip Value: " + tool_tip.get_value())
currency = field.get_currency()
if currency is not None:
# Get the RoundingOption of the Currency
print("Currency RoundingOption: ")
print(currency.get_rounding_option())
# Get the Precision of the Currency
print("Currency Precision: ")
print(currency.get_precision())
# Get the CreatedSource of each Field
print("CreatedSource: " +
str(field.get_created_source()))
if field.get_display_type() is not None:
# Get the DisplayType of the Field
print("Field DisplayType: ")
print(field.get_display_type().get_value())
# Get the FieldReadOnly of each Field
print("FieldReadOnly: " +
str(field.get_field_read_only()))
# Get the Filterable of each Field
print("Filterable: " + str(field.get_filterable()))
# Get the Criteria of each Field
criteria = field.get_criteria()
if criteria is not None:
Field.print_criteria(criteria)
# Get the Related Details of each Field
related_details = field.get_related_details()
if related_details is not None:
# Get the display label of related detail
if related_details.get_display_label() is not None:
print("RelatedDetails Display Label: " +
related_details.get_display_label())
# Get the API Name of related detail
print("Related Details API Name: " +
str(related_details.get_api_name()))
# Get the module of related detail
if related_details.get_module() is not None:
module = related_details.get_module()
# Get the layout of the module
if module.get_layout() is not None:
layout = module.get_layout()
print(
"Related Details Module Layout ID: " + layout.get_id())
print(
"Related Details Module Layout Name: " + layout.get_name())
# Get the display label of the module
if module.get_display_label() is not None:
print(
"Related Details Module Display Label: " + module.get_display_label())
# Get the Module API Name of the Related detail module
print(
"Related Details Module API Name: " + str(module.get_api_name()))
# Get the Module of the Related detail module
print("Related Details Module: " +
str(module.get_module()))
# Get the Module Name of the Related detail module
print("Related Details Module Name: " +
module.get_module_name())
# Get the ID of the Related detail
print("Related Details ID: " +
str(related_details.get_id()))
# Get the Type of the Related detail
print("Related Details Type: " +
str(related_details.get_type()))
# Get the ReadOnly of each Field
if field.get_read_only() is not None:
print("ReadOnly: " + str(field.get_read_only()))
# Get the obtained AssociationDetails instance
association_details = field.get_association_details()
if association_details is not None:
# Get the obtained LookupField instance
lookup_field = association_details.get_lookup_field()
if lookup_field is not None:
# Get the ID of the LookupField
print(
"AssociationDetails LookupField ID: " + lookup_field.get_id())
# Get the Name of the LookupField
print(
'AssociationDetails LookupField Name: ' + lookup_field.get_name())
# Get the obtained LookupField instance
related_field = association_details.get_related_field()
if related_field is not None:
# Get the ID of the RelatedField
print(
"AssociationDetails RelatedField ID: " + related_field.get_id())
# Get the Name of the RelatedField
print(
'AssociationDetails RelatedField Name: ' + related_field.get_name())
if field.get_quick_sequence_number() is not None:
# Get the QuickSequenceNumber of each Field
print('QuickSequenceNumber: ' +
str(field.get_quick_sequence_number()))
# Get the DisplayLabel of each Field
print("DisplayLabel: " + field.get_display_label())
if field.get_custom_field() is not None:
# Get if the Field is a CustomField
print("CustomField: " +
str(field.get_custom_field()))
if field.get_visible() is not None:
# Get the Visible of each Field
print("Visible: " + str(field.get_visible()))
if field.get_length() is not None:
# Get the Length of each Field
print("Length: " + str(field.get_length()))
if field.get_decimal_place() is not None:
# Get the DecimalPlace of each Field
print("DecimalPlace: " +
str(field.get_decimal_place()))
# Get the ViewType of each Field
view_type = field.get_view_type()
if view_type is not None:
# Get the View of the ViewType
print("View: " + str(view_type.get_view()))
# Get the Edit of the ViewType
print("Edit: " + str(view_type.get_edit()))
# Get the Create of the ViewType
print("Create: " + str(view_type.get_create()))
# Get the QuickCreate of the ViewType
print("QuickCreate: " +
str(view_type.get_quick_create()))
pick_list_values = field.get_pick_list_values()
if pick_list_values is not None:
for pick_list_value in pick_list_values:
Field.print_pick_list_value(pick_list_value)
multi_module_lookup = field.get_multi_module_lookup()
if multi_module_lookup is not None:
print("Lookup name : " +
str(multi_module_lookup.get_id()))
print("Lookup Id: " +
str(multi_module_lookup.get_name()))
module = multi_module_lookup.get_module()
if module is not None:
print("module Id: "+module.get_id())
print("module Id: " +
module.get_api_name())
multi_select_lookup = field.get_multiselectlookup()
# Check if multiSelectLookup is not None
if multi_select_lookup is not None:
# Get the DisplayLabel of the MultiSelectLookup
print(
"DisplayLabel: " + str(multi_select_lookup.get_display_label()))
# Get the LinkingModule of the MultiSelectLookup
print(
"LinkingModule: " + str(multi_select_lookup.get_linking_module()))
# Get the LookupApiname of the MultiSelectLookup
print(
"LookupApiname: " + str(multi_select_lookup.get_lookup_apiname()))
# Get the APIName of the MultiSelectLookup
print("APIName: " +
str(multi_select_lookup.get_api_name()))
# Get the ConnectedlookupApiname of the MultiSelectLookup
print(
"ConnectedlookupApiname: " + str(multi_select_lookup.get_connectedlookup_apiname()))
# Get the ID of the MultiSelectLookup
print("ID: " + str(multi_select_lookup.get_id()))
# Get the Connected Module of the MultiSelectLookup
print(
"Connected Module: " + str(multi_select_lookup.get_connected_module()))
multi_user_lookup = field.get_multiuserlookup()
# Check if MultiUserLookup is not None
if multi_user_lookup is not None:
# Get the DisplayLabel of the MultiUserLookup
print("DisplayLabel: " +
str(multi_user_lookup.get_display_label()))
# Get the LinkingModule of the MultiUserLookup
print("LinkingModule: " +
str(multi_user_lookup.get_linking_module()))
# Get the LookupApiname of the MultiUserLookup
print("LookupApiname: " +
str(multi_user_lookup.get_lookup_apiname()))
# Get the APIName of the MultiUserLookup
print("APIName: " +
str(multi_user_lookup.get_api_name()))
# Get the ConnectedlookupApiname of the MultiUserLookup
print(
"ConnectedlookupApiname: " + str(multi_user_lookup.get_connectedlookup_apiname()))
# Get the ID of the MultiUserLookup
print("ID: " + str(multi_user_lookup.get_id()))
# Get the Connected Module of the MultiUserLookup
print("Connected Module: " +
str(multi_user_lookup.get_connected_module()))
lookup = field.get_lookup()
if lookup.get_nil() is not None:
layout = lookup.get_layout()
if layout is not None:
#Get the ID of the Layout
print("\n Field ModuleLookup Layout ID: ")
print(layout.get_id())
#Get the Name of the Layout
print("\n Field ModuleLookup Layout Name: ")
print(layout.get_name())
formula = field.get_formula()
# Check if formula is not null
if formula is not None:
# Get the ReturnType of the Formula
print("\nField Formula ReturnType : ")
print(formula.get_return_type())
# Get the Expression of the Formula
if formula.get_expression() is not None:
print("\nField Formula Expression : ")
print(formula.get_expression())
if field.get_decimal_place() is not None:
# Get the DecimalPlace of each Field
print("\nField DecimalPlace: ")
print(field.get_decimal_place())
#Get the DisplayLabel of the Module
print("Field ModuleLookup DisplayLabel: ")
print(lookup.get_display_label())
#Get the APIName of the Module
print("Field ModuleLookup APIName: ")
print(lookup.get_api_name())
#Get the Module of the Module
print("Field ModuleLookup Module: ")
print(lookup.get_module1())
print("Field ModuleLookup ID: ")
print(lookup.get_id())
auto_number = field.get_auto_number()
# Check if ConvertMapping is not None
if field.get_convert_mapping() is not None:
# Get the ConvertMapping dict
for key, value in field.get_convert_mapping().items():
print(key + " : " + str(value))
profiles = field.get_profiles()
for profile in profiles:
print("\n Field Profile PermissionType: ")
print(profile.get_permission_type())
print("\n Field Profile Name: ")
print(profile.get_name())
print("\n Field Profile ID: ")
print(profile.get_id())
# Check if autoNumber is not None
if auto_number is not None:
# Get the Prefix of the AutoNumber
print('Prefix: ' + str(auto_number.get_prefix()))
# Get the Suffix of the AutoNumber
print('Suffix: ' + str(auto_number.get_suffix()))
if auto_number.get_start_number() is not None:
# Get the StartNumber of the AutoNumber
print('Start Number: ' +
str(auto_number.get_start_number()))
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def get_field(module_api_name, field_id):
"""
This method is used to get metadata about a single field of a module with fieldID and print the response.
:param module_api_name: The API Name of the field's module
:param field_id: The ID of the field to be obtained
"""
"""
example
module_api_name = "Leads"
field_id = 34096432293043
"""
# Get instance of FieldsOperations Class that takes module_api_name as parameter
fields_operations = FieldsOperations(module_api_name)
# Call get_field method which takes field_id as parameter
response = fields_operations.get_field(field_id)
if response is not None:
# Get the status code from response
print('Status Code: ' + str(response.get_status_code()))
if response.get_status_code() in [204, 304]:
print('No Content' if response.get_status_code()
== 204 else 'Not Modified')
return
# Get object from response
response_object = response.get_object()
if response_object is not None:
# Check if expected ResponseWrapper instance is received
if isinstance(response_object, ResponseWrapper):
fields = response_object.get_fields()
for field in fields:
# Get the Webhook of each Field
print("Webhook: " + str(field.get_webhook()))
# Get the JsonType of each Field
print("JsonType: " + str(field.get_json_type()))
# Get the DisplayLabel of each Field
print("DisplayLabel: " + field.get_display_label())
# Get the SystemMandatory of each Field
print("SystemMandatory: " +
str(field.get_system_mandatory()))
print("\n Field is Private :")
print(field.get_private())
print("\n Field is UiType :")
print(field.get_ui_type())
print("\n Field PickListValuesSortedLexically :")
print(field.get_pick_list_values_sorted_lexically())
# Get the DataType of each Field
print("DataType: " + field.get_data_type())
# Get the ColumnName of each Field
print("ColumnName: " + str(field.get_column_name()))
# Get the PersonalityName of each Field
print("PersonalityName: " +
str(field.get_personality_name()))
# Get the ID of each Field
print("ID: " + str(field.get_id()))
# Get the Sortable of each Field
print("Sortable: " + str(field.get_sortable()))
# Get the TransitionSequence of each Field
print("TransitionSequence: " +
str(field.get_transition_sequence()))
multi_module_lookup = field.get_multi_module_lookup()
if multi_module_lookup is not None:
print("Lookup name : " +
str(multi_module_lookup.get_id()))
print("Lookup Id: " +
str(multi_module_lookup.get_name()))
module = multi_module_lookup.get_module()
if module is not None:
print("module Id: " + module.get_id())
print("module Id: " + module.get_api_name())
if field.get_mandatory() is not None:
# Get the Mandatory of each Field
print("Mandatory: " + str(field.get_mandatory()))
if field.get_external() is not None:
external = field.get_external()
# Get the External Show of each Field
print("External Show: " + str(external.get_show()))
# Get the External Type of each Field
print("External Type: " + str(external.get_type()))
# Get the External Type of each Field
print("External Type: " +
str(external.get_allow_multiple_config()))
if field.get_unique() is not None:
# Get the Mandatory of each Field
print("Mandatory: " +
str(field.get_unique().get_casesensitive()))
if field.get_history_tracking() is not None:
# Get the HistoryTracking of each Field
history_tracking = field.get_history_tracking()
module = history_tracking.get_module()
if module is not None:
module_layout = module.get_layout()
if module_layout is not None:
print("Module layout id: " +
str(module_layout.get_id()))
print("Module display label: " +
str(module.get_api_name()))
print("Module api name: " +
str(module.get_id()))
print("Module module: " +
str(module.get_module()))
print("Module module name: " +
str(module.get_module_name()))
duration_configured = history_tracking.get_duration_configured_field()
if duration_configured is not None:
print(
"historytracking duration configured field: " + str(duration_configured.get_id()))
# Get the obtained Layout instance
layout = field.get_layouts()
# Check if layout is not null
if layout is not None:
# Get the ID of the Layout
print("Layout ID: " + str(layout.get_id()))
# Get the Name of the Layout
print("Layout Name: " + str(layout.get_name()))
# Get the APIName of each Field
print("APIName : " + str(field.get_api_name()))
# Get the Content of each Field
print("Content: " + str(field.get_content()))
# Get the Message of each Field
print("Message :" + str(field.get_message()))
# Get the obtained Crypt instance
crypt = field.get_crypt()
if crypt is not None:
print("Crypt Details")
# Get the Crypt Mode
print("Mode: " + crypt.get_mode())
# Get the Crypt Column
print("Column: ")
print(crypt.get_column())
# Get the Crypt Table
print("Table: ")
print(crypt.get_table())
# Get the Crypt Status
print("Status: " + str(crypt.get_status()))
print("\n Crypt Notify:")
print(crypt.get_notify())
enc_fld_ids = crypt.get_encfldids()
if enc_fld_ids is not None:
print("\nEncFldIds : ")
for enc_fld_id in enc_fld_ids:
print(enc_fld_id)
# Get the FieldLabel of each Field
print("FieldLabel: " + str(field.get_field_label()))
tool_tip = field.get_tooltip()
if tool_tip is not None:
# Get the Name of the ToolTip
print("ToolTip Name: " + tool_tip.get_name())
# Get the Value of the ToolTip
print("ToolTip Value: " + tool_tip.get_value())
currency = field.get_currency()
if currency is not None:
# Get the RoundingOption of the Currency
print("Currency RoundingOption: " +
str(currency.get_rounding_option()))
# Get the Precision of the Currency
print("Currency Precision: " +
str(currency.get_precision()))
# Get the CreatedSource of each Field
print("CreatedSource: " +
str(field.get_created_source()))
if field.get_display_type() is not None:
# Get the DisplayType of the Field
print("Field DisplayType: " +
str(field.get_display_type()))
# Get the FieldReadOnly of each Field
print("FieldReadOnly: " +
str(field.get_field_read_only()))
# Get the Filterable of each Field
print("Filterable: " + str(field.get_filterable()))
# Get the Criteria of each Field
criteria = field.get_criteria()
if criteria is not None:
Field.print_criteria(criteria)
# Get the Related Details of each Field
related_details = field.get_related_details()
if related_details is not None:
# Get the display label of related detail
if related_details.get_display_label() is not None:
print("RelatedDetails Display Label: " +
related_details.get_display_label())
# Get the API Name of related detail
print("Related Details API Name: " +
str(related_details.get_api_name()))
# Get the module of related detail
if related_details.get_module() is not None:
module = related_details.get_module()
# Get the layout of the module
if module.get_layout() is not None:
layout = module.get_layout()
print(
"Related Details Module Layout ID: " + layout.get_id())
print(
"Related Details Module Layout Name: " + layout.get_name())
# Get the display label of the module
if module.get_display_label() is not None:
print(
"Related Details Module Display Label: " + module.get_display_label())
# Get the Module API Name of the Related detail module
print("Related Details Module API Name: " +
str(module.get_api_name()))
# Get the Module of the Related detail module
print("Related Details Module: " +
str(module.get_module()))
# Get the Module Name of the Related detail module
print("Related Details Module Name: " +
module.get_module_name())
# Get the ID of the Related detail
print("Related Details ID: " +
str(related_details.get_id()))
# Get the Type of the Related detail
print("Related Details Type: " +
str(related_details.get_type()))
# Get the ReadOnly of each Field
if field.get_read_only() is not None:
print("ReadOnly: " + str(field.get_read_only()))
# Get the obtained AssociationDetails instance
association_details = field.get_association_details()
if association_details is not None:
# Get the obtained LookupField instance
lookup_field = association_details.get_lookup_field()
if lookup_field is not None:
# Get the ID of the LookupField
print(
"AssociationDetails LookupField ID: " + lookup_field.get_id())
# Get the Name of the LookupField
print(
'AssociationDetails LookupField Name: ' + lookup_field.get_name())
# Get the obtained LookupField instance
related_field = association_details.get_related_field()
if related_field is not None:
# Get the ID of the RelatedField
print(
"AssociationDetails RelatedField ID: " + related_field.get_id())
# Get the Name of the RelatedField
print(
'AssociationDetails RelatedField Name: ' + related_field.get_name())
if field.get_quick_sequence_number() is not None:
# Get the QuickSequenceNumber of each Field
print('QuickSequenceNumber: ' +
str(field.get_quick_sequence_number()))
# Get the DisplayLabel of each Field
print("DisplayLabel: " + field.get_display_label())
if field.get_custom_field() is not None:
# Get if the Field is a CustomField
print("CustomField: " +
str(field.get_custom_field()))
if field.get_visible() is not None:
# Get the Visible of each Field
print("Visible: " + str(field.get_visible()))
if field.get_length() is not None:
# Get the Length of each Field
print("Length: " + str(field.get_length()))
if field.get_decimal_place() is not None:
# Get the DecimalPlace of each Field
print("DecimalPlace: " +
str(field.get_decimal_place()))
multi_user_lookup = field.get_multiuserlookup()
# Check if MultiUserLookup is not None
if multi_user_lookup is not None:
# Get the DisplayLabel of the MultiUserLookup
print("DisplayLabel: " +
str(multi_user_lookup.get_display_label()))
# Get the LinkingModule of the MultiUserLookup
print("LinkingModule: " +
str(multi_user_lookup.get_linking_module()))
# Get the LookupApiname of the MultiUserLookup
print("LookupApiname: " +
str(multi_user_lookup.get_lookup_apiname()))
# Get the APIName of the MultiUserLookup
print("APIName: " +
str(multi_user_lookup.get_api_name()))
# Get the ConnectedlookupApiname of the MultiUserLookup
print("ConnectedlookupApiname: " +
str(multi_user_lookup.get_connectedlookup_apiname()))
# Get the ID of the MultiUserLookup
print("ID: " + str(multi_user_lookup.get_id()))
# Get the Connected Module of the MultiUserLookup
print("Connected Module: " +
str(multi_user_lookup.get_connected_module()))
# Get the ViewType of each Field
view_type = field.get_view_type()
if view_type is not None:
# Get the View of the ViewType
print("View: " + str(view_type.get_view()))
# Get the Edit of the ViewType
print("Edit: " + str(view_type.get_edit()))
# Get the Create of the ViewType
print("Create: " + str(view_type.get_create()))
# Get the QuickCreate of the ViewType
print("QuickCreate: " +
str(view_type.get_quick_create()))
pick_list_values = field.get_pick_list_values()
if pick_list_values is not None:
for pick_list_value in pick_list_values:
Field.print_pick_list_value(pick_list_value)
multi_select_lookup = field.get_multiselectlookup()
# Check if multiSelectLookup is not None
if multi_select_lookup is not None:
# Get the DisplayLabel of the MultiSelectLookup
print("DisplayLabel: " +
str(multi_select_lookup.get_display_label()))
# Get the LinkingModule of the MultiSelectLookup
print("LinkingModule: " +
str(multi_select_lookup.get_linking_module()))
# Get the LookupApiname of the MultiSelectLookup
print("LookupApiname: " +
str(multi_select_lookup.get_lookup_apiname()))
# Get the APIName of the MultiSelectLookup
print("APIName: " +
str(multi_select_lookup.get_api_name()))
# Get the ConnectedlookupApiname of the MultiSelectLookup
print("ConnectedlookupApiname: " +
str(multi_select_lookup.get_connectedlookup_apiname()))
# Get the ID of the MultiSelectLookup
print("ID: " + str(multi_select_lookup.get_id()))
# Get the Connected Module of the MultiSelectLookup
print("Connected Module: " +
str(multi_select_lookup.get_connected_module()))
lookup = field.get_lookup()
if lookup is not None:
layout = lookup.get_layout()
if layout is not None:
#Get the ID of the Layout
print("\n Field ModuleLookup Layout ID: ")
print(layout.get_id())
#Get the Name of the Layout
print("\n Field ModuleLookup Layout Name: ")
print(layout.get_name())
formula = field.get_formula()
# Check if formula is not null
if formula is not None:
# Get the ReturnType of the Formula
print("\nField Formula ReturnType : ")
print(formula.get_return_type())
# Get the Expression of the Formula
if formula.get_expression() is not None:
print("\nField Formula Expression : ")
print(formula.get_expression())
if field.get_decimal_place() is not None:
# Get the DecimalPlace of each Field
print("\nField DecimalPlace: ")
print(field.get_decimal_place())
#Get the DisplayLabel of the Module
print("Field ModuleLookup DisplayLabel: ")
print(lookup.get_display_label())
#Get the APIName of the Module
print("Field ModuleLookup APIName: ")
print(lookup.get_api_name())
#Get the Module of the Module
print("Field ModuleLookup Module: ")
print(lookup.get_module())
print("Field ModuleLookup ID: ")
print(lookup.get_id())
auto_number = field.get_auto_number()
# Check if ConvertMapping is not None
if field.get_convert_mapping() is not None:
# Get the ConvertMapping dict
for key, value in field.get_convert_mapping().items():
print(key + " : " + str(value))
profiles = field.get_profiles()
for profile in profiles:
print("\n Field Profile PermissionType: ")
print(profile.get_permission_type())
print("\n Field Profile Name: ")
print(profile.get_name())
print("\n Field Profile ID: ")
print(profile.get_id())
# Check if autoNumber is not None
if auto_number is not None:
# Get the Prefix of the AutoNumber
print('Prefix: ' + str(auto_number.get_prefix()))
# Get the Suffix of the AutoNumber
print('Suffix: ' + str(auto_number.get_suffix()))
if auto_number.get_start_number() is not None:
# Get the StartNumber of the AutoNumber
print('Start Number: ' +
str(auto_number.get_start_number()))
# Check if the request returned an exception
elif isinstance(response_object, APIException):
# Get the Status
print("Status: " + response_object.get_status().get_value())
# Get the Code
print("Code: " + response_object.get_code().get_value())
print("Details")
# Get the details dict
details = response_object.get_details()
for key, value in details.items():
print(key + ' : ' + str(value))
# Get the Message
print("Message: " + response_object.get_message().get_value())
@staticmethod
def print_pick_list_value(pick_list_value):
# Get the DisplayValue of each PickListValue
print("\n DisplayValue:")
print(pick_list_value.get_display_value())
# Get the SequenceNumber of each PickListValue
print("\n SequenceNumber:")
print(pick_list_value.get_sequence_number())
# Get the ExpectedDataType of each PickListValue
print("\n ExpectedDataType:")
print(pick_list_value.get_expected_data_type())
# Get the ActualValue of each PickListValue
print("\n ActualValue :")
print(pick_list_value.get_actual_value)
if pick_list_value.get_maps() is not None:
for map in pick_list_value.get_maps():
# Get each value from the map
print("\n")
print(map)
pick_list_values = map.get_pick_list_values
if pick_list_values is not None:
for plv in pick_list_values:
Field.print_pick_list_value(plv)
# Get the SysRefName of each PickListValues
print("\nField PickListValue SysRefName: ")
print(pick_list_value.get_sys_ref_name())
# Get the Type of each PickListValues
print("\nField PickListValue Type: ")
print(pick_list_value.get_type())
| 48.840037 | 120 | 0.437004 | 4,454 | 52,210 | 4.936013 | 0.052537 | 0.054856 | 0.040118 | 0.040755 | 0.957971 | 0.941733 | 0.934273 | 0.930407 | 0.923766 | 0.921992 | 0 | 0.001265 | 0.500421 | 52,210 | 1,068 | 121 | 48.885768 | 0.841621 | 0.161272 | 0 | 0.864112 | 0 | 0 | 0.091723 | 0.003462 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005226 | false | 0 | 0.003484 | 0 | 0.013937 | 0.496516 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
360c08450dff06f0f32bfb1201b30aa538e62d7f | 20,941 | py | Python | copct-master/baxter_corpus/demo_um.py | jhomble/electron435 | 2a94a901679a1ebbdeea01bb9e888d365d536bec | [
"MIT"
] | 4 | 2016-10-26T13:58:44.000Z | 2018-11-13T13:03:52.000Z | copct-master/baxter_corpus/demo_um.py | jhomble/electron435 | 2a94a901679a1ebbdeea01bb9e888d365d536bec | [
"MIT"
] | 4 | 2020-03-31T01:10:26.000Z | 2020-03-31T03:06:28.000Z | copct-master/baxter_corpus/demo_um.py | jhomble/electron435 | 2a94a901679a1ebbdeea01bb9e888d365d536bec | [
"MIT"
] | 1 | 2020-03-03T06:22:08.000Z | 2020-03-03T06:22:08.000Z | demo = (
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"U_blk1",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk1")),
),
"move grasped object",
(
2.000000,
"table",
(
(-0.999843, 0.017692, -0.000017, ),
(-0.017692, -0.999843, -0.000041, ),
(-0.000018, -0.000041, 1.000000, ),
),
(
(-13.268801, ),
(-8.297660, ),
(4.168101, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk1")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"U_blk4",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk4")),
),
"move grasped object",
(
2.000000,
"U_blk1",
(
(-0.999999, 0.001061, -0.000087, ),
(-0.001061, -0.999999, 0.000965, ),
(-0.000086, 0.000965, 1.000000, ),
),
(
(-3.272454, ),
(-0.171383, ),
(3.704703, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk4")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"U_blk3",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk3")),
),
"move grasped object",
(
2.000000,
"U_blk4",
(
(0.999747, -0.022447, 0.001617, ),
(0.022449, 0.999747, -0.001397, ),
(-0.001585, 0.001433, 0.999998, ),
),
(
(-0.071792, ),
(0.070832, ),
(3.614694, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk3")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"U_blk2",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk2")),
),
"move grasped object",
(
2.000000,
"U_blk1",
(
(-0.999984, -0.005534, -0.001333, ),
(0.005535, -0.999985, -0.000563, ),
(-0.001330, -0.000571, 0.999999, ),
),
(
(3.349657, ),
(0.264758, ),
(3.525416, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk2")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"U_blk5",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk5")),
),
"move grasped object",
(
2.000000,
"U_blk2",
(
(0.999917, 0.011731, -0.005248, ),
(-0.011717, 0.999928, 0.002750, ),
(0.005280, -0.002689, 0.999982, ),
),
(
(-0.004390, ),
(0.030385, ),
(3.707272, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","U_blk5")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk3",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk3")),
),
"move grasped object",
(
2.000000,
"table",
(
(0.999990, -0.004510, -0.000234, ),
(0.004510, 0.999990, 0.000214, ),
(0.000233, -0.000215, 1.000000, ),
),
(
(1.320885, ),
(-8.684838, ),
(4.175274, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk3")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk6",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk6")),
),
"move grasped object",
(
2.000000,
"M_blk3",
(
(0.999904, 0.000730, 0.013857, ),
(-0.000743, 0.999999, 0.000975, ),
(-0.013856, -0.000985, 0.999904, ),
),
(
(-0.011148, ),
(-0.039330, ),
(3.557707, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk6")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk4",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk4")),
),
"move grasped object",
(
2.000000,
"table",
(
(0.999968, -0.007966, 0.000070, ),
(0.007966, 0.999968, 0.000458, ),
(-0.000073, -0.000457, 1.000000, ),
),
(
(7.887043, ),
(-8.530935, ),
(4.169541, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk4")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk7",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk7")),
),
"move grasped object",
(
2.000000,
"M_blk4",
(
(0.999494, 0.031798, -0.000597, ),
(-0.031798, 0.999494, 0.000870, ),
(0.000625, -0.000851, 0.999999, ),
),
(
(0.057220, ),
(0.077963, ),
(3.589450, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk7")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk1",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk1")),
),
"move grasped object",
(
2.000000,
"M_blk6",
(
(-0.999960, -0.007850, -0.004377, ),
(0.007845, -0.999969, 0.001098, ),
(-0.004386, 0.001063, 0.999990, ),
),
(
(2.410455, ),
(0.290089, ),
(3.660411, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk1")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk5",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk5")),
),
"move grasped object",
(
2.000000,
"table",
(
(0.999936, 0.011252, 0.001328, ),
(-0.011249, 0.999935, -0.001875, ),
(-0.001349, 0.001860, 0.999997, ),
),
(
(14.051610, ),
(-8.605640, ),
(4.175774, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk5")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk8",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk8")),
),
"move grasped object",
(
2.000000,
"M_blk5",
(
(0.999998, 0.001698, 0.000664, ),
(-0.001694, 0.999981, -0.005862, ),
(-0.000674, 0.005861, 0.999983, ),
),
(
(0.032341, ),
(0.023818, ),
(3.642627, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk8")),
),
"release",
(
2.000000,
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","nothing")),
),
"move arm and grasp",
(
2.000000,
"M_blk2",
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk2")),
),
"move grasped object",
(
2.000000,
"M_blk8",
(
(-0.994319, 0.106435, -0.001123, ),
(-0.106434, -0.994319, -0.000771, ),
(-0.001199, -0.000647, 0.999999, ),
),
(
(-2.419946, ),
(0.153725, ),
(3.472966, ),
),
)
),
(
(
("workspace", "Workspace"),
("table", "Block"),
("M_blk1", "Block"),
("M_blk2", "Block"),
("M_blk3", "Block"),
("M_blk4", "Block"),
("M_blk5", "Block"),
("M_blk6", "Block"),
("M_blk7", "Block"),
("M_blk8", "Block"),
("U_blk1", "Block"),
("U_blk2", "Block"),
("U_blk3", "Block"),
("U_blk4", "Block"),
("U_blk5", "Block"),
("discard-bin", "Block"),
("gripping", ("nothing","M_blk2")),
),
"release",
(
2.000000,
)
),
)
| 18.46649 | 39 | 0.483071 | 2,480 | 20,941 | 3.855242 | 0.066935 | 0.195795 | 0.093819 | 0.114214 | 0.880138 | 0.880138 | 0.861835 | 0.849284 | 0.843008 | 0.843008 | 0 | 0.113506 | 0.191395 | 20,941 | 1,133 | 40 | 18.482789 | 0.451131 | 0 | 0 | 0.764342 | 0 | 0 | 0.422568 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3615d14a054f15999b7eb526ce8c09bd166db890 | 1,255 | py | Python | client/landmarks_distance_calculation2.py | MiltonCastilloG/lip_reading_first_approach | bc3a1693607654320b1532580c96197cd4e6e1d2 | [
"MIT"
] | null | null | null | client/landmarks_distance_calculation2.py | MiltonCastilloG/lip_reading_first_approach | bc3a1693607654320b1532580c96197cd4e6e1d2 | [
"MIT"
] | 2 | 2020-01-30T14:44:32.000Z | 2020-03-27T11:12:57.000Z | client/landmarks_distance_calculation2.py | MiltonCastilloG/lip_reading_first_approach | bc3a1693607654320b1532580c96197cd4e6e1d2 | [
"MIT"
] | null | null | null | import math
def calculate_distance(mouse_landmarks):
    """Compute Euclidean distances from reference mouth landmarks to landmark groups.

    Distances are computed, in order, from landmark 3 to landmarks 0-11,
    from landmark 14 to landmarks 12-19, from landmark 0 to landmarks 0-11,
    and from landmark 19 to landmarks 12-19 — 40 values total, matching the
    original four copy-pasted loops.

    :param mouse_landmarks: sequence of at least 20 (x, y) coordinate pairs.
    :return: list of 40 float distances.
    """
    def _distances_from(ref_index, indices):
        # Euclidean distance from the reference landmark to each indexed one.
        ref_x = mouse_landmarks[ref_index][0]
        ref_y = mouse_landmarks[ref_index][1]
        return [
            math.hypot(ref_x - mouse_landmarks[i][0],
                       ref_y - mouse_landmarks[i][1])
            for i in indices
        ]

    landmarks_distances = []
    # Same reference/target pairings (and ordering) as the original code.
    for ref_index, indices in ((3, range(12)), (14, range(12, 20)),
                               (0, range(12)), (19, range(12, 20))):
        landmarks_distances.extend(_distances_from(ref_index, indices))
    return landmarks_distances
363ecd212155c80b4858efdee15a0e13b70459a4 | 126 | py | Python | src/polyswarmclient/liveness/exceptions.py | polyswarm/polyswarm-client | 1ce057725d7db59c3582e4cd3cf148cde7ddddeb | [
"MIT"
] | 21 | 2018-09-15T00:12:42.000Z | 2020-10-28T00:42:59.000Z | src/polyswarmclient/liveness/exceptions.py | polyswarm/polyswarm-client | 1ce057725d7db59c3582e4cd3cf148cde7ddddeb | [
"MIT"
] | 435 | 2018-09-05T18:53:21.000Z | 2021-11-30T17:32:10.000Z | src/polyswarmclient/liveness/exceptions.py | polyswarm/polyswarm-client | 1ce057725d7db59c3582e4cd3cf148cde7ddddeb | [
"MIT"
] | 3 | 2019-07-26T00:14:47.000Z | 2021-04-26T10:57:56.000Z | from polyswarmclient.exceptions import PolyswarmClientException
class LivenessReadError(PolyswarmClientException):
    """Raised when the liveness status cannot be read.

    NOTE(review): semantics inferred from the class name; the base class comes
    from polyswarmclient.exceptions — confirm usage at the raise sites.
    """
| 21 | 63 | 0.865079 | 9 | 126 | 12.111111 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.103175 | 126 | 5 | 64 | 25.2 | 0.964602 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
364adb518f2613c30023064e85a03b0d68d523c6 | 12,053 | py | Python | tests/p4rt/test_p4rt_mirror.py | pins/sonic-swss-public | 4a443eaa33a3d354d99daa3c340cdcff882fc061 | [
"Apache-2.0"
] | null | null | null | tests/p4rt/test_p4rt_mirror.py | pins/sonic-swss-public | 4a443eaa33a3d354d99daa3c340cdcff882fc061 | [
"Apache-2.0"
] | 3 | 2021-11-19T21:46:50.000Z | 2021-11-19T22:14:08.000Z | tests/p4rt/test_p4rt_mirror.py | pins/sonic-swss-public | 4a443eaa33a3d354d99daa3c340cdcff882fc061 | [
"Apache-2.0"
] | 1 | 2021-11-19T19:42:07.000Z | 2021-11-19T19:42:07.000Z | from swsscommon import swsscommon
import time
import util
import json
class P4RtMirrorSessionWrapper(util.DBInterface):
    """Helper for reading/writing P4RT mirror-session entries in APP DB and ASIC DB."""

    # APP DB table names and field names used by P4RT mirror-session entries.
    APP_DB_TBL_NAME = swsscommon.APP_P4RT_TABLE_NAME
    TBL_NAME = swsscommon.APP_P4RT_MIRROR_SESSION_TABLE_NAME
    ACTION = "action"
    PORT = "port"
    SRC_IP = "src_ip"
    DST_IP = "dst_ip"
    SRC_MAC = "src_mac"
    DST_MAC = "dst_mac"
    TTL = "ttl"
    TOS = "tos"

    # ASIC DB table name and SAI attribute names for mirror sessions.
    ASIC_DB_TBL_NAME = "ASIC_STATE:SAI_OBJECT_TYPE_MIRROR_SESSION"
    SAI_MIRROR_SESSION_ATTR_MONITOR_PORT = "SAI_MIRROR_SESSION_ATTR_MONITOR_PORT"
    SAI_MIRROR_SESSION_ATTR_TYPE = "SAI_MIRROR_SESSION_ATTR_TYPE"
    SAI_MIRROR_SESSION_ATTR_ERSPAN_ENCAPSULATION_TYPE = "SAI_MIRROR_SESSION_ATTR_ERSPAN_ENCAPSULATION_TYPE"
    SAI_MIRROR_SESSION_ATTR_IPHDR_VERSION = "SAI_MIRROR_SESSION_ATTR_IPHDR_VERSION"
    SAI_MIRROR_SESSION_ATTR_TOS = "SAI_MIRROR_SESSION_ATTR_TOS"
    SAI_MIRROR_SESSION_ATTR_TTL = "SAI_MIRROR_SESSION_ATTR_TTL"
    SAI_MIRROR_SESSION_ATTR_SRC_IP_ADDRESS = "SAI_MIRROR_SESSION_ATTR_SRC_IP_ADDRESS"
    SAI_MIRROR_SESSION_ATTR_DST_IP_ADDRESS = "SAI_MIRROR_SESSION_ATTR_DST_IP_ADDRESS"
    SAI_MIRROR_SESSION_ATTR_SRC_MAC_ADDRESS = "SAI_MIRROR_SESSION_ATTR_SRC_MAC_ADDRESS"
    SAI_MIRROR_SESSION_ATTR_DST_MAC_ADDRESS = "SAI_MIRROR_SESSION_ATTR_DST_MAC_ADDRESS"
    SAI_MIRROR_SESSION_ATTR_GRE_PROTOCOL_TYPE = "SAI_MIRROR_SESSION_ATTR_GRE_PROTOCOL_TYPE"

    def generate_app_db_key(self, mirror_session_id):
        """Build the APP DB key for a mirror-session entry.

        The key is the table name, a ':' separator, and a compact-JSON map of
        the match field to the given mirror session id.
        """
        match_fields = {
            util.prepend_match_field("mirror_session_id"): mirror_session_id,
        }
        encoded = json.dumps(match_fields, separators=(",", ":"))
        return f"{self.TBL_NAME}:{encoded}"
class TestP4RTMirror(object):
    """Component tests for P4RT mirror-session add/modify/delete handling.

    Each test drives the P4RT APP DB interface and verifies that the change
    is reflected in the application DB, the application state DB, and the
    ASIC DB (SAI mirror-session object).
    """

    def _set_up(self, dvs):
        """Connect DB wrappers to the virtual switch and open the P4RT response channel."""
        self._p4rt_mirror_session_wrapper = P4RtMirrorSessionWrapper()
        self._p4rt_mirror_session_wrapper.set_up_databases(dvs)
        self._response_consumer = swsscommon.NotificationConsumer(
            self._p4rt_mirror_session_wrapper.appl_db, "APPL_DB_P4RT_RESPONSE_CHANNEL")

    def test_MirrorSessionAddModifyAndDelete(self, dvs, testlog):
        """Create, modify, then delete a mirror session, verifying all three DBs at each step."""
        # Initialize database connectors.
        self._set_up(dvs)
        wrapper = self._p4rt_mirror_session_wrapper
        # Key prefix shared by the APP DB and APP state DB queries below.
        appl_tbl_prefix = wrapper.APP_DB_TBL_NAME + ":" + wrapper.TBL_NAME

        # Snapshot existing Application and ASIC DB entries so the count
        # assertions below are robust to pre-existing mirror sessions.
        original_appl_mirror_entries = util.get_keys(
            wrapper.appl_db, appl_tbl_prefix)
        original_appl_state_mirror_entries = util.get_keys(
            wrapper.appl_state_db, appl_tbl_prefix)
        original_asic_mirror_entries = util.get_keys(
            wrapper.asic_db, wrapper.ASIC_DB_TBL_NAME)

        # 1. Create mirror session.
        mirror_session_id = "mirror_session1"
        action = "mirror_as_ipv4_erspan"
        port = "Ethernet8"
        src_ip = "10.206.196.31"
        dst_ip = "172.20.0.203"
        src_mac = "00:02:03:04:05:06"
        dst_mac = "00:1A:11:17:5F:80"
        ttl = "0x40"
        tos = "0x00"

        attr_list_in_app_db = [
            (wrapper.ACTION, action),
            (util.prepend_param_field(wrapper.PORT), port),
            (util.prepend_param_field(wrapper.SRC_IP), src_ip),
            (util.prepend_param_field(wrapper.DST_IP), dst_ip),
            (util.prepend_param_field(wrapper.SRC_MAC), src_mac),
            (util.prepend_param_field(wrapper.DST_MAC), dst_mac),
            (util.prepend_param_field(wrapper.TTL), ttl),
            (util.prepend_param_field(wrapper.TOS), tos),
        ]
        # Position of the DST_MAC tuple in attr_list_in_app_db; used by the
        # modify step below.
        dst_mac_app_db_index = 5

        mirror_session_key = wrapper.generate_app_db_key(mirror_session_id)
        wrapper.set_app_db_entry(mirror_session_key, attr_list_in_app_db)
        util.verify_response(
            self._response_consumer, mirror_session_key, attr_list_in_app_db,
            "SWSS_RC_SUCCESS")

        # Query application database for mirror entries.
        appl_mirror_entries = util.get_keys(wrapper.appl_db, appl_tbl_prefix)
        assert len(appl_mirror_entries) == len(original_appl_mirror_entries) + 1

        # Query application database for newly created mirror key.
        (status, fvs) = util.get_key(
            wrapper.appl_db, wrapper.APP_DB_TBL_NAME, mirror_session_key)
        assert status
        util.verify_attr(fvs, attr_list_in_app_db)

        # Query application state database for mirror entries.
        appl_state_mirror_entries = util.get_keys(
            wrapper.appl_state_db, appl_tbl_prefix)
        assert len(appl_state_mirror_entries) == len(original_appl_state_mirror_entries) + 1

        # Query application state database for newly created mirror key.
        (status, fvs) = util.get_key(
            wrapper.appl_state_db, wrapper.APP_DB_TBL_NAME, mirror_session_key)
        assert status
        util.verify_attr(fvs, attr_list_in_app_db)

        # Query ASIC database for mirror entries.
        asic_mirror_entries = util.get_keys(
            wrapper.asic_db, wrapper.ASIC_DB_TBL_NAME)
        assert len(asic_mirror_entries) == len(original_asic_mirror_entries) + 1

        # Identify the newly created ASIC DB key (the one absent from the
        # pre-test snapshot).
        asic_db_key = next(
            (key for key in asic_mirror_entries
             if key not in original_asic_mirror_entries),
            None)
        assert asic_db_key is not None
        (status, fvs) = util.get_key(
            wrapper.asic_db, wrapper.ASIC_DB_TBL_NAME, asic_db_key)
        assert status

        # Resolve the SAI object id of the monitor port (Ethernet8).
        port_oid = util.get_port_oid_by_name(dvs, port)
        assert port_oid is not None

        expected_attr_list_in_asic_db = [
            (wrapper.SAI_MIRROR_SESSION_ATTR_MONITOR_PORT, port_oid),
            (wrapper.SAI_MIRROR_SESSION_ATTR_TYPE,
             "SAI_MIRROR_SESSION_TYPE_ENHANCED_REMOTE"),
            (wrapper.SAI_MIRROR_SESSION_ATTR_ERSPAN_ENCAPSULATION_TYPE,
             "SAI_ERSPAN_ENCAPSULATION_TYPE_MIRROR_L3_GRE_TUNNEL"),
            # MIRROR_SESSION_DEFAULT_IP_HDR_VER
            (wrapper.SAI_MIRROR_SESSION_ATTR_IPHDR_VERSION, "4"),
            (wrapper.SAI_MIRROR_SESSION_ATTR_TOS, "0"),
            (wrapper.SAI_MIRROR_SESSION_ATTR_TTL, "64"),
            (wrapper.SAI_MIRROR_SESSION_ATTR_SRC_IP_ADDRESS, src_ip),
            (wrapper.SAI_MIRROR_SESSION_ATTR_DST_IP_ADDRESS, dst_ip),
            (wrapper.SAI_MIRROR_SESSION_ATTR_SRC_MAC_ADDRESS, src_mac),
            (wrapper.SAI_MIRROR_SESSION_ATTR_DST_MAC_ADDRESS, dst_mac),
            # GRE_PROTOCOL_ERSPAN 0x88be
            (wrapper.SAI_MIRROR_SESSION_ATTR_GRE_PROTOCOL_TYPE, "35006"),
        ]
        # Position of the DST_MAC attribute in expected_attr_list_in_asic_db;
        # used by the modify step below.
        dst_mac_asic_db_index = 9
        util.verify_attr(fvs, expected_attr_list_in_asic_db)

        # 2. Modify the existing mirror session: change its destination MAC.
        new_dst_mac = "00:1A:11:17:5F:FF"
        attr_list_in_app_db[dst_mac_app_db_index] = (
            util.prepend_param_field(wrapper.DST_MAC), new_dst_mac)
        wrapper.set_app_db_entry(mirror_session_key, attr_list_in_app_db)
        util.verify_response(
            self._response_consumer, mirror_session_key, attr_list_in_app_db,
            "SWSS_RC_SUCCESS")

        # Query application database for the modified mirror key.
        (status, fvs) = util.get_key(
            wrapper.appl_db, wrapper.APP_DB_TBL_NAME, mirror_session_key)
        assert status
        util.verify_attr(fvs, attr_list_in_app_db)

        # Query application state database for the modified mirror key.
        (status, fvs) = util.get_key(
            wrapper.appl_state_db, wrapper.APP_DB_TBL_NAME, mirror_session_key)
        assert status
        util.verify_attr(fvs, attr_list_in_app_db)

        # Query ASIC DB about the modified mirror session.
        expected_attr_list_in_asic_db[dst_mac_asic_db_index] = (
            wrapper.SAI_MIRROR_SESSION_ATTR_DST_MAC_ADDRESS, new_dst_mac)
        (status, fvs) = util.get_key(
            wrapper.asic_db, wrapper.ASIC_DB_TBL_NAME, asic_db_key)
        assert status
        util.verify_attr(fvs, expected_attr_list_in_asic_db)

        # 3. Delete the mirror session.
        wrapper.remove_app_db_entry(mirror_session_key)
        util.verify_response(
            self._response_consumer, mirror_session_key, [], "SWSS_RC_SUCCESS")

        # Query application database for mirror entries; count is back to the
        # pre-test snapshot.
        appl_mirror_entries = util.get_keys(wrapper.appl_db, appl_tbl_prefix)
        assert len(appl_mirror_entries) == len(original_appl_mirror_entries)

        # Query application database for the deleted mirror key.
        (status, fvs) = util.get_key(
            wrapper.appl_db, wrapper.APP_DB_TBL_NAME, mirror_session_key)
        assert not status

        # Query application state database for mirror entries.
        appl_state_mirror_entries = util.get_keys(
            wrapper.appl_state_db, appl_tbl_prefix)
        assert len(appl_state_mirror_entries) == len(original_appl_state_mirror_entries)

        # Query application state database for the deleted mirror key.
        (status, fvs) = util.get_key(
            wrapper.appl_state_db, wrapper.APP_DB_TBL_NAME, mirror_session_key)
        assert not status

        # Query ASIC database for mirror entries.
        asic_mirror_entries = util.get_keys(
            wrapper.asic_db, wrapper.ASIC_DB_TBL_NAME)
        assert len(asic_mirror_entries) == len(original_asic_mirror_entries)

        # Query ASIC database for the deleted mirror key.
        (status, fvs) = util.get_key(
            wrapper.asic_db, wrapper.ASIC_DB_TBL_NAME, asic_db_key)
        assert not status
| 54.292793 | 150 | 0.702813 | 1,588 | 12,053 | 4.778338 | 0.102015 | 0.226147 | 0.161307 | 0.193727 | 0.800079 | 0.772404 | 0.741961 | 0.71903 | 0.712968 | 0.64233 | 0 | 0.016975 | 0.232639 | 12,053 | 221 | 151 | 54.538462 | 0.803438 | 0.100141 | 0 | 0.431138 | 0 | 0 | 0.074804 | 0.053537 | 0 | 0 | 0.00074 | 0 | 0.101796 | 1 | 0.017964 | false | 0 | 0.023952 | 0 | 0.191617 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.